first docs
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/.github/ISSUE_TEMPLATE.md (52 lines, generated, vendored, normal file)
@@ -0,0 +1,52 @@
<!--

Thank you for reporting an issue!

Remember, this issue tracker is for reporting issues ONLY with node-gyp.

If you have an issue installing a specific module, please file an issue on
that module's issue tracker (`npm issues modulename`). Open an issue here only if
you are sure this is an issue with node-gyp, not with the module you are
trying to build.

Fill out the form below. We probably won't investigate an issue that does not
provide the basic information we require.

-->

Please look through your error log for the string `gyp info using node-gyp@`, and if the version number is less than the [current release of node-gyp](https://github.com/nodejs/node-gyp/releases), then __please upgrade__ using the instructions at https://github.com/nodejs/node-gyp/blob/master/docs/Updating-npm-bundled-node-gyp.md and try your command again.

Requests for help with [`node-sass` are very common](https://github.com/nodejs/node-gyp/issues?q=label%3A%22Node+Sass+--%3E+Dart+Sass%22). Please be aware that this package is deprecated; you should seek alternatives and avoid opening new issues about it here.

* **Node Version**: <!-- `node -v` and `npm -v` -->
* **Platform**: <!-- `uname -a` (UNIX), or `systeminfo | findstr /B /C:"OS Name" /C:"OS Version" /C:"System Type"` (Windows) -->
* **Compiler**: <!-- `cc -v` (UNIX) or `msbuild /version & cl` (Windows) -->
* **Module**: <!-- what you tried to build/install -->

<details><summary>Verbose output (from npm or node-gyp):</summary>

```
Paste your log here, between the backticks. It can be:
  - npm --verbose output,
  - or contents of npm-debug.log,
  - or output of node-gyp rebuild --verbose.
Include the command you were trying to run.

This should look like this:

>npm --verbose
npm info it worked if it ends with ok
npm verb cli [
npm verb cli 'C:\\...\\node\\13.9.0\\x64\\node.exe',
npm verb cli 'C:\\...\\node\\13.9.0\\x64\\node_modules\\npm\\bin\\npm-cli.js',
npm verb cli '--verbose'
npm verb cli ]
npm info using npm@6.13.7
npm info using node@v13.9.0

Usage: npm <command>
(...)
```

</details>

<!-- Any further details -->
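For reference, the verbose log this template asks for can be produced with the commands the template itself names. A minimal sketch, assuming a POSIX shell and a local node-gyp install; `build.log` and `npm-verbose.log` are placeholder filenames:

```sh
# Confirm which node-gyp version actually ran (the string the template asks you to look for).
grep "gyp info using node-gyp@" npm-debug.log

# Capture verbose output from a direct node-gyp build, stdout and stderr together.
node-gyp rebuild --verbose > build.log 2>&1

# Or capture a verbose npm install of the module that fails to build.
npm install --verbose > npm-verbose.log 2>&1
```

Whichever log is pasted into the `<details>` block above, include the command that produced it, as the template requests.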
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/.github/PULL_REQUEST_TEMPLATE.md (17 lines, generated, vendored, normal file)
@@ -0,0 +1,17 @@
<!--
Thank you for your pull request. Please review the requirements below.

Contributor guide: https://github.com/nodejs/node/blob/master/CONTRIBUTING.md
-->

##### Checklist
<!-- Remove items that do not apply. For completed items, change [ ] to [x]. -->

- [ ] `npm install && npm test` passes
- [ ] tests are included <!-- Bug fixes and new features should include tests -->
- [ ] documentation is changed or added
- [ ] commit message follows [commit guidelines](https://github.com/googleapis/release-please#how-should-i-write-my-commits)

##### Description of change
<!-- Provide a description of the change -->
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/.github/workflows/release-please.yml (56 lines, generated, vendored, normal file)
@@ -0,0 +1,56 @@
name: release-please

on:
  push:
    branches:
      - main

jobs:
  release-please:
    runs-on: ubuntu-latest
    steps:
      - uses: google-github-actions/release-please-action@v2
        id: release
        with:
          package-name: node-gyp
          release-type: node
          changelog-types: >
            [{"type":"feat","section":"Features","hidden":false},
            {"type":"fix","section":"Bug Fixes","hidden":false},
            {"type":"bin","section":"Core","hidden":false},
            {"type":"gyp","section":"Core","hidden":false},
            {"type":"lib","section":"Core","hidden":false},
            {"type":"src","section":"Core","hidden":false},
            {"type":"test","section":"Tests","hidden":false},
            {"type":"build","section":"Core","hidden":false},
            {"type":"clean","section":"Core","hidden":false},
            {"type":"configure","section":"Core","hidden":false},
            {"type":"install","section":"Core","hidden":false},
            {"type":"list","section":"Core","hidden":false},
            {"type":"rebuild","section":"Core","hidden":false},
            {"type":"remove","section":"Core","hidden":false},
            {"type":"deps","section":"Core","hidden":false},
            {"type":"python","section":"Core","hidden":false},
            {"type":"lin","section":"Core","hidden":false},
            {"type":"linux","section":"Core","hidden":false},
            {"type":"mac","section":"Core","hidden":false},
            {"type":"macos","section":"Core","hidden":false},
            {"type":"win","section":"Core","hidden":false},
            {"type":"windows","section":"Core","hidden":false},
            {"type":"zos","section":"Core","hidden":false},
            {"type":"doc","section":"Doc","hidden":false},
            {"type":"docs","section":"Doc","hidden":false},
            {"type":"readme","section":"Doc","hidden":false},
            {"type":"chore","section":"Miscellaneous","hidden":false},
            {"type":"refactor","section":"Miscellaneous","hidden":false},
            {"type":"ci","section":"Miscellaneous","hidden":false},
            {"type":"meta","section":"Miscellaneous","hidden":false}]

# Standard Conventional Commits: `feat` and `fix`
# node-gyp subdirectories: `bin`, `gyp`, `lib`, `src`, `test`
# node-gyp subcommands: `build`, `clean`, `configure`, `install`, `list`, `rebuild`, `remove`
# Core abstract category: `deps`
# Languages/platforms: `python`, `lin`, `linux`, `mac`, `macos`, `win`, `windows`, `zos`
# Documentation: `doc`, `docs`, `readme`
# Standard Conventional Commits: `chore` (under "Miscellaneous")
# Miscellaneous abstract categories: `refactor`, `ci`, `meta`
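As an illustration of how the `changelog-types` mapping above is applied (these commit messages are hypothetical, not taken from the repository): release-please reads the Conventional Commit prefix before the colon and files the resulting changelog entry under the configured section.

```sh
# Hypothetical commits and the CHANGELOG section each would land in,
# given the changelog-types configuration above:
git commit -m "feat: add support for a new platform"      # -> Features
git commit -m "fix: handle a missing environment variable" # -> Bug Fixes
git commit -m "gyp: update gyp to a newer release"         # -> Core
git commit -m "doc: clarify installation instructions"     # -> Doc
git commit -m "ci: bump a GitHub Action version"           # -> Miscellaneous
```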
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/.github/workflows/tests.yml (52 lines, generated, vendored, normal file)
@@ -0,0 +1,52 @@
# https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources
# TODO: Line 48, enable pytest --doctest-modules

name: Tests
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
jobs:
  Tests:
    strategy:
      fail-fast: false
      max-parallel: 15
      matrix:
        node: [14.x, 16.x, 18.x]
        python: ["3.6", "3.8", "3.10"]
        os: [macos-latest, ubuntu-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v3
      - name: Use Node.js ${{ matrix.node }}
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node }}
      - name: Use Python ${{ matrix.python }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python }}
        env:
          PYTHON_VERSION: ${{ matrix.python }}  # Why do this?
      - name: Install Dependencies
        run: |
          npm install --no-progress
          pip install flake8 pytest
      - name: Set Windows environment
        if: startsWith(matrix.os, 'windows')
        run: |
          echo 'GYP_MSVS_VERSION=2015' >> $Env:GITHUB_ENV
          echo 'GYP_MSVS_OVERRIDE_PATH=C:\\Dummy' >> $Env:GITHUB_ENV
      - name: Lint Python
        if: startsWith(matrix.os, 'ubuntu')
        run: flake8 . --ignore=E203,W503 --max-complexity=101 --max-line-length=88 --show-source --statistics
      - name: Run Python tests
        run: python -m pytest
      # - name: Run doctests with pytest
      #   run: python -m pytest --doctest-modules
      - name: Environment Information
        run: npx envinfo
      - name: Run Node tests
        run: npm test
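The matrix job above can be approximated on a developer machine with the same commands the workflow runs. A minimal local sketch, assuming Node.js, Python, and pip are installed and the working directory is the node-gyp checkout (the Windows-only environment step is omitted):

```sh
npm install --no-progress      # install Node dependencies, as in the workflow
pip install flake8 pytest      # install the Python lint/test tooling

# Lint step (the workflow runs this on ubuntu-latest only)
flake8 . --ignore=E203,W503 --max-complexity=101 --max-line-length=88 --show-source --statistics

python -m pytest               # Python tests
npx envinfo                    # print environment information, as in the workflow
npm test                       # Node tests
```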
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/.github/workflows/visual-studio.yml (33 lines, generated, vendored, normal file)
@@ -0,0 +1,33 @@
# https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources

name: visual-studio
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
jobs:
  visual-studio:
    strategy:
      fail-fast: false
      max-parallel: 8
      matrix:
        os: [windows-latest]
        msvs-version: [2016, 2019, 2022]  # https://github.com/actions/virtual-environments/tree/main/images/win
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v3
      - name: Install Dependencies
        run: |
          npm install --no-progress
          # npm audit fix --force
      - name: Set Windows environment
        if: startsWith(matrix.os, 'windows')
        run: |
          echo 'GYP_MSVS_VERSION=${{ matrix.msvs-version }}' >> $Env:GITHUB_ENV
          echo 'GYP_MSVS_OVERRIDE_PATH=C:\\Dummy' >> $Env:GITHUB_ENV
      - name: Environment Information
        run: npx envinfo
      - name: Run Node tests
        run: npm test
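To reproduce one cell of this matrix outside CI, the same variables the workflow writes to `GITHUB_ENV` can be set before running the tests. A rough sketch in POSIX-shell syntax (the workflow itself uses PowerShell's `$Env:GITHUB_ENV` on Windows, and `C:\Dummy` is the placeholder path it sets):

```sh
# Pick one msvs-version from the matrix (2016, 2019, or 2022).
export GYP_MSVS_VERSION=2022
export GYP_MSVS_OVERRIDE_PATH='C:\Dummy'

npm install --no-progress
npm test
```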
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/.ready (0 lines, generated, vendored, normal file)
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/CHANGELOG.md (784 lines, generated, vendored, normal file)
@@ -0,0 +1,784 @@
# Changelog

## [9.3.0](https://www.github.com/nodejs/node-gyp/compare/v9.2.0...v9.3.0) (2022-10-10)

### Features

* **gyp:** update gyp to v0.14.0 ([#2749](https://www.github.com/nodejs/node-gyp/issues/2749)) ([713b8dc](https://www.github.com/nodejs/node-gyp/commit/713b8dcdbf44532ca9453a127da266386cc737f8))
* remove support for VS2015 in Node.js >=19 ([#2746](https://www.github.com/nodejs/node-gyp/issues/2746)) ([131d1a4](https://www.github.com/nodejs/node-gyp/commit/131d1a463baf034a04154bcda753a8295f112a34))
* support IBM Open XL C/C++ on z/OS ([#2743](https://www.github.com/nodejs/node-gyp/issues/2743)) ([7d0c83d](https://www.github.com/nodejs/node-gyp/commit/7d0c83d2a95aca743dff972826d0da26203acfc4))

## [9.2.0](https://www.github.com/nodejs/node-gyp/compare/v9.1.0...v9.2.0) (2022-10-02)

### Features

* Add proper support for IBM i ([a26494f](https://www.github.com/nodejs/node-gyp/commit/a26494fbb8883d9ef784503979e115dec3e2791e))
* **gyp:** update gyp to v0.13.0 ([3e2a532](https://www.github.com/nodejs/node-gyp/commit/3e2a5324f1c24f3a04bca04cf54fe23d5c4d5e50))

### Bug Fixes

* node.js debugger adds stderr (but exit code is 0) -> shouldn't throw ([#2719](https://www.github.com/nodejs/node-gyp/issues/2719)) ([c379a74](https://www.github.com/nodejs/node-gyp/commit/c379a744c65c7ab07c2c3193d9c7e8f25ae1b05e))

### Core

* enable support for zoslib on z/OS ([#2600](https://www.github.com/nodejs/node-gyp/issues/2600)) ([83c0a12](https://www.github.com/nodejs/node-gyp/commit/83c0a12bf23b4cbf3125d41f9e2d4201db76c9ae))

### Miscellaneous

* update dependency - nopt@6.0.0 ([#2707](https://www.github.com/nodejs/node-gyp/issues/2707)) ([8958ecf](https://www.github.com/nodejs/node-gyp/commit/8958ecf2bb719227bbcbf155891c3186ee219a2e))

## [9.1.0](https://www.github.com/nodejs/node-gyp/compare/v9.0.0...v9.1.0) (2022-07-13)

### Features

* Update function getSDK() to support Windows 11 SDK ([#2565](https://www.github.com/nodejs/node-gyp/issues/2565)) ([ea8520e](https://www.github.com/nodejs/node-gyp/commit/ea8520e3855374bd15b6d001fe112d58a8d7d737))

### Bug Fixes

* extend tap timeout length to allow for slow CI ([6f74c76](https://www.github.com/nodejs/node-gyp/commit/6f74c762fe3c19bdd20245cb5c02e2dfa65d9451))
* new ca & server certs, bundle in .js file and unpack for testing ([147e3d3](https://www.github.com/nodejs/node-gyp/commit/147e3d34f44a97deb7aa507207680cf0f4e662a2))
* re-label ([#2689](https://www.github.com/nodejs/node-gyp/issues/2689)) ([f0b7863](https://www.github.com/nodejs/node-gyp/commit/f0b7863dadfa365afc173025ae95351aec79abd9))
* typo on readme ([bf81cd4](https://www.github.com/nodejs/node-gyp/commit/bf81cd452b931dd4dfa82762c23dd530a075d992))

### Doc

* update docs/README.md with latest version number ([62d2815](https://www.github.com/nodejs/node-gyp/commit/62d28151bf8266a34e1bcceeb25b4e6e2ae5ca5d))

### Core

* update due to rename of primary branch ([ca1f068](https://www.github.com/nodejs/node-gyp/commit/ca1f0681a5567ca8cd51acebccd37a633f19bc6a))

### Tests

* Try msvs-version: [2016, 2019, 2022] ([#2700](https://www.github.com/nodejs/node-gyp/issues/2700)) ([68b5b5b](https://www.github.com/nodejs/node-gyp/commit/68b5b5be9c94ac20c55e88654ff6f55234d7130a))
* Upgrade GitHub Actions ([#2623](https://www.github.com/nodejs/node-gyp/issues/2623)) ([245cd5b](https://www.github.com/nodejs/node-gyp/commit/245cd5bbe4441d4f05e88f2fa20a86425419b6af))
* Upgrade GitHub Actions ([#2701](https://www.github.com/nodejs/node-gyp/issues/2701)) ([1c64ca7](https://www.github.com/nodejs/node-gyp/commit/1c64ca7f4702c6eb43ecd16fbd67b5d939041621))

## [9.0.0](https://www.github.com/nodejs/node-gyp/compare/v8.4.1...v9.0.0) (2022-02-24)

### ⚠ BREAKING CHANGES

* increase "engines" to "node" : "^12.22 || ^14.13 || >=16" (#2601)

### Bug Fixes

* _ in npm_config_ env variables ([eef4eef](https://www.github.com/nodejs/node-gyp/commit/eef4eefccb13ff6a32db862709ee5b2d4edf7e95))
* update make-fetch-happen to a minimum of 10.0.3 ([839e414](https://www.github.com/nodejs/node-gyp/commit/839e414b63790c815a4a370d0feee8f24a94d40f))

### Miscellaneous

* add minimal SECURITY.md ([#2560](https://www.github.com/nodejs/node-gyp/issues/2560)) ([c2a1850](https://www.github.com/nodejs/node-gyp/commit/c2a185056e2e589b520fbc0bcc59c2935cd07ede))

### Doc

* Add notes/disclaimers for upgrading the copy of node-gyp that npm uses ([#2585](https://www.github.com/nodejs/node-gyp/issues/2585)) ([faf6d48](https://www.github.com/nodejs/node-gyp/commit/faf6d48f8a77c08a313baf9332358c4b1231c73c))
* Rename and update Common-issues.md --> docs/README.md ([#2567](https://www.github.com/nodejs/node-gyp/issues/2567)) ([2ef5fb8](https://www.github.com/nodejs/node-gyp/commit/2ef5fb86277c4d81baffc0b9f642a8d86be1bfa5))
* rephrase explanation of which node-gyp is used by npm ([#2587](https://www.github.com/nodejs/node-gyp/issues/2587)) ([a2f2988](https://www.github.com/nodejs/node-gyp/commit/a2f298870692022302fa27a1d42363c4a72df407))
* title match content ([#2574](https://www.github.com/nodejs/node-gyp/issues/2574)) ([6e8f93b](https://www.github.com/nodejs/node-gyp/commit/6e8f93be0443f2649d4effa7bc773a9da06a33b4))
* Update Python versions ([#2571](https://www.github.com/nodejs/node-gyp/issues/2571)) ([e069f13](https://www.github.com/nodejs/node-gyp/commit/e069f13658a8bfb5fd60f74708cf8be0856d92e3))

### Core

* add lib.target as path for searching libnode on z/OS ([1d499dd](https://www.github.com/nodejs/node-gyp/commit/1d499dd5606f39de2d34fa822fd0fa5ce17fbd06))
* increase "engines" to "node" : "^12.22 || ^14.13 || >=16" ([#2601](https://www.github.com/nodejs/node-gyp/issues/2601)) ([6562f92](https://www.github.com/nodejs/node-gyp/commit/6562f92a6f2e67aeae081ddf5272ff117f1fab07))
* make-fetch-happen@10.0.1 ([78f6660](https://www.github.com/nodejs/node-gyp/commit/78f66604e0df480d4f36a8fa4f3618c046a6fbdc))
### [8.4.1](https://www.github.com/nodejs/node-gyp/compare/v8.4.0...v8.4.1) (2021-11-19)

### Bug Fixes

* windows command missing space ([#2553](https://www.github.com/nodejs/node-gyp/issues/2553)) ([cc37b88](https://www.github.com/nodejs/node-gyp/commit/cc37b880690706d3c5d04d5a68c76c392a0a23ed))

### Doc

* fix typo in powershell node-gyp update ([787cf7f](https://www.github.com/nodejs/node-gyp/commit/787cf7f8e5ddd5039e02b64ace6b7b15e06fe0a4))

### Core

* npmlog@6.0.0 ([8083f6b](https://www.github.com/nodejs/node-gyp/commit/8083f6b855bd7f3326af04c5f5269fc28d7f2508))

## [8.4.0](https://www.github.com/nodejs/node-gyp/compare/v8.3.0...v8.4.0) (2021-11-05)

### Features

* build with config.gypi from node headers ([a27dc08](https://www.github.com/nodejs/node-gyp/commit/a27dc08696911c6d81e76cc228697243069103c1))
* support vs2022 ([#2533](https://www.github.com/nodejs/node-gyp/issues/2533)) ([5a00387](https://www.github.com/nodejs/node-gyp/commit/5a00387e5f8018264a1822f6c4d5dbf425f21cf6))

## [8.3.0](https://www.github.com/nodejs/node-gyp/compare/v8.2.0...v8.3.0) (2021-10-11)

### Features

* **gyp:** update gyp to v0.10.0 ([#2521](https://www.github.com/nodejs/node-gyp/issues/2521)) ([5585792](https://www.github.com/nodejs/node-gyp/commit/5585792922a97f0629f143c560efd74470eae87f))

### Tests

* Python 3.10 was release on Oct. 4th ([#2504](https://www.github.com/nodejs/node-gyp/issues/2504)) ([0a67dcd](https://www.github.com/nodejs/node-gyp/commit/0a67dcd1307f3560495219253241eafcbf4e2a69))

### Miscellaneous

* **deps:** bump make-fetch-happen from 8.0.14 to 9.1.0 ([b05b4fe](https://www.github.com/nodejs/node-gyp/commit/b05b4fe9891f718f40edf547e9b50e982826d48a))
* refactor the creation of config.gypi file ([f2ad87f](https://www.github.com/nodejs/node-gyp/commit/f2ad87ff65f98ad66daa7225ad59d99b759a2b07))

## [8.2.0](https://www.github.com/nodejs/node-gyp/compare/v8.1.0...v8.2.0) (2021-08-23)

### Features

* **gyp:** update gyp to v0.9.6 ([#2481](https://www.github.com/nodejs/node-gyp/issues/2481)) ([ed9a9ed](https://www.github.com/nodejs/node-gyp/commit/ed9a9ed653a17c84afa3c327161992d0da7d0cea))

### Bug Fixes

* add error arg back into catch block for older Node.js users ([5cde818](https://www.github.com/nodejs/node-gyp/commit/5cde818aac715477e9e9747966bb6b4c4ed070a8))
* change default gyp update message ([#2420](https://www.github.com/nodejs/node-gyp/issues/2420)) ([cfd12ff](https://www.github.com/nodejs/node-gyp/commit/cfd12ff3bb0eb4525173413ef6a94b3cd8398cad))
* doc how to update node-gyp independently from npm ([c8c0af7](https://www.github.com/nodejs/node-gyp/commit/c8c0af72e78141a02b5da4cd4d704838333a90bd))
* missing spaces ([f0882b1](https://www.github.com/nodejs/node-gyp/commit/f0882b1264b2fa701adbc81a3be0b3cba80e333d))

### Core

* deep-copy process.config during configure ([#2368](https://www.github.com/nodejs/node-gyp/issues/2368)) ([5f1a06c](https://www.github.com/nodejs/node-gyp/commit/5f1a06c50f3b0c3d292f64948f85a004cfcc5c87))

### Miscellaneous

* **deps:** bump tar from 6.1.0 to 6.1.2 ([#2474](https://www.github.com/nodejs/node-gyp/issues/2474)) ([ec15a3e](https://www.github.com/nodejs/node-gyp/commit/ec15a3e5012004172713c11eebcc9d852d32d380))
* fix typos discovered by codespell ([#2442](https://www.github.com/nodejs/node-gyp/issues/2442)) ([2d0ce55](https://www.github.com/nodejs/node-gyp/commit/2d0ce5595e232a3fc7c562cdf39efb77e2312cc1))
* GitHub Actions Test on node: [12.x, 14.x, 16.x] ([#2439](https://www.github.com/nodejs/node-gyp/issues/2439)) ([b7bccdb](https://www.github.com/nodejs/node-gyp/commit/b7bccdb527d93b0bb0ce99713f083ce2985fe85c))

### Doc

* correct link to "binding.gyp files out in the wild" ([#2483](https://www.github.com/nodejs/node-gyp/issues/2483)) ([660dd7b](https://www.github.com/nodejs/node-gyp/commit/660dd7b2a822c184be8027b300e68be67b366772))
* **wiki:** Add a link to the node-midi binding.gyp file. ([b354711](https://www.github.com/nodejs/node-gyp/commit/b3547115f6e356358138310e857c7f1ec627a8a7))
* **wiki:** add bcrypt ([e199cfa](https://www.github.com/nodejs/node-gyp/commit/e199cfa8fc6161492d2a6ade2190510d0ebf7c0f))
* **wiki:** Add helpful information ([4eda827](https://www.github.com/nodejs/node-gyp/commit/4eda8275c03dae6d2f5c40f3c1dbe930d84b0f2b))
* **wiki:** Add node-canvas ([13a9553](https://www.github.com/nodejs/node-gyp/commit/13a955317b39caf98fd1f412d8d3f41599e979fd))
* **wiki:** Add node-openvg-canvas and node-openvg. ([61f709e](https://www.github.com/nodejs/node-gyp/commit/61f709ec4d9f256a6467e9ff84430a48eeb629d1))
* **wiki:** add one more example ([77f3632](https://www.github.com/nodejs/node-gyp/commit/77f363272930d3d4d24fd3973be22e6237128fcc))
* **wiki:** add topcube, node-osmium, and node-osrm ([1a75d2b](https://www.github.com/nodejs/node-gyp/commit/1a75d2bf2f562ba50846893a516e111cfbb50885))
* **wiki:** Added details for properly fixing ([3d4d9d5](https://www.github.com/nodejs/node-gyp/commit/3d4d9d52d6b5b49de06bb0bb5b68e2686d2b7ebd))
* **wiki:** Added Ghostscript4JS ([bf4bed1](https://www.github.com/nodejs/node-gyp/commit/bf4bed1b96a7d22fba6f97f4552ad09f32ac3737))
* **wiki:** added levelup ([1575bce](https://www.github.com/nodejs/node-gyp/commit/1575bce3a53db628bfb023fd6f3258fdf98c3195))
* **wiki:** Added nk-mysql (nodamysql) ([5b4f2d0](https://www.github.com/nodejs/node-gyp/commit/5b4f2d0e1d5d3eadfd03aaf9c1668340f76c4bea))
* **wiki:** Added nk-xrm-installer .gyp references, including .py scripts for providing complete reference to examples of fetching source via http, extracting, and moving files (as opposed to copying) ([ceb3088](https://www.github.com/nodejs/node-gyp/commit/ceb30885b74f6789374ef52267b84767be93ebe4))
* **wiki:** Added tip about resolving frustrating LNK1181 error ([e64798d](https://www.github.com/nodejs/node-gyp/commit/e64798de8cac6031ad598a86d7599e81b4d20b17))
* **wiki:** ADDED: Node.js binding to OpenCV ([e2dc777](https://www.github.com/nodejs/node-gyp/commit/e2dc77730b09d7ee8682d7713a7603a2d7aacabd))
* **wiki:** Adding link to node-cryptopp's gyp file ([875adbe](https://www.github.com/nodejs/node-gyp/commit/875adbe2a4669fa5f2be0250ffbf98fb55e800fd))
* **wiki:** Adding the sharp library to the list ([9dce0e4](https://www.github.com/nodejs/node-gyp/commit/9dce0e41650c3fa973e6135a79632d022c662a1d))
* **wiki:** Adds node-fann ([23e3d48](https://www.github.com/nodejs/node-gyp/commit/23e3d485ed894ba7c631e9c062f5e366b50c416c))
* **wiki:** Adds node-inotify and v8-profiler ([b6e542f](https://www.github.com/nodejs/node-gyp/commit/b6e542f644dbbfe22b88524ec500696e06ee4af7))
* **wiki:** Bumping Python version from 2.3 to 2.7 as per the node-gyp readme ([55ebd6e](https://www.github.com/nodejs/node-gyp/commit/55ebd6ebacde975bf84f7bf4d8c66e64cc7cd0da))
* **wiki:** C++ build tools version upgraded ([5b899b7](https://www.github.com/nodejs/node-gyp/commit/5b899b70db729c392ced7c98e8e17590c6499fc3))
* **wiki:** change bcrypt url to binding.gyp file ([e11bdd8](https://www.github.com/nodejs/node-gyp/commit/e11bdd84de6144492d3eb327d67cbf2d62da1a76))
* **wiki:** Clarification + direct link to VS2010 ([531c724](https://www.github.com/nodejs/node-gyp/commit/531c724561d947b5d870de8d52dd8c3c51c5ec2d))
* **wiki:** Correcting the link to node-osmium ([fae7516](https://www.github.com/nodejs/node-gyp/commit/fae7516a1d2829b6e234eaded74fb112ebd79a05))
* **wiki:** Created "binding.gyp" files out in the wild (markdown) ([d4fd143](https://www.github.com/nodejs/node-gyp/commit/d4fd14355bbe57f229f082f47bb2b3670868203f))
* **wiki:** Created Common issues (markdown) ([a38299e](https://www.github.com/nodejs/node-gyp/commit/a38299ea340ceb0e732c6dc6a1b4760257644839))
* **wiki:** Created Error: "pre" versions of node cannot be installed (markdown) ([98bc80d](https://www.github.com/nodejs/node-gyp/commit/98bc80d7a62ba70c881f3c39d94f804322e57852))
* **wiki:** Created Linking to OpenSSL (markdown) ([c46d00d](https://www.github.com/nodejs/node-gyp/commit/c46d00d83bac5173dea8bbbb175a1a7de74fdaca))
* **wiki:** Created Updating npm's bundled node gyp (markdown) ([e0ac8d1](https://www.github.com/nodejs/node-gyp/commit/e0ac8d15af46aadd1c220599e63199b154a514e6))
* **wiki:** Created use of undeclared identifier 'TypedArray' (markdown) ([65ba711](https://www.github.com/nodejs/node-gyp/commit/65ba71139e9b7f64ac823e575ee9dbf17d937ce4))
* **wiki:** Created Visual Studio 2010 Setup (markdown) ([5b80e83](https://www.github.com/nodejs/node-gyp/commit/5b80e834c8f79dda9fb2770a876ff3cf649c06f3))
* **wiki:** Created Visual studio 2012 setup (markdown) ([becef31](https://www.github.com/nodejs/node-gyp/commit/becef316b6c46a33e783667720ee074a0141d1a5))
* **wiki:** Destroyed Visual Studio 2010 Setup (markdown) ([93423b4](https://www.github.com/nodejs/node-gyp/commit/93423b43606de9664aeb79635825f5e9941ec9bc))
* **wiki:** Destroyed Visual studio 2012 setup (markdown) ([3601508](https://www.github.com/nodejs/node-gyp/commit/3601508bb10fa05da0ddc7e70d57e4b4dd679657))
* **wiki:** Different commands for Windows npm v6 vs. v7 ([0fce46b](https://www.github.com/nodejs/node-gyp/commit/0fce46b53340c85e8091cde347d5ed23a443c82f))
* **wiki:** Drop in favor of ([9285ff6](https://www.github.com/nodejs/node-gyp/commit/9285ff6e451c52c070a05f05f0a9602621d91d53))
* **wiki:** Explicit link to Visual C++ 2010 Express ([378c363](https://www.github.com/nodejs/node-gyp/commit/378c3632f02c096ed819ec8f2611c65bef0c0554))
* **wiki:** fix link to gyp file used to build libsqlite3 ([54db8d7](https://www.github.com/nodejs/node-gyp/commit/54db8d7ac33e3f98220960b5d86cfa18a75b53cb))
* **wiki:** Fix link to node-zipfile ([92e49a8](https://www.github.com/nodejs/node-gyp/commit/92e49a858ed69cb4847a26a5676ab56ef5e2de33))
* **wiki:** fixed node-serialport link ([954ee53](https://www.github.com/nodejs/node-gyp/commit/954ee530b3972d1db591fce32368e4e31b5a25d8))
* **wiki:** I highly missing it in common issue as every windows biggner face that issue ([d617fae](https://www.github.com/nodejs/node-gyp/commit/d617faee29c40871ca5c8f93efd0ce929a40d541))
* **wiki:** if ouns that the -h did not help. I founs on github that there was support for visual studio 2015, while i couldn't install node-red beacuse it kept telling me the key 2015 was missing. looking in he gyp python code i found the local file was bot up t dat with the github repo. updating took several efforts before i tried to drop the -g option. ([408b72f](https://www.github.com/nodejs/node-gyp/commit/408b72f561329408daeb17834436e381406efcc8))
* **wiki:** If permissions error, please try and then the command. ([ee8e1c1](https://www.github.com/nodejs/node-gyp/commit/ee8e1c1e5334096d58e0d6bca6c006f2ee9c88cb))
* **wiki:** Improve Unix instructions ([c3e5487](https://www.github.com/nodejs/node-gyp/commit/c3e548736645b535ea5bce613d74ca3e98598243))
* **wiki:** link to docs/ from README ([b52e487](https://www.github.com/nodejs/node-gyp/commit/b52e487eac1eb421573d1e67114a242eeff45a00))
* **wiki:** Lower case L ([3aa2c6b](https://www.github.com/nodejs/node-gyp/commit/3aa2c6bdb07971b87505e32e32548d75264bd19f))
* **wiki:** Make changes discussed in https://github.com/nodejs/node-gyp/issues/2416 ([1dcad87](https://www.github.com/nodejs/node-gyp/commit/1dcad873539027511a5f0243baf770ea90f6f4e2))
* **wiki:** move wiki docs into doc/ ([f0a4835](https://www.github.com/nodejs/node-gyp/commit/f0a48355d86534ec3bdabcdb3ce3340fa2e17f39))
* **wiki:** node-sass in the wild ([d310a73](https://www.github.com/nodejs/node-gyp/commit/d310a73d64d0065050377baac7047472f7424a1b))
* **wiki:** node-srs was a 404 ([bbca21a](https://www.github.com/nodejs/node-gyp/commit/bbca21a1e1ede4c473aff365ca71989a5bda7b57))
* **wiki:** Note: VS2010 seems to be no longer available! VS2013 or nothing! ([7b5dcaf](https://www.github.com/nodejs/node-gyp/commit/7b5dcafafccdceae4b8f2b53ac9081a694b6ade8))
* **wiki:** safer doc names, remove unnecessary TypedArray doc ([161c235](https://www.github.com/nodejs/node-gyp/commit/161c2353ef5b562f4acfb2fd77608fcbd0800fc0))
* **wiki:** sorry, forgot to mention a specific windows version. ([d69dffc](https://www.github.com/nodejs/node-gyp/commit/d69dffc16c2b1e3c60dcb5d1c35a49270ba22a35))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([7444b47](https://www.github.com/nodejs/node-gyp/commit/7444b47a7caac1e14d1da474a7fcfcf88d328017))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d766b74](https://www.github.com/nodejs/node-gyp/commit/d766b7427851e6c2edc02e2504a7be9be7e330c0))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d319b0e](https://www.github.com/nodejs/node-gyp/commit/d319b0e98c7085de8e51bc5595eba4264b99a7d5))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([3c6692d](https://www.github.com/nodejs/node-gyp/commit/3c6692d538f0ce973869aa237118b7d2483feccd))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([93392d5](https://www.github.com/nodejs/node-gyp/commit/93392d559ce6f250b9c7fe8177e6c88603809dc1))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([8841158](https://www.github.com/nodejs/node-gyp/commit/88411588f300e9b7c00fe516ecd977a1feeeb15c))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([81bfa1f](https://www.github.com/nodejs/node-gyp/commit/81bfa1f1b63d522a9f8a9ae9ca0c7ae90fe75140))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d1cd237](https://www.github.com/nodejs/node-gyp/commit/d1cd237bad06fa507adb354b9e2181a14dc63d24))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([3de9e17](https://www.github.com/nodejs/node-gyp/commit/3de9e17e0b8a387eafe7bd18d0ec1e3191d118e8))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([a9b7096](https://www.github.com/nodejs/node-gyp/commit/a9b70968fb956eab3b95672048b94350e1565ca3))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([3236069](https://www.github.com/nodejs/node-gyp/commit/3236069689e7e0eb15b324fce74ab58158956f98))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([1462755](https://www.github.com/nodejs/node-gyp/commit/14627556966e5d513bdb8e5208f0e1300f68991f))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([7ab1337](https://www.github.com/nodejs/node-gyp/commit/7ab133752a6c402bb96dcd3d671d73e03e9487ad))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([640895d](https://www.github.com/nodejs/node-gyp/commit/640895d36b7448c646a3b850c1e159106f83c724))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([ced8c96](https://www.github.com/nodejs/node-gyp/commit/ced8c968457f285ab8989c291d28173d7730833c))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([27b883a](https://www.github.com/nodejs/node-gyp/commit/27b883a350ad0db6b9130d7b996f35855ec34c7a))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d29fb13](https://www.github.com/nodejs/node-gyp/commit/d29fb134f1c4b9dd729ba95f2979e69e0934809f))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([2765891](https://www.github.com/nodejs/node-gyp/commit/27658913e6220cf0371b4b73e25a0e4ab11108a1))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([dc97766](https://www.github.com/nodejs/node-gyp/commit/dc9776648d432bca6775c176641f16da14522d4c))
* **wiki:** Updated Error: "pre" versions of node cannot be installed (markdown) ([e9f8b33](https://www.github.com/nodejs/node-gyp/commit/e9f8b33d1f87d04f22cb09a814d7c55d0fa38446))
* **wiki:** Updated Home (markdown) ([3407109](https://www.github.com/nodejs/node-gyp/commit/3407109325cf7ba1e925656b9eb75feffab0557c))
* **wiki:** Updated Home (markdown) ([6e392bc](https://www.github.com/nodejs/node-gyp/commit/6e392bcdd3dd1691773e6e16e1dffc35931b81e0))
* **wiki:** Updated Home (markdown) ([65efe32](https://www.github.com/nodejs/node-gyp/commit/65efe32ccb8d446ce569453364f922dd9d27c945))
* **wiki:** Updated Home (markdown) ([ea28f09](https://www.github.com/nodejs/node-gyp/commit/ea28f0947af91fa638be355143f5df89d2e431c8))
* **wiki:** Updated Home (markdown) ([0e37ff4](https://www.github.com/nodejs/node-gyp/commit/0e37ff48b306c12149661b375895741d3d710da7))
* **wiki:** Updated Home (markdown) ([b398ef4](https://www.github.com/nodejs/node-gyp/commit/b398ef46f660d2b1506508550dadfb4c35639e4b))
* **wiki:** Updated Linking to OpenSSL (markdown) ([8919028](https://www.github.com/nodejs/node-gyp/commit/8919028921fd304f08044098434f0dc6071fb7cf))
* **wiki:** Updated Linking to OpenSSL (markdown) ([c00eb77](https://www.github.com/nodejs/node-gyp/commit/c00eb778fc7dc27e4dab3a9219035ea20458b33b))
* **wiki:** Updated node-levelup to node-leveldown (broken links) ([59668bb](https://www.github.com/nodejs/node-gyp/commit/59668bb0b904feccf3c09afa2fd37378c77af967))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([d314854](https://www.github.com/nodejs/node-gyp/commit/d31485415ef69d46effa6090c95698341965de1b))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([11858b0](https://www.github.com/nodejs/node-gyp/commit/11858b0655d1eee00c62ad628e719d4378803d14))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([33561e9](https://www.github.com/nodejs/node-gyp/commit/33561e9cbf5f4eb46111318503c77df2c6eb484a))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([4a7f2d0](https://www.github.com/nodejs/node-gyp/commit/4a7f2d0d869a65c99a78504976567017edadf657))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([979a706](https://www.github.com/nodejs/node-gyp/commit/979a7063b950c088a7f4896fc3a48e1d00dfd231))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([e50e04d](https://www.github.com/nodejs/node-gyp/commit/e50e04d7b6a3754ea0aa11fe8cef491b3bc5bdd4))
## [8.1.0](https://www.github.com/nodejs/node-gyp/compare/v8.0.0...v8.1.0) (2021-05-28)

### Features

* **gyp:** update gyp to v0.9.1 ([#2402](https://www.github.com/nodejs/node-gyp/issues/2402)) ([814b1b0](https://www.github.com/nodejs/node-gyp/commit/814b1b0eda102afb9fc87e81638a9cf5b650bb10))

### Miscellaneous

* add `release-please-action` for automated releases ([#2395](https://www.github.com/nodejs/node-gyp/issues/2395)) ([07e9d7c](https://www.github.com/nodejs/node-gyp/commit/07e9d7c7ee80ba119ea760c635f72fd8e7efe198))

### Core

* fail gracefully if we can't find the username ([#2375](https://www.github.com/nodejs/node-gyp/issues/2375)) ([fca4795](https://www.github.com/nodejs/node-gyp/commit/fca4795512c67dc8420aaa0d913b5b89a4b147f3))
* log as yes/no whether build dir was created ([#2370](https://www.github.com/nodejs/node-gyp/issues/2370)) ([245dee5](https://www.github.com/nodejs/node-gyp/commit/245dee5b62581309946872ae253226ea3a42c0e3))

### Doc

* fix v8.0.0 release date ([4b83c3d](https://www.github.com/nodejs/node-gyp/commit/4b83c3de7300457919d53f26d96ea9ad6f6bedd8))
* remove redundant version info ([#2403](https://www.github.com/nodejs/node-gyp/issues/2403)) ([1423670](https://www.github.com/nodejs/node-gyp/commit/14236709de64b100a424396b91a5115639daa0ef))
* Update README.md Visual Studio Community page polski to auto ([#2371](https://www.github.com/nodejs/node-gyp/issues/2371)) ([1b4697a](https://www.github.com/nodejs/node-gyp/commit/1b4697abf69ef574a48faf832a7098f4c6c224a5))

## v8.0.0 2021-04-03

* [[`0d8a6f1b19`](https://github.com/nodejs/node-gyp/commit/0d8a6f1b19)] - **ci**: update actions/setup-node to v2 (#2302) (Sora Morimoto) [#2302](https://github.com/nodejs/node-gyp/pull/2302)
* [[`15a5c7d45b`](https://github.com/nodejs/node-gyp/commit/15a5c7d45b)] - **ci**: migrate deprecated grammar (#2285) (Jiawen Geng) [#2285](https://github.com/nodejs/node-gyp/pull/2285)
* [[`06ddde27f9`](https://github.com/nodejs/node-gyp/commit/06ddde27f9)] - **deps**: sync mutual dependencies with npm (DeeDeeG) [#2348](https://github.com/nodejs/node-gyp/pull/2348)
* [[`a5fd1f41e3`](https://github.com/nodejs/node-gyp/commit/a5fd1f41e3)] - **doc**: add downloads badge (#2352) (Jiawen Geng) [#2352](https://github.com/nodejs/node-gyp/pull/2352)
* [[`cc1cbce056`](https://github.com/nodejs/node-gyp/commit/cc1cbce056)] - **doc**: update macOS\_Catalina.md (#2293) (iMrLopez) [#2293](https://github.com/nodejs/node-gyp/pull/2293)
* [[`6287118fc4`](https://github.com/nodejs/node-gyp/commit/6287118fc4)] - **doc**: updated README.md to copy easily (#2281) (மனோஜ்குமார் பழனிச்சாமி) [#2281](https://github.com/nodejs/node-gyp/pull/2281)
* [[`66c0f04467`](https://github.com/nodejs/node-gyp/commit/66c0f04467)] - **doc**: add missing `sudo` to Catalina doc (Karl Horky) [#2244](https://github.com/nodejs/node-gyp/pull/2244)
* [[`0da2e0140d`](https://github.com/nodejs/node-gyp/commit/0da2e0140d)] - **gyp**: update gyp to v0.8.1 (#2355) (DeeDeeG) [#2355](https://github.com/nodejs/node-gyp/pull/2355)
* [[`0093ec8646`](https://github.com/nodejs/node-gyp/commit/0093ec8646)] - **gyp**: Improve our flake8 linting tests (Christian Clauss) [#2356](https://github.com/nodejs/node-gyp/pull/2356)
* [[`a78b584236`](https://github.com/nodejs/node-gyp/commit/a78b584236)] - **(SEMVER-MAJOR)** **gyp**: remove support for Python 2 (#2300) (Christian Clauss) [#2300](https://github.com/nodejs/node-gyp/pull/2300)
* [[`c3c510d89e`](https://github.com/nodejs/node-gyp/commit/c3c510d89e)] - **gyp**: update gyp to v0.8.0 (#2318) (Christian Clauss) [#2318](https://github.com/nodejs/node-gyp/pull/2318)
* [[`9e1397c52e`](https://github.com/nodejs/node-gyp/commit/9e1397c52e)] - **(SEMVER-MAJOR)** **gyp**: update gyp to v0.7.0 (#2284) (Jiawen Geng) [#2284](https://github.com/nodejs/node-gyp/pull/2284)
* [[`1bd18f3e77`](https://github.com/nodejs/node-gyp/commit/1bd18f3e77)] - **(SEMVER-MAJOR)** **lib**: drop Python 2 support in find-python.js (#2333) (DeeDeeG) [#2333](https://github.com/nodejs/node-gyp/pull/2333)
* [[`e81602ef55`](https://github.com/nodejs/node-gyp/commit/e81602ef55)] - **(SEMVER-MAJOR)** **lib**: migrate requests to fetch (#2220) (Matias Lopez) [#2220](https://github.com/nodejs/node-gyp/pull/2220)
* [[`392b7760b4`](https://github.com/nodejs/node-gyp/commit/392b7760b4)] - **lib**: avoid changing process.config (#2322) (Michaël Zasso) [#2322](https://github.com/nodejs/node-gyp/pull/2322)

## v7.1.2 2020-10-17

* [[`096e3aded5`](https://github.com/nodejs/node-gyp/commit/096e3aded5)] - **gyp**: update gyp to 0.6.2 (Myles Borins) [#2241](https://github.com/nodejs/node-gyp/pull/2241)
* [[`54f97cd243`](https://github.com/nodejs/node-gyp/commit/54f97cd243)] - **doc**: add cmd to reset `xcode-select` to initial state (Valera Rozuvan) [#2235](https://github.com/nodejs/node-gyp/pull/2235)

## v7.1.1 2020-10-15

This release restores the location of shared library builds to the pre-v7
location. In v7.0.0 until this release, shared library outputs were placed
in a lib.target subdirectory inside the build/{Release,Debug} directory for
builds using `make` (Linux, etc.). This is inconsistent with macOS (Xcode)
behavior and previous node-gyp behavior so has been reverted.
We consider this a bug-fix rather than semver-major change.

* [[`18bf2d1d38`](https://github.com/nodejs/node-gyp/commit/18bf2d1d38)] - **deps**: update deps to match npm@7 (Rod Vagg) [#2240](https://github.com/nodejs/node-gyp/pull/2240)
* [[`ee6a837cb7`](https://github.com/nodejs/node-gyp/commit/ee6a837cb7)] - **gyp**: update gyp to 0.6.1 (Rod Vagg) [#2238](https://github.com/nodejs/node-gyp/pull/2238)
* [[`3e7f8ccafc`](https://github.com/nodejs/node-gyp/commit/3e7f8ccafc)] - **lib**: better log message when ps fails (Martin Midtgaard) [#2229](https://github.com/nodejs/node-gyp/pull/2229)
* [[`7fb314339f`](https://github.com/nodejs/node-gyp/commit/7fb314339f)] - **test**: GitHub Actions: Test on Python 3.9 (Christian Clauss) [#2230](https://github.com/nodejs/node-gyp/pull/2230)
* [[`754996b9ec`](https://github.com/nodejs/node-gyp/commit/754996b9ec)] - **doc**: replace status badges with new Actions badge (Rod Vagg) [#2218](https://github.com/nodejs/node-gyp/pull/2218)
* [[`2317dc400c`](https://github.com/nodejs/node-gyp/commit/2317dc400c)] - **ci**: switch to GitHub Actions (Shelley Vohr) [#2210](https://github.com/nodejs/node-gyp/pull/2210)
* [[`2cca9b74f7`](https://github.com/nodejs/node-gyp/commit/2cca9b74f7)] - **doc**: drop the --production flag for installing windows-build-tools (DeeDeeG) [#2206](https://github.com/nodejs/node-gyp/pull/2206)
## v7.1.0 2020-08-12
|
||||
|
||||
* [[`aaf33c3029`](https://github.com/nodejs/node-gyp/commit/aaf33c3029)] - **build**: add update-gyp script (Samuel Attard) [#2167](https://github.com/nodejs/node-gyp/pull/2167)
|
||||
* * [[`3baa4e4172`](https://github.com/nodejs/node-gyp/commit/3baa4e4172)] - **(SEMVER-MINOR)** **gyp**: update gyp to 0.4.0 (Samuel Attard) [#2165](https://github.com/nodejs/node-gyp/pull/2165)
|
||||
* * [[`f461d56c53`](https://github.com/nodejs/node-gyp/commit/f461d56c53)] - **(SEMVER-MINOR)** **build**: support apple silicon (arm64 darwin) builds (Samuel Attard) [#2165](https://github.com/nodejs/node-gyp/pull/2165)
|
||||
* * [[`ee6fa7d3bc`](https://github.com/nodejs/node-gyp/commit/ee6fa7d3bc)] - **docs**: note that node-gyp@7 should solve Catalina CLT issues (Rod Vagg) [#2156](https://github.com/nodejs/node-gyp/pull/2156)
|
||||
* * [[`4fc8ff179d`](https://github.com/nodejs/node-gyp/commit/4fc8ff179d)] - **doc**: silence curl for macOS Catalina acid test (Chia Wei Ong) [#2150](https://github.com/nodejs/node-gyp/pull/2150)
|
||||
* * [[`7857cb2eb1`](https://github.com/nodejs/node-gyp/commit/7857cb2eb1)] - **deps**: increase "engines" to "node" : "\>= 10.12.0" (DeeDeeG) [#2153](https://github.com/nodejs/node-gyp/pull/2153)
|
||||
|
||||
## v7.0.0 2020-06-03
|
||||
|
||||
* [[`e18a61afc1`](https://github.com/nodejs/node-gyp/commit/e18a61afc1)] - **build**: shrink bloated addon binaries on windows (Shelley Vohr) [#2060](https://github.com/nodejs/node-gyp/pull/2060)
|
||||
* [[`4937722cf5`](https://github.com/nodejs/node-gyp/commit/4937722cf5)] - **(SEMVER-MAJOR)** **deps**: replace mkdirp with {recursive} mkdir (Rod Vagg) [#2123](https://github.com/nodejs/node-gyp/pull/2123)
|
||||
* [[`d45438a047`](https://github.com/nodejs/node-gyp/commit/d45438a047)] - **(SEMVER-MAJOR)** **deps**: update deps, match to npm@7 (Rod Vagg) [#2126](https://github.com/nodejs/node-gyp/pull/2126)
|
||||
* [[`ba4f34b7d6`](https://github.com/nodejs/node-gyp/commit/ba4f34b7d6)] - **doc**: update catalina xcode clt download link (Dario Vladovic) [#2133](https://github.com/nodejs/node-gyp/pull/2133)
|
||||
* [[`f7bfce96ed`](https://github.com/nodejs/node-gyp/commit/f7bfce96ed)] - **doc**: update acid test and introduce curl|bash test script (Dario Vladovic) [#2105](https://github.com/nodejs/node-gyp/pull/2105)
|
||||
* [[`e529f3309d`](https://github.com/nodejs/node-gyp/commit/e529f3309d)] - **doc**: update README to reflect upgrade to gyp-next (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092)
|
||||
* [[`9aed6286a3`](https://github.com/nodejs/node-gyp/commit/9aed6286a3)] - **doc**: give more attention to Catalina issues doc (Matheus Marchini) [#2134](https://github.com/nodejs/node-gyp/pull/2134)
|
||||
* [[`963f2a7b48`](https://github.com/nodejs/node-gyp/commit/963f2a7b48)] - **doc**: improve Catalina discoverability for search engines (Matheus Marchini) [#2135](https://github.com/nodejs/node-gyp/pull/2135)
|
||||
* [[`7b75af349b`](https://github.com/nodejs/node-gyp/commit/7b75af349b)] - **doc**: add macOS Catalina software update info (Karl Horky) [#2078](https://github.com/nodejs/node-gyp/pull/2078)
|
||||
* [[`4f23c7bee2`](https://github.com/nodejs/node-gyp/commit/4f23c7bee2)] - **doc**: update link to the code of conduct (#2073) (Michaël Zasso) [#2073](https://github.com/nodejs/node-gyp/pull/2073)
|
||||
* [[`473cfa283f`](https://github.com/nodejs/node-gyp/commit/473cfa283f)] - **doc**: note in README that Python 3.8 is supported (#2072) (Michaël Zasso) [#2072](https://github.com/nodejs/node-gyp/pull/2072)
|
||||
* [[`e7402b4a7c`](https://github.com/nodejs/node-gyp/commit/e7402b4a7c)] - **doc**: update catalina xcode cli tools download link (#2044) (Dario Vladović) [#2044](https://github.com/nodejs/node-gyp/pull/2044)
|
||||
* [[`35de45984f`](https://github.com/nodejs/node-gyp/commit/35de45984f)] - **doc**: update catalina xcode cli tools download link; formatting (Jonathan Hult) [#2034](https://github.com/nodejs/node-gyp/pull/2034)
|
||||
* [[`48642191f5`](https://github.com/nodejs/node-gyp/commit/48642191f5)] - **doc**: add download link for Command Line Tools for Xcode (Przemysław Bitkowski) [#2029](https://github.com/nodejs/node-gyp/pull/2029)
|
||||
* [[`ae5b150051`](https://github.com/nodejs/node-gyp/commit/ae5b150051)] - **doc**: Catalina suggestion: remove /Library/Developer/CommandLineTools (Christian Clauss) [#2022](https://github.com/nodejs/node-gyp/pull/2022)
|
||||
* [[`d1dea13fe4`](https://github.com/nodejs/node-gyp/commit/d1dea13fe4)] - **doc**: fix changelog 6.1.0 release year to be 2020 (Quentin Vernot) [#2021](https://github.com/nodejs/node-gyp/pull/2021)
|
||||
* [[`6356117b08`](https://github.com/nodejs/node-gyp/commit/6356117b08)] - **doc, bin**: stop suggesting opening node-gyp issues (Bartosz Sosnowski) [#2096](https://github.com/nodejs/node-gyp/pull/2096)
|
||||
* [[`a6b76a8b48`](https://github.com/nodejs/node-gyp/commit/a6b76a8b48)] - **gyp**: update gyp to 0.2.1 (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092)
|
||||
* [[`ebc34ec823`](https://github.com/nodejs/node-gyp/commit/ebc34ec823)] - **gyp**: update gyp to 0.2.0 (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092)
|
||||
* [[`972780bde7`](https://github.com/nodejs/node-gyp/commit/972780bde7)] - **(SEMVER-MAJOR)** **gyp**: sync code base with nodejs repo (#1975) (Michaël Zasso) [#1975](https://github.com/nodejs/node-gyp/pull/1975)
|
||||
* [[`c255ffbf6a`](https://github.com/nodejs/node-gyp/commit/c255ffbf6a)] - **lib**: drop "-2" flag for "py.exe" launcher (DeeDeeG) [#2131](https://github.com/nodejs/node-gyp/pull/2131)
|
||||
* [[`1f7e1e93b5`](https://github.com/nodejs/node-gyp/commit/1f7e1e93b5)] - **lib**: ignore VS instances that cause COMExceptions (Andrew Casey) [#2018](https://github.com/nodejs/node-gyp/pull/2018)
|
||||
* [[`741ab096d5`](https://github.com/nodejs/node-gyp/commit/741ab096d5)] - **test**: remove support for EOL versions of Node.js (Shelley Vohr)
|
||||
* [[`ca86ef2539`](https://github.com/nodejs/node-gyp/commit/ca86ef2539)] - **test**: bump actions/checkout from v1 to v2 (BSKY) [#2063](https://github.com/nodejs/node-gyp/pull/2063)
|
||||
|
||||
## v6.1.0 2020-01-08
|
||||
|
||||
* [[`9a7dd16b76`](https://github.com/nodejs/node-gyp/commit/9a7dd16b76)] - **doc**: remove backticks from Python version list (Rod Vagg) [#2011](https://github.com/nodejs/node-gyp/pull/2011)
|
||||
* [[`26cd6eaea6`](https://github.com/nodejs/node-gyp/commit/26cd6eaea6)] - **doc**: add GitHub Actions badge (#1994) (Rod Vagg) [#1994](https://github.com/nodejs/node-gyp/pull/1994)
|
||||
* [[`312c12ef4f`](https://github.com/nodejs/node-gyp/commit/312c12ef4f)] - **doc**: update macOS\_Catalina.md (#1992) (James Home) [#1992](https://github.com/nodejs/node-gyp/pull/1992)
|
||||
* [[`f7b6b6b77b`](https://github.com/nodejs/node-gyp/commit/f7b6b6b77b)] - **doc**: fix typo in README.md (#1985) (Suraneti Rodsuwan) [#1985](https://github.com/nodejs/node-gyp/pull/1985)
|
||||
* [[`6b8f2652dd`](https://github.com/nodejs/node-gyp/commit/6b8f2652dd)] - **doc**: add travis badge (Rod Vagg) [#1971](https://github.com/nodejs/node-gyp/pull/1971)
|
||||
* [[`20aa0b44f7`](https://github.com/nodejs/node-gyp/commit/20aa0b44f7)] - **doc**: macOS Catalina add two commands (Christian Clauss) [#1962](https://github.com/nodejs/node-gyp/pull/1962)
|
||||
* [[`14f2a07a39`](https://github.com/nodejs/node-gyp/commit/14f2a07a39)] - **gyp**: list(dict) so we can del dict(key) while iterating (Christian Clauss) [#2009](https://github.com/nodejs/node-gyp/pull/2009)
|
||||
* [[`f242ce4d2c`](https://github.com/nodejs/node-gyp/commit/f242ce4d2c)] - **lib**: compatibility with semver ≥ 7 (`new` for semver.Range) (Xavier Guimard) [#2006](https://github.com/nodejs/node-gyp/pull/2006)
|
||||
* [[`3bcba2a01a`](https://github.com/nodejs/node-gyp/commit/3bcba2a01a)] - **(SEMVER-MINOR)** **lib**: noproxy support, match proxy detection to `request` (Matias Lopez) [#1978](https://github.com/nodejs/node-gyp/pull/1978)
|
||||
* [[`470cc2178e`](https://github.com/nodejs/node-gyp/commit/470cc2178e)] - **test**: remove old docker test harness (#1993) (Rod Vagg) [#1993](https://github.com/nodejs/node-gyp/pull/1993)
|
||||
* [[`31ecc8421d`](https://github.com/nodejs/node-gyp/commit/31ecc8421d)] - **test**: add Windows to GitHub Actions testing (#1996) (Christian Clauss) [#1996](https://github.com/nodejs/node-gyp/pull/1996)
|
||||
* [[`5a729e86ee`](https://github.com/nodejs/node-gyp/commit/5a729e86ee)] - **test**: fix typo in header download test (#2001) (Richard Lau) [#2001](https://github.com/nodejs/node-gyp/pull/2001)
|
||||
* [[`345c70e56d`](https://github.com/nodejs/node-gyp/commit/345c70e56d)] - **test**: direct python invocation & simpler pyenv (Matias Lopez) [#1979](https://github.com/nodejs/node-gyp/pull/1979)
|
||||
* [[`d6a7e0e1fb`](https://github.com/nodejs/node-gyp/commit/d6a7e0e1fb)] - **test**: fix macOS Travis on Python 2.7 & 3.7 (Christian Clauss) [#1979](https://github.com/nodejs/node-gyp/pull/1979)
|
||||
* [[`5a64e9bd32`](https://github.com/nodejs/node-gyp/commit/5a64e9bd32)] - **test**: initial Github Actions with Ubuntu & macOS (Christian Clauss) [#1985](https://github.com/nodejs/node-gyp/pull/1985)
|
||||
* [[`04da736d38`](https://github.com/nodejs/node-gyp/commit/04da736d38)] - **test**: fix Python unittests (cclauss) [#1961](https://github.com/nodejs/node-gyp/pull/1961)
|
||||
* [[`0670e5189d`](https://github.com/nodejs/node-gyp/commit/0670e5189d)] - **test**: add header download test (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796)
|
||||
* [[`c506a6a150`](https://github.com/nodejs/node-gyp/commit/c506a6a150)] - **test**: configure proper devDir for invoking configure() (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796)
|
||||
|
||||
## v6.0.1 2019-11-01
|
||||
|
||||
* [[`8ec2e681d5`](https://github.com/nodejs/node-gyp/commit/8ec2e681d5)] - **doc**: add macOS\_Catalina.md document (cclauss) [#1940](https://github.com/nodejs/node-gyp/pull/1940)
|
||||
* [[`1b11be63cc`](https://github.com/nodejs/node-gyp/commit/1b11be63cc)] - **gyp**: python3 fixes: utf8 decode, use of 'None' in eval (Wilfried Goesgens) [#1925](https://github.com/nodejs/node-gyp/pull/1925)
|
||||
* [[`c0282daa48`](https://github.com/nodejs/node-gyp/commit/c0282daa48)] - **gyp**: iteritems() -\> items() in compile\_commands\_json.py (cclauss) [#1947](https://github.com/nodejs/node-gyp/pull/1947)
|
||||
* [[`d8e09a1b6a`](https://github.com/nodejs/node-gyp/commit/d8e09a1b6a)] - **gyp**: make cmake python3 compatible (gengjiawen) [#1944](https://github.com/nodejs/node-gyp/pull/1944)
|
||||
* [[`9c0f3404f0`](https://github.com/nodejs/node-gyp/commit/9c0f3404f0)] - **gyp**: fix TypeError in XcodeVersion() (Christian Clauss) [#1939](https://github.com/nodejs/node-gyp/pull/1939)
|
||||
* [[`bb2eb72a3f`](https://github.com/nodejs/node-gyp/commit/bb2eb72a3f)] - **gyp**: finish decode stdout on Python 3 (Christian Clauss) [#1937](https://github.com/nodejs/node-gyp/pull/1937)
|
||||
* [[`f0693413d9`](https://github.com/nodejs/node-gyp/commit/f0693413d9)] - **src,win**: allow 403 errors for arm64 node.lib (Richard Lau) [#1934](https://github.com/nodejs/node-gyp/pull/1934)
|
||||
* [[`c60c22de58`](https://github.com/nodejs/node-gyp/commit/c60c22de58)] - **deps**: update deps to roughly match current npm@6 (Rod Vagg) [#1920](https://github.com/nodejs/node-gyp/pull/1920)
|
||||
* [[`b91718eefc`](https://github.com/nodejs/node-gyp/commit/b91718eefc)] - **test**: upgrade Linux Travis CI to Python 3.8 (Christian Clauss) [#1923](https://github.com/nodejs/node-gyp/pull/1923)
|
||||
* [[`3538a317b6`](https://github.com/nodejs/node-gyp/commit/3538a317b6)] - **doc**: adjustments to the README.md for new users (Dan Pike) [#1919](https://github.com/nodejs/node-gyp/pull/1919)
|
||||
* [[`4fff8458c0`](https://github.com/nodejs/node-gyp/commit/4fff8458c0)] - **travis**: ignore failed `brew upgrade npm`, update xcode (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932)
|
||||
* [[`60e4488f08`](https://github.com/nodejs/node-gyp/commit/60e4488f08)] - **build**: avoid bare exceptions in xcode\_emulation.py (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932)
|
||||
* [[`032db2a2d0`](https://github.com/nodejs/node-gyp/commit/032db2a2d0)] - **lib,install**: always download SHA sums on Windows (Sam Hughes) [#1926](https://github.com/nodejs/node-gyp/pull/1926)
|
||||
* [[`5a83630c33`](https://github.com/nodejs/node-gyp/commit/5a83630c33)] - **travis**: add Windows + Python 3.8 to the mix (Rod Vagg) [#1921](https://github.com/nodejs/node-gyp/pull/1921)
|
||||
|
||||
## v6.0.0 2019-10-04
|
||||
|
||||
* [[`dd0e97ef0b`](https://github.com/nodejs/node-gyp/commit/dd0e97ef0b)] - **(SEMVER-MAJOR)** **lib**: try to find `python` after `python3` (Sam Roberts) [#1907](https://github.com/nodejs/node-gyp/pull/1907)
|
||||
* [[`f60ed47d14`](https://github.com/nodejs/node-gyp/commit/f60ed47d14)] - **travis**: add Python 3.5 and 3.6 tests on Linux (Christian Clauss) [#1903](https://github.com/nodejs/node-gyp/pull/1903)
|
||||
* [[`c763ca1838`](https://github.com/nodejs/node-gyp/commit/c763ca1838)] - **(SEMVER-MAJOR)** **doc**: Declare that node-gyp is Python 3 compatible (cclauss) [#1811](https://github.com/nodejs/node-gyp/pull/1811)
|
||||
* [[`3d1c60ab81`](https://github.com/nodejs/node-gyp/commit/3d1c60ab81)] - **(SEMVER-MAJOR)** **lib**: accept Python 3 by default (João Reis) [#1844](https://github.com/nodejs/node-gyp/pull/1844)
|
||||
* [[`c6e3b65a23`](https://github.com/nodejs/node-gyp/commit/c6e3b65a23)] - **(SEMVER-MAJOR)** **lib**: raise the minimum Python version from 2.6 to 2.7 (cclauss) [#1818](https://github.com/nodejs/node-gyp/pull/1818)
|
||||
|
||||
## v5.1.1 2020-05-25

* [[`bdd3a79abe`](https://github.com/nodejs/node-gyp/commit/bdd3a79abe)] - **build**: shrink bloated addon binaries on windows (Shelley Vohr) [#2060](https://github.com/nodejs/node-gyp/pull/2060)
* [[`1f2ba75bc0`](https://github.com/nodejs/node-gyp/commit/1f2ba75bc0)] - **doc**: add macOS Catalina software update info (Karl Horky) [#2078](https://github.com/nodejs/node-gyp/pull/2078)
* [[`c106d915f5`](https://github.com/nodejs/node-gyp/commit/c106d915f5)] - **doc**: update catalina xcode cli tools download link (#2044) (Dario Vladović) [#2044](https://github.com/nodejs/node-gyp/pull/2044)
* [[`9a6fea92e2`](https://github.com/nodejs/node-gyp/commit/9a6fea92e2)] - **doc**: update catalina xcode cli tools download link; formatting (Jonathan Hult) [#2034](https://github.com/nodejs/node-gyp/pull/2034)
* [[`59b0b1add8`](https://github.com/nodejs/node-gyp/commit/59b0b1add8)] - **doc**: add download link for Command Line Tools for Xcode (Przemysław Bitkowski) [#2029](https://github.com/nodejs/node-gyp/pull/2029)
* [[`bb8d0e7b10`](https://github.com/nodejs/node-gyp/commit/bb8d0e7b10)] - **doc**: Catalina suggestion: remove /Library/Developer/CommandLineTools (Christian Clauss) [#2022](https://github.com/nodejs/node-gyp/pull/2022)
* [[`fb2e80d4e3`](https://github.com/nodejs/node-gyp/commit/fb2e80d4e3)] - **doc**: update link to the code of conduct (#2073) (Michaël Zasso) [#2073](https://github.com/nodejs/node-gyp/pull/2073)
* [[`251d9c885c`](https://github.com/nodejs/node-gyp/commit/251d9c885c)] - **doc**: note in README that Python 3.8 is supported (#2072) (Michaël Zasso) [#2072](https://github.com/nodejs/node-gyp/pull/2072)
* [[`2b6fc3c8d6`](https://github.com/nodejs/node-gyp/commit/2b6fc3c8d6)] - **doc, bin**: stop suggesting opening node-gyp issues (Bartosz Sosnowski) [#2096](https://github.com/nodejs/node-gyp/pull/2096)
* [[`a876ae58ad`](https://github.com/nodejs/node-gyp/commit/a876ae58ad)] - **test**: bump actions/checkout from v1 to v2 (BSKY) [#2063](https://github.com/nodejs/node-gyp/pull/2063)

## v5.1.0 2020-02-05

* [[`f37a8b40d0`](https://github.com/nodejs/node-gyp/commit/f37a8b40d0)] - **doc**: add GitHub Actions badge (#1994) (Rod Vagg) [#1994](https://github.com/nodejs/node-gyp/pull/1994)
* [[`cb3f6aae5e`](https://github.com/nodejs/node-gyp/commit/cb3f6aae5e)] - **doc**: update macOS\_Catalina.md (#1992) (James Home) [#1992](https://github.com/nodejs/node-gyp/pull/1992)
* [[`0607596a4c`](https://github.com/nodejs/node-gyp/commit/0607596a4c)] - **doc**: fix typo in README.md (#1985) (Suraneti Rodsuwan) [#1985](https://github.com/nodejs/node-gyp/pull/1985)
* [[`0d5a415a14`](https://github.com/nodejs/node-gyp/commit/0d5a415a14)] - **doc**: add travis badge (Rod Vagg) [#1971](https://github.com/nodejs/node-gyp/pull/1971)
* [[`103740cd95`](https://github.com/nodejs/node-gyp/commit/103740cd95)] - **gyp**: list(dict) so we can del dict(key) while iterating (Christian Clauss) [#2009](https://github.com/nodejs/node-gyp/pull/2009)
* [[`278dcddbdd`](https://github.com/nodejs/node-gyp/commit/278dcddbdd)] - **lib**: ignore VS instances that cause COMExceptions (Andrew Casey) [#2018](https://github.com/nodejs/node-gyp/pull/2018)
* [[`1694907bbf`](https://github.com/nodejs/node-gyp/commit/1694907bbf)] - **lib**: compatibility with semver ≥ 7 (`new` for semver.Range) (Xavier Guimard) [#2006](https://github.com/nodejs/node-gyp/pull/2006)
* [[`a3f1143514`](https://github.com/nodejs/node-gyp/commit/a3f1143514)] - **(SEMVER-MINOR)** **lib**: noproxy support, match proxy detection to `request` (Matias Lopez) [#1978](https://github.com/nodejs/node-gyp/pull/1978)
* [[`52365819c7`](https://github.com/nodejs/node-gyp/commit/52365819c7)] - **test**: remove old docker test harness (#1993) (Rod Vagg) [#1993](https://github.com/nodejs/node-gyp/pull/1993)
* [[`bc509c511d`](https://github.com/nodejs/node-gyp/commit/bc509c511d)] - **test**: add Windows to GitHub Actions testing (#1996) (Christian Clauss) [#1996](https://github.com/nodejs/node-gyp/pull/1996)
* [[`91ee26dd48`](https://github.com/nodejs/node-gyp/commit/91ee26dd48)] - **test**: fix typo in header download test (#2001) (Richard Lau) [#2001](https://github.com/nodejs/node-gyp/pull/2001)
* [[`0923f344c9`](https://github.com/nodejs/node-gyp/commit/0923f344c9)] - **test**: direct python invocation & simpler pyenv (Matias Lopez) [#1979](https://github.com/nodejs/node-gyp/pull/1979)
* [[`32c8744b34`](https://github.com/nodejs/node-gyp/commit/32c8744b34)] - **test**: fix macOS Travis on Python 2.7 & 3.7 (Christian Clauss) [#1979](https://github.com/nodejs/node-gyp/pull/1979)
* [[`fd4b1351e4`](https://github.com/nodejs/node-gyp/commit/fd4b1351e4)] - **test**: initial Github Actions with Ubuntu & macOS (Christian Clauss) [#1985](https://github.com/nodejs/node-gyp/pull/1985)
## v5.0.7 2019-12-16

Republish of v5.0.6 with unnecessary tarball removed from pack file.

## v5.0.6 2019-12-16

* [[`cdec00286f`](https://github.com/nodejs/node-gyp/commit/cdec00286f)] - **doc**: adjustments to the README.md for new users (Dan Pike) [#1919](https://github.com/nodejs/node-gyp/pull/1919)
* [[`b7c8233ef2`](https://github.com/nodejs/node-gyp/commit/b7c8233ef2)] - **test**: fix Python unittests (cclauss) [#1961](https://github.com/nodejs/node-gyp/pull/1961)
* [[`e12b00ab0a`](https://github.com/nodejs/node-gyp/commit/e12b00ab0a)] - **doc**: macOS Catalina add two commands (Christian Clauss) [#1962](https://github.com/nodejs/node-gyp/pull/1962)
* [[`70b9890c0d`](https://github.com/nodejs/node-gyp/commit/70b9890c0d)] - **test**: add header download test (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796)
* [[`4029fa8629`](https://github.com/nodejs/node-gyp/commit/4029fa8629)] - **test**: configure proper devDir for invoking configure() (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796)
* [[`fe8b02cc8b`](https://github.com/nodejs/node-gyp/commit/fe8b02cc8b)] - **doc**: add macOS\_Catalina.md document (cclauss) [#1940](https://github.com/nodejs/node-gyp/pull/1940)
* [[`8ea47ce365`](https://github.com/nodejs/node-gyp/commit/8ea47ce365)] - **gyp**: python3 fixes: utf8 decode, use of 'None' in eval (Wilfried Goesgens) [#1925](https://github.com/nodejs/node-gyp/pull/1925)
* [[`c7229716ba`](https://github.com/nodejs/node-gyp/commit/c7229716ba)] - **gyp**: iteritems() -\> items() in compile\_commands\_json.py (cclauss) [#1947](https://github.com/nodejs/node-gyp/pull/1947)
* [[`2a18b2a0f8`](https://github.com/nodejs/node-gyp/commit/2a18b2a0f8)] - **gyp**: make cmake python3 compatible (gengjiawen) [#1944](https://github.com/nodejs/node-gyp/pull/1944)
* [[`70f391e844`](https://github.com/nodejs/node-gyp/commit/70f391e844)] - **gyp**: fix TypeError in XcodeVersion() (Christian Clauss) [#1939](https://github.com/nodejs/node-gyp/pull/1939)
* [[`9f4f0fa34e`](https://github.com/nodejs/node-gyp/commit/9f4f0fa34e)] - **gyp**: finish decode stdout on Python 3 (Christian Clauss) [#1937](https://github.com/nodejs/node-gyp/pull/1937)
* [[`7cf507906d`](https://github.com/nodejs/node-gyp/commit/7cf507906d)] - **src,win**: allow 403 errors for arm64 node.lib (Richard Lau) [#1934](https://github.com/nodejs/node-gyp/pull/1934)
* [[`ad0d182c01`](https://github.com/nodejs/node-gyp/commit/ad0d182c01)] - **deps**: update deps to roughly match current npm@6 (Rod Vagg) [#1920](https://github.com/nodejs/node-gyp/pull/1920)
* [[`1553081ed6`](https://github.com/nodejs/node-gyp/commit/1553081ed6)] - **test**: upgrade Linux Travis CI to Python 3.8 (Christian Clauss) [#1923](https://github.com/nodejs/node-gyp/pull/1923)
* [[`0705cae9aa`](https://github.com/nodejs/node-gyp/commit/0705cae9aa)] - **travis**: ignore failed `brew upgrade npm`, update xcode (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932)
* [[`7bfdb6f5bf`](https://github.com/nodejs/node-gyp/commit/7bfdb6f5bf)] - **build**: avoid bare exceptions in xcode\_emulation.py (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932)
* [[`7edf7658fa`](https://github.com/nodejs/node-gyp/commit/7edf7658fa)] - **lib,install**: always download SHA sums on Windows (Sam Hughes) [#1926](https://github.com/nodejs/node-gyp/pull/1926)
* [[`69056d04fe`](https://github.com/nodejs/node-gyp/commit/69056d04fe)] - **travis**: add Windows + Python 3.8 to the mix (Rod Vagg) [#1921](https://github.com/nodejs/node-gyp/pull/1921)
## v5.0.5 2019-10-04

* [[`3891391746`](https://github.com/nodejs/node-gyp/commit/3891391746)] - **doc**: reconcile README with Python 3 compat changes (Rod Vagg) [#1911](https://github.com/nodejs/node-gyp/pull/1911)
* [[`07f81f1920`](https://github.com/nodejs/node-gyp/commit/07f81f1920)] - **lib**: accept Python 3 after Python 2 (Sam Roberts) [#1910](https://github.com/nodejs/node-gyp/pull/1910)
* [[`04ce59f4a2`](https://github.com/nodejs/node-gyp/commit/04ce59f4a2)] - **doc**: clarify Python configuration, etc (Sam Roberts) [#1908](https://github.com/nodejs/node-gyp/pull/1908)
* [[`01c46ee3df`](https://github.com/nodejs/node-gyp/commit/01c46ee3df)] - **gyp**: add \_\_lt\_\_ to MSVSSolutionEntry (João Reis) [#1904](https://github.com/nodejs/node-gyp/pull/1904)
* [[`735d961b99`](https://github.com/nodejs/node-gyp/commit/735d961b99)] - **win**: support VS 2017 Desktop Express (João Reis) [#1902](https://github.com/nodejs/node-gyp/pull/1902)
* [[`3834156a92`](https://github.com/nodejs/node-gyp/commit/3834156a92)] - **test**: add Python 3.5 and 3.6 tests on Linux (cclauss) [#1909](https://github.com/nodejs/node-gyp/pull/1909)
* [[`1196e990d8`](https://github.com/nodejs/node-gyp/commit/1196e990d8)] - **src**: update to standard@14 (Rod Vagg) [#1899](https://github.com/nodejs/node-gyp/pull/1899)
* [[`53ee7dfe89`](https://github.com/nodejs/node-gyp/commit/53ee7dfe89)] - **gyp**: fix undefined name: cflags --\> ldflags (Christian Clauss) [#1901](https://github.com/nodejs/node-gyp/pull/1901)
* [[`5871dcf6c9`](https://github.com/nodejs/node-gyp/commit/5871dcf6c9)] - **src,win**: add support for fetching arm64 node.lib (Richard Townsend) [#1875](https://github.com/nodejs/node-gyp/pull/1875)

## v5.0.4 2019-09-27

* [[`1236869ffc`](https://github.com/nodejs/node-gyp/commit/1236869ffc)] - **gyp**: modify XcodeVersion() to convert "4.2" to "0420" and "10.0" to "1000" (Christian Clauss) [#1895](https://github.com/nodejs/node-gyp/pull/1895)
* [[`36638afe48`](https://github.com/nodejs/node-gyp/commit/36638afe48)] - **gyp**: more decode stdout on Python 3 (cclauss) [#1894](https://github.com/nodejs/node-gyp/pull/1894)
* [[`f753c167c5`](https://github.com/nodejs/node-gyp/commit/f753c167c5)] - **gyp**: decode stdout on Python 3 (cclauss) [#1890](https://github.com/nodejs/node-gyp/pull/1890)
* [[`60a4083523`](https://github.com/nodejs/node-gyp/commit/60a4083523)] - **doc**: update xcode install instructions to match Node's BUILDING (Nhan Khong) [#1884](https://github.com/nodejs/node-gyp/pull/1884)
* [[`19dbc9ac32`](https://github.com/nodejs/node-gyp/commit/19dbc9ac32)] - **deps**: update tar to 4.4.12 (Matheus Marchini) [#1889](https://github.com/nodejs/node-gyp/pull/1889)
* [[`5f3ed92181`](https://github.com/nodejs/node-gyp/commit/5f3ed92181)] - **bin**: fix the usage instructions (Halit Ogunc) [#1888](https://github.com/nodejs/node-gyp/pull/1888)
* [[`aab118edf1`](https://github.com/nodejs/node-gyp/commit/aab118edf1)] - **lib**: adding keep-alive header to download requests (Milad Farazmand) [#1863](https://github.com/nodejs/node-gyp/pull/1863)
* [[`1186e89326`](https://github.com/nodejs/node-gyp/commit/1186e89326)] - **lib**: ignore non-critical os.userInfo() failures (Rod Vagg) [#1835](https://github.com/nodejs/node-gyp/pull/1835)
* [[`785e527c3d`](https://github.com/nodejs/node-gyp/commit/785e527c3d)] - **doc**: fix missing argument for setting python path (lagorsse) [#1802](https://github.com/nodejs/node-gyp/pull/1802)
* [[`a97615196c`](https://github.com/nodejs/node-gyp/commit/a97615196c)] - **gyp**: rm semicolons (Python != JavaScript) (MattIPv4) [#1858](https://github.com/nodejs/node-gyp/pull/1858)
* [[`06019bac24`](https://github.com/nodejs/node-gyp/commit/06019bac24)] - **gyp**: assorted typo fixes (XhmikosR) [#1853](https://github.com/nodejs/node-gyp/pull/1853)
* [[`3f4972c1ca`](https://github.com/nodejs/node-gyp/commit/3f4972c1ca)] - **gyp**: use "is" when comparing to None (Vladyslav Burzakovskyy) [#1860](https://github.com/nodejs/node-gyp/pull/1860)
* [[`1cb4708073`](https://github.com/nodejs/node-gyp/commit/1cb4708073)] - **src,win**: improve unmanaged handling (Peter Sabath) [#1852](https://github.com/nodejs/node-gyp/pull/1852)
* [[`5553cd910e`](https://github.com/nodejs/node-gyp/commit/5553cd910e)] - **gyp**: improve Windows+Cygwin compatibility (Jose Quijada) [#1817](https://github.com/nodejs/node-gyp/pull/1817)
* [[`8bcb1fbb43`](https://github.com/nodejs/node-gyp/commit/8bcb1fbb43)] - **gyp**: Python 3 Windows fixes (João Reis) [#1843](https://github.com/nodejs/node-gyp/pull/1843)
* [[`2e24d0a326`](https://github.com/nodejs/node-gyp/commit/2e24d0a326)] - **test**: accept Python 3 in test-find-python.js (João Reis) [#1843](https://github.com/nodejs/node-gyp/pull/1843)
* [[`1267b4dc1c`](https://github.com/nodejs/node-gyp/commit/1267b4dc1c)] - **build**: add test run Python 3.7 on macOS (Christian Clauss) [#1843](https://github.com/nodejs/node-gyp/pull/1843)
* [[`da1b031aa3`](https://github.com/nodejs/node-gyp/commit/da1b031aa3)] - **build**: import StringIO on Python 2 and Python 3 (Christian Clauss) [#1836](https://github.com/nodejs/node-gyp/pull/1836)
* [[`fa0ed4aa42`](https://github.com/nodejs/node-gyp/commit/fa0ed4aa42)] - **build**: more Python 3 compat, replace compile with ast (cclauss) [#1820](https://github.com/nodejs/node-gyp/pull/1820)
* [[`18d5c7c9d0`](https://github.com/nodejs/node-gyp/commit/18d5c7c9d0)] - **win,src**: update win\_delay\_load\_hook.cc to work with /clr (Ivan Petrovic) [#1819](https://github.com/nodejs/node-gyp/pull/1819)
## v5.0.3 2019-07-17

* [[`66ad305775`](https://github.com/nodejs/node-gyp/commit/66ad305775)] - **python**: accept Python 3 conditionally (João Reis) [#1815](https://github.com/nodejs/node-gyp/pull/1815)
* [[`7e7fce3fed`](https://github.com/nodejs/node-gyp/commit/7e7fce3fed)] - **python**: move Python detection to its own file (João Reis) [#1815](https://github.com/nodejs/node-gyp/pull/1815)
* [[`e40c99e283`](https://github.com/nodejs/node-gyp/commit/e40c99e283)] - **src**: implement standard.js linting (Rod Vagg) [#1794](https://github.com/nodejs/node-gyp/pull/1794)
* [[`bb92c761a9`](https://github.com/nodejs/node-gyp/commit/bb92c761a9)] - **test**: add Node.js 6 on Windows to Travis CI (João Reis) [#1812](https://github.com/nodejs/node-gyp/pull/1812)
* [[`7fd924079f`](https://github.com/nodejs/node-gyp/commit/7fd924079f)] - **test**: increase tap timeout (João Reis) [#1812](https://github.com/nodejs/node-gyp/pull/1812)
* [[`7e8127068f`](https://github.com/nodejs/node-gyp/commit/7e8127068f)] - **test**: cover supported node versions with travis (Rod Vagg) [#1809](https://github.com/nodejs/node-gyp/pull/1809)
* [[`24109148df`](https://github.com/nodejs/node-gyp/commit/24109148df)] - **test**: downgrade to tap@^12 for continued Node 6 support (Rod Vagg) [#1808](https://github.com/nodejs/node-gyp/pull/1808)
* [[`656117cc4a`](https://github.com/nodejs/node-gyp/commit/656117cc4a)] - **win**: make VS path match case-insensitive (João Reis) [#1806](https://github.com/nodejs/node-gyp/pull/1806)

## v5.0.2 2019-06-27

* [[`2761afbf73`](https://github.com/nodejs/node-gyp/commit/2761afbf73)] - **build,test**: add duplicate symbol test (Gabriel Schulhof) [#1689](https://github.com/nodejs/node-gyp/pull/1689)
* [[`82f129d6de`](https://github.com/nodejs/node-gyp/commit/82f129d6de)] - **gyp**: replace optparse to argparse (KiYugadgeter) [#1591](https://github.com/nodejs/node-gyp/pull/1591)
* [[`afaaa29c61`](https://github.com/nodejs/node-gyp/commit/afaaa29c61)] - **gyp**: remove from \_\_future\_\_ import with\_statement (cclauss) [#1799](https://github.com/nodejs/node-gyp/pull/1799)
* [[`a991f633d6`](https://github.com/nodejs/node-gyp/commit/a991f633d6)] - **gyp**: fix the remaining Python 3 issues (cclauss) [#1793](https://github.com/nodejs/node-gyp/pull/1793)
* [[`f952b08f84`](https://github.com/nodejs/node-gyp/commit/f952b08f84)] - **gyp**: move from \_\_future\_\_ import to the top of the file (cclauss) [#1789](https://github.com/nodejs/node-gyp/pull/1789)
* [[`4f4a677dfa`](https://github.com/nodejs/node-gyp/commit/4f4a677dfa)] - **gyp**: use different default compiler for z/OS (Shuowang (Wayne) Zhang) [#1768](https://github.com/nodejs/node-gyp/pull/1768)
* [[`03683f09d6`](https://github.com/nodejs/node-gyp/commit/03683f09d6)] - **lib**: code de-duplication (Pavel Medvedev) [#965](https://github.com/nodejs/node-gyp/pull/965)
* [[`611bc3c89f`](https://github.com/nodejs/node-gyp/commit/611bc3c89f)] - **lib**: add .json suffix for explicit require (Rod Vagg) [#1787](https://github.com/nodejs/node-gyp/pull/1787)
* [[`d3478d7b0b`](https://github.com/nodejs/node-gyp/commit/d3478d7b0b)] - **meta**: add to .gitignore (Refael Ackermann) [#1573](https://github.com/nodejs/node-gyp/pull/1573)
* [[`7a9a038e9e`](https://github.com/nodejs/node-gyp/commit/7a9a038e9e)] - **test**: add parallel test runs on macOS and Windows (cclauss) [#1800](https://github.com/nodejs/node-gyp/pull/1800)
* [[`7dd7f2b2a2`](https://github.com/nodejs/node-gyp/commit/7dd7f2b2a2)] - **test**: fix Python syntax error in test-adding.js (cclauss) [#1793](https://github.com/nodejs/node-gyp/pull/1793)
* [[`395f843de0`](https://github.com/nodejs/node-gyp/commit/395f843de0)] - **test**: replace self-signed cert with 'localhost' (Rod Vagg) [#1795](https://github.com/nodejs/node-gyp/pull/1795)
* [[`a52c6eb9e8`](https://github.com/nodejs/node-gyp/commit/a52c6eb9e8)] - **test**: migrate from tape to tap (Rod Vagg) [#1795](https://github.com/nodejs/node-gyp/pull/1795)
* [[`ec2eb44a30`](https://github.com/nodejs/node-gyp/commit/ec2eb44a30)] - **test**: use Nan in duplicate\_symbols (Gabriel Schulhof) [#1689](https://github.com/nodejs/node-gyp/pull/1689)
* [[`1597c84aad`](https://github.com/nodejs/node-gyp/commit/1597c84aad)] - **test**: use Travis CI to run tests on every pull request (cclauss) [#1752](https://github.com/nodejs/node-gyp/pull/1752)
* [[`dd9bf929ac`](https://github.com/nodejs/node-gyp/commit/dd9bf929ac)] - **zos**: update compiler options (Shuowang (Wayne) Zhang) [#1768](https://github.com/nodejs/node-gyp/pull/1768)

## v5.0.1 2019-06-20

* [[`e3861722ed`](https://github.com/nodejs/node-gyp/commit/e3861722ed)] - **doc**: document --jobs max (David Sanders) [#1770](https://github.com/nodejs/node-gyp/pull/1770)
* [[`1cfdb28886`](https://github.com/nodejs/node-gyp/commit/1cfdb28886)] - **lib**: reintroduce support for iojs file naming for releases \>= 1 && \< 4 (Samuel Attard) [#1777](https://github.com/nodejs/node-gyp/pull/1777)
## v5.0.0 2019-06-13

* [[`8a83972743`](https://github.com/nodejs/node-gyp/commit/8a83972743)] - **(SEMVER-MAJOR)** **bin**: follow XDG OS conventions for storing data (Selwyn) [#1570](https://github.com/nodejs/node-gyp/pull/1570)
* [[`9e46872ea3`](https://github.com/nodejs/node-gyp/commit/9e46872ea3)] - **bin,lib**: remove extra comments/lines/spaces (Jon Moss) [#1508](https://github.com/nodejs/node-gyp/pull/1508)
* [[`8098ebdeb4`](https://github.com/nodejs/node-gyp/commit/8098ebdeb4)] - **deps**: replace `osenv` dependency with native `os` (Selwyn)
* [[`f83b457e03`](https://github.com/nodejs/node-gyp/commit/f83b457e03)] - **deps**: bump request to 2.8.7, fixes heok/hawk issues (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492)
* [[`323cee7323`](https://github.com/nodejs/node-gyp/commit/323cee7323)] - **deps**: pin `request` version range (Refael Ackermann) [#1300](https://github.com/nodejs/node-gyp/pull/1300)
* [[`c515912d08`](https://github.com/nodejs/node-gyp/commit/c515912d08)] - **doc**: improve issue template (Bartosz Sosnowski) [#1618](https://github.com/nodejs/node-gyp/pull/1618)
* [[`cca2d66727`](https://github.com/nodejs/node-gyp/commit/cca2d66727)] - **doc**: python info needs own header (Taylor D. Lee) [#1245](https://github.com/nodejs/node-gyp/pull/1245)
* [[`3e64c780f5`](https://github.com/nodejs/node-gyp/commit/3e64c780f5)] - **doc**: lint README.md (Jon Moss) [#1498](https://github.com/nodejs/node-gyp/pull/1498)
* [[`a20faedc91`](https://github.com/nodejs/node-gyp/commit/a20faedc91)] - **(SEMVER-MAJOR)** **gyp**: enable MARMASM items only on new VS versions (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762)
* [[`721eb691cf`](https://github.com/nodejs/node-gyp/commit/721eb691cf)] - **gyp**: teach MSVS generator about MARMASM Items (Jon Kunkee) [#1679](https://github.com/nodejs/node-gyp/pull/1679)
* [[`91744bfecc`](https://github.com/nodejs/node-gyp/commit/91744bfecc)] - **gyp**: add support for Windows on Arm (Richard Townsend) [#1739](https://github.com/nodejs/node-gyp/pull/1739)
* [[`a6e0a6c7ed`](https://github.com/nodejs/node-gyp/commit/a6e0a6c7ed)] - **gyp**: move compile\_commands\_json (Paul Maréchal) [#1661](https://github.com/nodejs/node-gyp/pull/1661)
* [[`92e8b52cee`](https://github.com/nodejs/node-gyp/commit/92e8b52cee)] - **gyp**: fix target --\> self.target (cclauss)
* [[`febdfa2137`](https://github.com/nodejs/node-gyp/commit/febdfa2137)] - **gyp**: fix sntex error (cclauss) [#1333](https://github.com/nodejs/node-gyp/pull/1333)
* [[`588d333c14`](https://github.com/nodejs/node-gyp/commit/588d333c14)] - **gyp**: \_winreg module was renamed to winreg in Python 3. (Craig Rodrigues)
* [[`98226d198c`](https://github.com/nodejs/node-gyp/commit/98226d198c)] - **gyp**: replace basestring with str, but only on Python 3. (Craig Rodrigues)
* [[`7535e4478e`](https://github.com/nodejs/node-gyp/commit/7535e4478e)] - **gyp**: replace deprecated functions (Craig Rodrigues)
* [[`2040cd21cc`](https://github.com/nodejs/node-gyp/commit/2040cd21cc)] - **gyp**: use print as a function, as specified in PEP 3105. (Craig Rodrigues)
* [[`abef93ded5`](https://github.com/nodejs/node-gyp/commit/abef93ded5)] - **gyp**: get ready for python 3 (cclauss)
* [[`43031fadcb`](https://github.com/nodejs/node-gyp/commit/43031fadcb)] - **python**: clean-up detection (João Reis) [#1582](https://github.com/nodejs/node-gyp/pull/1582)
* [[`49ab79d221`](https://github.com/nodejs/node-gyp/commit/49ab79d221)] - **python**: more informative error (Refael Ackermann) [#1269](https://github.com/nodejs/node-gyp/pull/1269)
* [[`997bc3c748`](https://github.com/nodejs/node-gyp/commit/997bc3c748)] - **readme**: add ARM64 info to MSVC setup instructions (Jon Kunkee) [#1655](https://github.com/nodejs/node-gyp/pull/1655)
* [[`788e767179`](https://github.com/nodejs/node-gyp/commit/788e767179)] - **test**: remove unused variable (João Reis)
* [[`6f5a408934`](https://github.com/nodejs/node-gyp/commit/6f5a408934)] - **tools**: fix usage of inherited -fPIC and -fPIE (Jens) [#1340](https://github.com/nodejs/node-gyp/pull/1340)
* [[`0efb8fb34b`](https://github.com/nodejs/node-gyp/commit/0efb8fb34b)] - **(SEMVER-MAJOR)** **win**: support running in VS Command Prompt (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762)
* [[`360ddbdf3a`](https://github.com/nodejs/node-gyp/commit/360ddbdf3a)] - **(SEMVER-MAJOR)** **win**: add support for Visual Studio 2019 (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762)
* [[`8f43f68275`](https://github.com/nodejs/node-gyp/commit/8f43f68275)] - **(SEMVER-MAJOR)** **win**: detect all VS versions in node-gyp (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762)
* [[`7fe4095974`](https://github.com/nodejs/node-gyp/commit/7fe4095974)] - **(SEMVER-MAJOR)** **win**: generic Visual Studio 2017 detection (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762)
* [[`7a71d68bce`](https://github.com/nodejs/node-gyp/commit/7a71d68bce)] - **win**: use msbuild from the configure stage (Bartosz Sosnowski) [#1654](https://github.com/nodejs/node-gyp/pull/1654)
* [[`d3b21220a0`](https://github.com/nodejs/node-gyp/commit/d3b21220a0)] - **win**: fix delay-load hook for electron 4 (Andy Dill)
* [[`81f3a92338`](https://github.com/nodejs/node-gyp/commit/81f3a92338)] - Update list of Node.js versions to test against. (Ben Noordhuis) [#1670](https://github.com/nodejs/node-gyp/pull/1670)
* [[`4748f6ab75`](https://github.com/nodejs/node-gyp/commit/4748f6ab75)] - Remove deprecated compatibility code. (Ben Noordhuis) [#1670](https://github.com/nodejs/node-gyp/pull/1670)
* [[`45e3221fd4`](https://github.com/nodejs/node-gyp/commit/45e3221fd4)] - Remove an outdated workaround for Python 2.4 (cclauss) [#1650](https://github.com/nodejs/node-gyp/pull/1650)
* [[`721dc7d314`](https://github.com/nodejs/node-gyp/commit/721dc7d314)] - Add ARM64 to MSBuild /Platform logic (Jon Kunkee) [#1655](https://github.com/nodejs/node-gyp/pull/1655)
* [[`a5b7410497`](https://github.com/nodejs/node-gyp/commit/a5b7410497)] - Add ESLint no-unused-vars rule (Jon Moss) [#1497](https://github.com/nodejs/node-gyp/pull/1497)
## v4.0.0 2019-04-24

* [[`ceed5cbe10`](https://github.com/nodejs/node-gyp/commit/ceed5cbe10)] - **deps**: updated tar package version to 4.4.8 (Pobegaylo Maksim) [#1713](https://github.com/nodejs/node-gyp/pull/1713)
* [[`374519e066`](https://github.com/nodejs/node-gyp/commit/374519e066)] - **(SEMVER-MAJOR)** Upgrade to tar v3 (isaacs) [#1212](https://github.com/nodejs/node-gyp/pull/1212)
* [[`e6699d13cd`](https://github.com/nodejs/node-gyp/commit/e6699d13cd)] - **test**: fix addon test for Node.js 12 and V8 7.4 (Richard Lau) [#1705](https://github.com/nodejs/node-gyp/pull/1705)
* [[`0c6bf530a0`](https://github.com/nodejs/node-gyp/commit/0c6bf530a0)] - **lib**: use print() for python version detection (GreenAddress) [#1534](https://github.com/nodejs/node-gyp/pull/1534)
## v3.8.0 2018-08-09

* [[`c5929cb4fe`](https://github.com/nodejs/node-gyp/commit/c5929cb4fe)] - **doc**: update Xcode preferences tab name. (Ivan Daniluk) [#1330](https://github.com/nodejs/node-gyp/pull/1330)
* [[`8b488da8b9`](https://github.com/nodejs/node-gyp/commit/8b488da8b9)] - **doc**: update link to commit guidelines (Jonas Hermsmeier) [#1456](https://github.com/nodejs/node-gyp/pull/1456)
* [[`b4fe8c16f9`](https://github.com/nodejs/node-gyp/commit/b4fe8c16f9)] - **doc**: fix visual studio links (Bartosz Sosnowski) [#1490](https://github.com/nodejs/node-gyp/pull/1490)
* [[`536759c7e9`](https://github.com/nodejs/node-gyp/commit/536759c7e9)] - **configure**: use sys.version\_info to get python version (Yang Guo) [#1504](https://github.com/nodejs/node-gyp/pull/1504)
* [[`94c39c604e`](https://github.com/nodejs/node-gyp/commit/94c39c604e)] - **gyp**: fix ninja build failure (GYP patch) (Daniel Bevenius) [nodejs/node#12484](https://github.com/nodejs/node/pull/12484)
* [[`e8ea74e0fa`](https://github.com/nodejs/node-gyp/commit/e8ea74e0fa)] - **tools**: patch gyp to avoid xcrun errors (Ujjwal Sharma) [nodejs/node#21520](https://github.com/nodejs/node/pull/21520)
* [[`ea9aff44f2`](https://github.com/nodejs/node-gyp/commit/ea9aff44f2)] - **tools**: fix "the the" typos in comments (Masashi Hirano) [nodejs/node#20716](https://github.com/nodejs/node/pull/20716)
* [[`207e5aa4fd`](https://github.com/nodejs/node-gyp/commit/207e5aa4fd)] - **gyp**: implement LD/LDXX for ninja and FIPS (Sam Roberts)
* [[`b416c5f4b7`](https://github.com/nodejs/node-gyp/commit/b416c5f4b7)] - **gyp**: enable cctest to use objects (gyp part) (Daniel Bevenius) [nodejs/node#12450](https://github.com/nodejs/node/pull/12450)
* [[`40692d016b`](https://github.com/nodejs/node-gyp/commit/40692d016b)] - **gyp**: add compile\_commands.json gyp generator (Ben Noordhuis) [nodejs/node#12450](https://github.com/nodejs/node/pull/12450)
* [[`fc3c4e2b10`](https://github.com/nodejs/node-gyp/commit/fc3c4e2b10)] - **gyp**: float gyp patch for long filenames (Anna Henningsen) [nodejs/node#7963](https://github.com/nodejs/node/pull/7963)
* [[`8aedbfdef6`](https://github.com/nodejs/node-gyp/commit/8aedbfdef6)] - **gyp**: backport GYP fix to fix AIX shared suffix (Stewart Addison)
* [[`6cd84b84fc`](https://github.com/nodejs/node-gyp/commit/6cd84b84fc)] - **test**: formatting and minor fixes for execFileSync replacement (Rod Vagg) [#1521](https://github.com/nodejs/node-gyp/pull/1521)
* [[`60e421363f`](https://github.com/nodejs/node-gyp/commit/60e421363f)] - **test**: added test/processExecSync.js for when execFileSync is not available. (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492)
* [[`969447c5bd`](https://github.com/nodejs/node-gyp/commit/969447c5bd)] - **deps**: bump request to 2.8.7, fixes heok/hawk issues (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492)
* [[`340403ccfe`](https://github.com/nodejs/node-gyp/commit/340403ccfe)] - **win**: improve parsing of SDK version (Alessandro Vergani) [#1516](https://github.com/nodejs/node-gyp/pull/1516)

## v3.7.0 2018-06-08

* [[`84cea7b30d`](https://github.com/nodejs/node-gyp/commit/84cea7b30d)] - Remove unused gyp test scripts. (Ben Noordhuis) [#1458](https://github.com/nodejs/node-gyp/pull/1458)
* [[`0540e4ec63`](https://github.com/nodejs/node-gyp/commit/0540e4ec63)] - **gyp**: escape spaces in filenames in make generator (Jeff Senn) [#1436](https://github.com/nodejs/node-gyp/pull/1436)
* [[`88fc6fa0ec`](https://github.com/nodejs/node-gyp/commit/88fc6fa0ec)] - Drop dependency on minimatch. (Brian Woodward) [#1158](https://github.com/nodejs/node-gyp/pull/1158)
* [[`1e203c5148`](https://github.com/nodejs/node-gyp/commit/1e203c5148)] - Fix include path when pointing to Node.js source (Richard Lau) [#1055](https://github.com/nodejs/node-gyp/pull/1055)
* [[`53d8cb967c`](https://github.com/nodejs/node-gyp/commit/53d8cb967c)] - Prefix build targets with /t: on Windows (Natalie Wolfe) [#1164](https://github.com/nodejs/node-gyp/pull/1164)
* [[`53a5f8ff38`](https://github.com/nodejs/node-gyp/commit/53a5f8ff38)] - **gyp**: add support for .mm files to msvs generator (Julien Racle) [#1167](https://github.com/nodejs/node-gyp/pull/1167)
* [[`dd8561e528`](https://github.com/nodejs/node-gyp/commit/dd8561e528)] - **zos**: don't use universal-new-lines mode (John Barboza) [#1451](https://github.com/nodejs/node-gyp/pull/1451)
* [[`e5a69010ed`](https://github.com/nodejs/node-gyp/commit/e5a69010ed)] - **zos**: add search locations for libnode.x (John Barboza) [#1451](https://github.com/nodejs/node-gyp/pull/1451)
* [[`79febace53`](https://github.com/nodejs/node-gyp/commit/79febace53)] - **doc**: update macOS information in README (Josh Parnham) [#1323](https://github.com/nodejs/node-gyp/pull/1323)
* [[`9425448945`](https://github.com/nodejs/node-gyp/commit/9425448945)] - **gyp**: don't print xcodebuild not found errors (Gibson Fahnestock) [#1370](https://github.com/nodejs/node-gyp/pull/1370)
* [[`6f1286f5b2`](https://github.com/nodejs/node-gyp/commit/6f1286f5b2)] - Fix infinite install loop. (Ben Noordhuis) [#1384](https://github.com/nodejs/node-gyp/pull/1384)
* [[`2580b9139e`](https://github.com/nodejs/node-gyp/commit/2580b9139e)] - Update `--nodedir` description in README. (Ben Noordhuis) [#1372](https://github.com/nodejs/node-gyp/pull/1372)
* [[`a61360391a`](https://github.com/nodejs/node-gyp/commit/a61360391a)] - Update README with another way to install on windows (JeffAtDeere) [#1352](https://github.com/nodejs/node-gyp/pull/1352)
* [[`47496bf6dc`](https://github.com/nodejs/node-gyp/commit/47496bf6dc)] - Fix IndexError when parsing GYP files. (Ben Noordhuis) [#1267](https://github.com/nodejs/node-gyp/pull/1267)
* [[`b2024dee7b`](https://github.com/nodejs/node-gyp/commit/b2024dee7b)] - **zos**: support platform (John Barboza) [#1276](https://github.com/nodejs/node-gyp/pull/1276)
* [[`90d86512f4`](https://github.com/nodejs/node-gyp/commit/90d86512f4)] - **win**: run PS with `-NoProfile` (Refael Ackermann) [#1292](https://github.com/nodejs/node-gyp/pull/1292)
* [[`2da5f86ef7`](https://github.com/nodejs/node-gyp/commit/2da5f86ef7)] - **doc**: add github PR and Issue templates (Gibson Fahnestock) [#1228](https://github.com/nodejs/node-gyp/pull/1228)
* [[`a46a770d68`](https://github.com/nodejs/node-gyp/commit/a46a770d68)] - **doc**: update proposed DCO and CoC (Mikeal Rogers) [#1229](https://github.com/nodejs/node-gyp/pull/1229)
* [[`7e803d58e0`](https://github.com/nodejs/node-gyp/commit/7e803d58e0)] - **doc**: headerify the Install instructions (Nick Schonning) [#1225](https://github.com/nodejs/node-gyp/pull/1225)
* [[`f27599193a`](https://github.com/nodejs/node-gyp/commit/f27599193a)] - **gyp**: update xml string encoding conversion (Liu Chao) [#1203](https://github.com/nodejs/node-gyp/pull/1203)
* [[`0a07e481f7`](https://github.com/nodejs/node-gyp/commit/0a07e481f7)] - **configure**: don't set ensure if tarball is set (Gibson Fahnestock) [#1220](https://github.com/nodejs/node-gyp/pull/1220)
## v3.6.3 2018-06-08

* [[`90cd2e8da9`](https://github.com/nodejs/node-gyp/commit/90cd2e8da9)] - **gyp**: fix regex to match multi-digit versions (Jonas Hermsmeier) [#1455](https://github.com/nodejs/node-gyp/pull/1455)
* [[`7900122337`](https://github.com/nodejs/node-gyp/commit/7900122337)] - deps: pin `request` version range (Refael Ackerman) [#1300](https://github.com/nodejs/node-gyp/pull/1300)

## v3.6.2 2017-06-01

* [[`72afdd62cd`](https://github.com/nodejs/node-gyp/commit/72afdd62cd)] - **build**: rename copyNodeLib() to doBuild() (Liu Chao) [#1206](https://github.com/nodejs/node-gyp/pull/1206)
* [[`bad903ac70`](https://github.com/nodejs/node-gyp/commit/bad903ac70)] - **win**: more robust parsing of SDK version (Refael Ackermann) [#1198](https://github.com/nodejs/node-gyp/pull/1198)
* [[`241752f381`](https://github.com/nodejs/node-gyp/commit/241752f381)] - Log dist-url. (Ben Noordhuis) [#1170](https://github.com/nodejs/node-gyp/pull/1170)
* [[`386746c7d1`](https://github.com/nodejs/node-gyp/commit/386746c7d1)] - **configure**: use full path in node_lib_file GYP var (Pavel Medvedev) [#964](https://github.com/nodejs/node-gyp/pull/964)
* [[`0913b2dd99`](https://github.com/nodejs/node-gyp/commit/0913b2dd99)] - **build, win**: use target_arch to link with node.lib (Pavel Medvedev) [#964](https://github.com/nodejs/node-gyp/pull/964)
* [[`c307b302f7`](https://github.com/nodejs/node-gyp/commit/c307b302f7)] - **doc**: blorb about setting `npm_config_OPTION_NAME` (Refael Ackermann) [#1185](https://github.com/nodejs/node-gyp/pull/1185)

## v3.6.1 2017-04-30

* [[`49801716c2`](https://github.com/nodejs/node-gyp/commit/49801716c2)] - **test**: fix test-find-python on v0.10.x buildbot. (Ben Noordhuis) [#1172](https://github.com/nodejs/node-gyp/pull/1172)
* [[`a83a3801fc`](https://github.com/nodejs/node-gyp/commit/a83a3801fc)] - **test**: fix test/test-configure-python on AIX (Richard Lau) [#1131](https://github.com/nodejs/node-gyp/pull/1131)
* [[`8a767145c9`](https://github.com/nodejs/node-gyp/commit/8a767145c9)] - **gyp**: Revert quote_cmd workaround (Kunal Pathak) [#1153](https://github.com/nodejs/node-gyp/pull/1153)
* [[`c09cf7671e`](https://github.com/nodejs/node-gyp/commit/c09cf7671e)] - **doc**: add a note for using `configure` on Windows (Vse Mozhet Byt) [#1152](https://github.com/nodejs/node-gyp/pull/1152)
* [[`da9cb5f411`](https://github.com/nodejs/node-gyp/commit/da9cb5f411)] - Delete superfluous .patch files. (Ben Noordhuis) [#1122](https://github.com/nodejs/node-gyp/pull/1122)

## v3.6.0 2017-03-16

* [[`ae141e1906`](https://github.com/nodejs/node-gyp/commit/ae141e1906)] - **win**: find and setup for VS2017 (Refael Ackermann) [#1130](https://github.com/nodejs/node-gyp/pull/1130)
* [[`ec5fc36a80`](https://github.com/nodejs/node-gyp/commit/ec5fc36a80)] - Add support to build node.js with chakracore for ARM. (Kunal Pathak) [#873](https://github.com/nodejs/node-gyp/pull/873)
* [[`a04ea3051a`](https://github.com/nodejs/node-gyp/commit/a04ea3051a)] - Add support to build node.js with chakracore. (Kunal Pathak) [#873](https://github.com/nodejs/node-gyp/pull/873)
* [[`93d7fa83c8`](https://github.com/nodejs/node-gyp/commit/93d7fa83c8)] - Upgrade semver dependency. (Ben Noordhuis) [#1107](https://github.com/nodejs/node-gyp/pull/1107)
* [[`ff9a6fadfd`](https://github.com/nodejs/node-gyp/commit/ff9a6fadfd)] - Update link of gyp as Google code is shutting down (Peter Dave Hello) [#1061](https://github.com/nodejs/node-gyp/pull/1061)

## v3.5.0 2017-01-10

* [[`762d19a39e`](https://github.com/nodejs/node-gyp/commit/762d19a39e)] - \[doc\] merge History.md and CHANGELOG.md (Rod Vagg)
* [[`80fc5c3d31`](https://github.com/nodejs/node-gyp/commit/80fc5c3d31)] - Fix deprecated dependency warning (Simone Primarosa) [#1069](https://github.com/nodejs/node-gyp/pull/1069)
* [[`05c44944fd`](https://github.com/nodejs/node-gyp/commit/05c44944fd)] - Open the build file with universal-newlines mode (Guy Margalit) [#1053](https://github.com/nodejs/node-gyp/pull/1053)
* [[`37ae7be114`](https://github.com/nodejs/node-gyp/commit/37ae7be114)] - Try python launcher when stock python is python 3. (Ben Noordhuis) [#992](https://github.com/nodejs/node-gyp/pull/992)
* [[`e3778d9907`](https://github.com/nodejs/node-gyp/commit/e3778d9907)] - Add lots of findPython() tests. (Ben Noordhuis) [#992](https://github.com/nodejs/node-gyp/pull/992)
* [[`afc766adf6`](https://github.com/nodejs/node-gyp/commit/afc766adf6)] - Unset executable bit for .bat files (Pavel Medvedev) [#969](https://github.com/nodejs/node-gyp/pull/969)
* [[`ddac348991`](https://github.com/nodejs/node-gyp/commit/ddac348991)] - Use push on PYTHONPATH and add tests (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990)
* [[`b182a19042`](https://github.com/nodejs/node-gyp/commit/b182a19042)] - ***Revert*** "add "path-array" dep" (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990)
* [[`7c08b85c5a`](https://github.com/nodejs/node-gyp/commit/7c08b85c5a)] - ***Revert*** "**configure**: use "path-array" for PYTHONPATH" (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990)
* [[`9c8d275526`](https://github.com/nodejs/node-gyp/commit/9c8d275526)] - Add --devdir flag. (Ben Noordhuis) [#916](https://github.com/nodejs/node-gyp/pull/916)
* [[`f6eab1f9e4`](https://github.com/nodejs/node-gyp/commit/f6eab1f9e4)] - **doc**: add windows-build-tools to readme (Felix Rieseberg) [#970](https://github.com/nodejs/node-gyp/pull/970)
## v3.4.0 2016-06-28

* [[`ce5fd04e94`](https://github.com/nodejs/node-gyp/commit/ce5fd04e94)] - **deps**: update minimatch version (delphiactual) [#961](https://github.com/nodejs/node-gyp/pull/961)
* [[`77383ddd85`](https://github.com/nodejs/node-gyp/commit/77383ddd85)] - Replace fs.accessSync call to fs.statSync (Richard Lau) [#955](https://github.com/nodejs/node-gyp/pull/955)
* [[`0dba4bda57`](https://github.com/nodejs/node-gyp/commit/0dba4bda57)] - **test**: add simple addon test (Richard Lau) [#955](https://github.com/nodejs/node-gyp/pull/955)
* [[`c4344b3889`](https://github.com/nodejs/node-gyp/commit/c4344b3889)] - **doc**: add --target option to README (Gibson Fahnestock) [#958](https://github.com/nodejs/node-gyp/pull/958)
* [[`cc778e9215`](https://github.com/nodejs/node-gyp/commit/cc778e9215)] - Override BUILDING_UV_SHARED, BUILDING_V8_SHARED. (Ben Noordhuis) [#915](https://github.com/nodejs/node-gyp/pull/915)
* [[`af35b2ad32`](https://github.com/nodejs/node-gyp/commit/af35b2ad32)] - Move VC++ Build Tools to Build Tools landing page. (Andrew Pardoe) [#953](https://github.com/nodejs/node-gyp/pull/953)
* [[`f31482e226`](https://github.com/nodejs/node-gyp/commit/f31482e226)] - **win**: work around __pfnDliNotifyHook2 type change (Alexis Campailla) [#952](https://github.com/nodejs/node-gyp/pull/952)
* [[`3df8222fa5`](https://github.com/nodejs/node-gyp/commit/3df8222fa5)] - Allow for npmlog@3.x (Rebecca Turner) [#950](https://github.com/nodejs/node-gyp/pull/950)
* [[`a4fa07b390`](https://github.com/nodejs/node-gyp/commit/a4fa07b390)] - More verbose error on locating msbuild.exe failure. (Mateusz Jaworski) [#930](https://github.com/nodejs/node-gyp/pull/930)
* [[`4ee31329e0`](https://github.com/nodejs/node-gyp/commit/4ee31329e0)] - **doc**: add command options to README.md (Gibson Fahnestock) [#937](https://github.com/nodejs/node-gyp/pull/937)
* [[`c8c7ca86b9`](https://github.com/nodejs/node-gyp/commit/c8c7ca86b9)] - Add --silent option for zero output. (Gibson Fahnestock) [#937](https://github.com/nodejs/node-gyp/pull/937)
* [[`ac29d23a7c`](https://github.com/nodejs/node-gyp/commit/ac29d23a7c)] - Upgrade to glob@7.0.3. (Ben Noordhuis) [#943](https://github.com/nodejs/node-gyp/pull/943)
* [[`15fd56be3d`](https://github.com/nodejs/node-gyp/commit/15fd56be3d)] - Enable V8 deprecation warnings for native modules (Matt Loring) [#920](https://github.com/nodejs/node-gyp/pull/920)
* [[`7f1c1b960c`](https://github.com/nodejs/node-gyp/commit/7f1c1b960c)] - **gyp**: improvements for android generator (Robert Chiras) [#935](https://github.com/nodejs/node-gyp/pull/935)
* [[`088082766c`](https://github.com/nodejs/node-gyp/commit/088082766c)] - Update Windows install instructions (Sara Itani) [#867](https://github.com/nodejs/node-gyp/pull/867)
* [[`625c1515f9`](https://github.com/nodejs/node-gyp/commit/625c1515f9)] - **gyp**: inherit CC/CXX for CC/CXX.host (Johan Bergström) [#908](https://github.com/nodejs/node-gyp/pull/908)
* [[`3bcb1720e4`](https://github.com/nodejs/node-gyp/commit/3bcb1720e4)] - Add support for the Python launcher on Windows (Patrick Westerhoff) [#894](https://github.com/nodejs/node-gyp/pull/894)
## v3.3.1 2016-03-04

* [[`a981ef847a`](https://github.com/nodejs/node-gyp/commit/a981ef847a)] - **gyp**: fix android generator (Robert Chiras) [#889](https://github.com/nodejs/node-gyp/pull/889)

## v3.3.0 2016-02-16

* [[`818d854a4d`](https://github.com/nodejs/node-gyp/commit/818d854a4d)] - Introduce NODEJS_ORG_MIRROR and IOJS_ORG_MIRROR (Rod Vagg) [#878](https://github.com/nodejs/node-gyp/pull/878)
* [[`d1e4cc4b62`](https://github.com/nodejs/node-gyp/commit/d1e4cc4b62)] - **(SEMVER-MINOR)** Download headers tarball for ~0.12.10 || ~0.10.42 (Rod Vagg) [#877](https://github.com/nodejs/node-gyp/pull/877)
* [[`6e28ad1bea`](https://github.com/nodejs/node-gyp/commit/6e28ad1bea)] - Allow for npmlog@2.x (Rebecca Turner) [#861](https://github.com/nodejs/node-gyp/pull/861)
* [[`07371e5812`](https://github.com/nodejs/node-gyp/commit/07371e5812)] - Use -fPIC for NetBSD. (Marcin Cieślak) [#856](https://github.com/nodejs/node-gyp/pull/856)
* [[`8c4b0ffa50`](https://github.com/nodejs/node-gyp/commit/8c4b0ffa50)] - **(SEMVER-MINOR)** Add --cafile command line option. (Ben Noordhuis) [#837](https://github.com/nodejs/node-gyp/pull/837)
* [[`b3ad43498e`](https://github.com/nodejs/node-gyp/commit/b3ad43498e)] - **(SEMVER-MINOR)** Make download() function testable. (Ben Noordhuis) [#837](https://github.com/nodejs/node-gyp/pull/837)

## v3.2.1 2015-12-03

* [[`ab89b477c4`](https://github.com/nodejs/node-gyp/commit/ab89b477c4)] - Upgrade gyp to b3cef02. (Ben Noordhuis) [#831](https://github.com/nodejs/node-gyp/pull/831)
* [[`90078ecb17`](https://github.com/nodejs/node-gyp/commit/90078ecb17)] - Define WIN32_LEAN_AND_MEAN conditionally. (Ben Noordhuis) [#824](https://github.com/nodejs/node-gyp/pull/824)

## v3.2.0 2015-11-25

* [[`268f1ca4c7`](https://github.com/nodejs/node-gyp/commit/268f1ca4c7)] - Use result of `which` when searching for python. (Refael Ackermann) [#668](https://github.com/nodejs/node-gyp/pull/668)
* [[`817ed9bd78`](https://github.com/nodejs/node-gyp/commit/817ed9bd78)] - Add test for python executable search logic. (Ben Noordhuis) [#756](https://github.com/nodejs/node-gyp/pull/756)
* [[`0e2dfda1f3`](https://github.com/nodejs/node-gyp/commit/0e2dfda1f3)] - Fix test/test-options when run through `npm test`. (Ben Noordhuis) [#755](https://github.com/nodejs/node-gyp/pull/755)
* [[`9bfa0876b4`](https://github.com/nodejs/node-gyp/commit/9bfa0876b4)] - Add support for AIX (Michael Dawson) [#753](https://github.com/nodejs/node-gyp/pull/753)
* [[`a8d441a0a2`](https://github.com/nodejs/node-gyp/commit/a8d441a0a2)] - Update README for Windows 10 support. (Jason Williams) [#766](https://github.com/nodejs/node-gyp/pull/766)
* [[`d1d6015276`](https://github.com/nodejs/node-gyp/commit/d1d6015276)] - Update broken links and switch to HTTPS. (andrew morton)

## v3.1.0 2015-11-14

* [[`9049241f91`](https://github.com/nodejs/node-gyp/commit/9049241f91)] - **gyp**: don't use links at all, just copy the files instead (Nathan Zadoks)
* [[`8ef90348d1`](https://github.com/nodejs/node-gyp/commit/8ef90348d1)] - **gyp**: apply https://codereview.chromium.org/11361103/ (Nathan Rajlich)
* [[`a2ed0df84e`](https://github.com/nodejs/node-gyp/commit/a2ed0df84e)] - **gyp**: always install into $PRODUCT_DIR (Nathan Rajlich)
* [[`cc8b2fa83e`](https://github.com/nodejs/node-gyp/commit/cc8b2fa83e)] - Update gyp to b3cef02. (Imran Iqbal) [#781](https://github.com/nodejs/node-gyp/pull/781)
* [[`f5d86eb84e`](https://github.com/nodejs/node-gyp/commit/f5d86eb84e)] - Update to tar@2.0.0. (Edgar Muentes) [#797](https://github.com/nodejs/node-gyp/pull/797)
* [[`2ac7de02c4`](https://github.com/nodejs/node-gyp/commit/2ac7de02c4)] - Fix infinite loop with zero-length options. (Ben Noordhuis) [#745](https://github.com/nodejs/node-gyp/pull/745)
* [[`101bed639b`](https://github.com/nodejs/node-gyp/commit/101bed639b)] - This platform value came from debian package, and now the value (Jérémy Lal) [#738](https://github.com/nodejs/node-gyp/pull/738)
## v3.0.3 2015-09-14

* [[`ad827cda30`](https://github.com/nodejs/node-gyp/commit/ad827cda30)] - tarballUrl global and && when checking for iojs (Lars-Magnus Skog) [#729](https://github.com/nodejs/node-gyp/pull/729)

## v3.0.2 2015-09-12

* [[`6e8c3bf3c6`](https://github.com/nodejs/node-gyp/commit/6e8c3bf3c6)] - add back support for passing additional cmdline args (Rod Vagg) [#723](https://github.com/nodejs/node-gyp/pull/723)
* [[`ff82f2f3b9`](https://github.com/nodejs/node-gyp/commit/ff82f2f3b9)] - fixed broken link in docs to Visual Studio 2013 download (simon-p-r) [#722](https://github.com/nodejs/node-gyp/pull/722)

## v3.0.1 2015-09-08

* [[`846337e36b`](https://github.com/nodejs/node-gyp/commit/846337e36b)] - normalise versions for target == this comparison (Rod Vagg) [#716](https://github.com/nodejs/node-gyp/pull/716)

## v3.0.0 2015-09-08

* [[`9720d0373c`](https://github.com/nodejs/node-gyp/commit/9720d0373c)] - remove node_modules from tree (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
* [[`6dcf220db7`](https://github.com/nodejs/node-gyp/commit/6dcf220db7)] - test version major directly, don't use semver.satisfies() (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
* [[`938dd18d1c`](https://github.com/nodejs/node-gyp/commit/938dd18d1c)] - refactor for clarity, fix dist-url, add env var dist-url functionality (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
* [[`9e9df66a06`](https://github.com/nodejs/node-gyp/commit/9e9df66a06)] - use process.release, make aware of io.js & node v4 differences (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
* [[`1ea7ed01f4`](https://github.com/nodejs/node-gyp/commit/1ea7ed01f4)] - **deps**: update graceful-fs dependency to the latest (Sakthipriyan Vairamani) [#714](https://github.com/nodejs/node-gyp/pull/714)
* [[`0fbc387b35`](https://github.com/nodejs/node-gyp/commit/0fbc387b35)] - Update repository URLs. (Ben Noordhuis) [#715](https://github.com/nodejs/node-gyp/pull/715)
* [[`bbedb8868b`](https://github.com/nodejs/node-gyp/commit/bbedb8868b)] - **(SEMVER-MAJOR)** **win**: enable delay-load hook by default (Jeremiah Senkpiel) [#708](https://github.com/nodejs/node-gyp/pull/708)
* [[`85ed107565`](https://github.com/nodejs/node-gyp/commit/85ed107565)] - Merge pull request #664 from othiym23/othiym23/allow-semver-5 (Nathan Rajlich)
* [[`0c720d234c`](https://github.com/nodejs/node-gyp/commit/0c720d234c)] - allow semver@5 (Forrest L Norvell)
## 2.0.2 / 2015-07-14

* Use HTTPS for dist url (#656, @SonicHedgehog)
* Merge pull request #648 from nevosegal/master
* Merge pull request #650 from magic890/patch-1
* Updated Installation section on README
* Updated link to gyp user documentation
* Fix download error message spelling (#643, @tomxtobin)
* Merge pull request #637 from lygstate/master
* Set NODE_GYP_DIR for addon.gypi to setting absolute path for
  src/win_delay_load_hook.c, and fixes of the long relative path issue on Win32.
  Fixes #636 (#637, @lygstate).

## 2.0.1 / 2015-05-28

* configure: try/catch the semver range.test() call
* README: update for visual studio 2013 (#510, @samccone)

## 2.0.0 / 2015-05-24

* configure: check for python2 executable by default, fallback to python
* configure: don't clobber existing $PYTHONPATH
* configure: use "path-array" for PYTHONPATH
* gyp: fix for non-acsii userprofile name on Windows
* gyp: always install into $PRODUCT_DIR
* gyp: apply https://codereview.chromium.org/11361103/
* gyp: don't use links at all, just copy the files instead
* gyp: update gyp to e1c8fcf7
* Updated README.md with updated Windows build info
* Show URL when a download fails
* package: add a "license" field
* move HMODULE m declaration to top
* Only add "-undefined dynamic_lookup" to loadable_module targets
* win: optionally allow node.exe/iojs.exe to be renamed
* Avoid downloading shasums if using tarPath
* Add target name preprocessor define: `NODE_GYP_MODULE_NAME`
* Show better error message in case of bad network settings
34
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/CONTRIBUTING.md
generated
vendored
Normal file
@@ -0,0 +1,34 @@
# Contributing to node-gyp

## Code of Conduct

Please read the
[Code of Conduct](https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md)
which explains the minimum behavior expectations for node-gyp contributors.

<a id="developers-certificate-of-origin"></a>
## Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

* (a) The contribution was created in whole or in part by me and I
  have the right to submit it under the open source license
  indicated in the file; or

* (b) The contribution is based upon previous work that, to the best
  of my knowledge, is covered under an appropriate open source
  license and I have the right under that license to submit that
  work with modifications, whether created in whole or in part
  by me, under the same open source license (unless I am
  permitted to submit under a different license), as indicated
  in the file; or

* (c) The contribution was provided directly to me by some other
  person who certified (a), (b) or (c) and I have not modified
  it.

* (d) I understand and agree that this project and the contribution
  are public and that a record of the contribution (including all
  personal information I submit with it, including my sign-off) is
  maintained indefinitely and may be redistributed consistent with
  this project or the open source license(s) involved.
24
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,24 @@
(The MIT License)

Copyright (c) 2012 Nathan Rajlich <nathan@tootallnate.net>

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
258
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/README.md
generated
vendored
Normal file
@@ -0,0 +1,258 @@
# `node-gyp` - Node.js native addon build tool

[Tests](https://github.com/nodejs/node-gyp/actions?query=workflow%3ATests+branch%3Amaster)

`node-gyp` is a cross-platform command-line tool written in Node.js for
compiling native addon modules for Node.js. It contains a vendored copy of the
[gyp-next](https://github.com/nodejs/gyp-next) project that was previously used
by the Chromium team, extended to support the development of Node.js native addons.

Note that `node-gyp` is _not_ used to build Node.js itself.

Multiple target versions of Node.js are supported (i.e. `0.8`, ..., `4`, `5`, `6`,
etc.), regardless of what version of Node.js is actually installed on your system
(`node-gyp` downloads the necessary development files or headers for the target version).

## Features

* The same build commands work on any of the supported platforms
* Supports the targeting of different versions of Node.js

## Installation

You can install `node-gyp` using `npm`:

``` bash
npm install -g node-gyp
```

Depending on your operating system, you will need to install:
### On Unix
|
||||
|
||||
* Python v3.7, v3.8, v3.9, or v3.10
|
||||
* `make`
|
||||
* A proper C/C++ compiler toolchain, like [GCC](https://gcc.gnu.org)
|
||||
|
||||
### On macOS
|
||||
|
||||
**ATTENTION**: If your Mac has been _upgraded_ to macOS Catalina (10.15) or higher, please read [macOS_Catalina.md](macOS_Catalina.md).
|
||||
|
||||
* Python v3.7, v3.8, v3.9, or v3.10
|
||||
* `XCode Command Line Tools` which will install `clang`, `clang++`, and `make`.
|
||||
* Install the `XCode Command Line Tools` standalone by running `xcode-select --install`. -- OR --
|
||||
* Alternatively, if you already have the [full Xcode installed](https://developer.apple.com/xcode/download/), you can install the Command Line Tools under the menu `Xcode -> Open Developer Tool -> More Developer Tools...`.
|
||||
|
||||
|
||||
### On Windows
|
||||
|
||||
Install the current version of Python from the [Microsoft Store package](https://www.microsoft.com/en-us/p/python-310/9pjpw5ldxlz5).
|
||||
|
||||
Install tools and configuration manually:
|
||||
* Install Visual C++ Build Environment: [Visual Studio Build Tools](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools)
|
||||
(using "Visual C++ build tools" workload) or [Visual Studio Community](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=Community)
|
||||
(using the "Desktop development with C++" workload)
|
||||
* Launch cmd, `npm config set msvs_version 2017`
|
||||
|
||||
If the above steps didn't work for you, please visit [Microsoft's Node.js Guidelines for Windows](https://github.com/Microsoft/nodejs-guidelines/blob/master/windows-environment.md#compiling-native-addon-modules) for additional tips.
|
||||
|
||||
To target native ARM64 Node.js on Windows 10 on ARM, add the components "Visual C++ compilers and libraries for ARM64" and "Visual C++ ATL for ARM64".
|
||||
|
||||
### Configuring Python Dependency
|
||||
|
||||
`node-gyp` requires that you have installed a compatible version of Python, one of: v3.7, v3.8,
|
||||
v3.9, or v3.10. If you have multiple Python versions installed, you can identify which Python
|
||||
version `node-gyp` should use in one of the following ways:
|
||||
|
||||
1. by setting the `--python` command-line option, e.g.:
|
||||
|
||||
``` bash
|
||||
node-gyp <command> --python /path/to/executable/python
|
||||
```
|
||||
|
||||
2. If `node-gyp` is called by way of `npm`, *and* you have multiple versions of
|
||||
Python installed, then you can set `npm`'s 'python' config key to the appropriate
|
||||
value:
|
||||
|
||||
``` bash
|
||||
npm config set python /path/to/executable/python
|
||||
```
|
||||
|
||||
3. If the `PYTHON` environment variable is set to the path of a Python executable,
|
||||
then that version will be used, if it is a compatible version.
|
||||
|
||||
4. If the `NODE_GYP_FORCE_PYTHON` environment variable is set to the path of a
|
||||
Python executable, it will be used instead of any of the other configured or
|
||||
builtin Python search paths. If it's not a compatible version, no further
|
||||
searching will be done.
|
||||
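
For instance, a minimal sketch of the two environment-variable approaches above (the interpreter paths are illustrative, not defaults):

```bash
# Option 3: point node-gyp at a compatible interpreter via PYTHON
export PYTHON=/usr/local/bin/python3.10
node-gyp configure

# Option 4: force a specific interpreter, skipping every other search path
export NODE_GYP_FORCE_PYTHON=/usr/local/bin/python3.9
node-gyp rebuild
```
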
### Build for Third Party Node.js Runtimes

When building modules for third party Node.js runtimes like Electron, which have
different build configurations from the official Node.js distribution, you
should use `--dist-url` or `--nodedir` flags to specify the headers of the
runtime to build for.

Also when `--dist-url` or `--nodedir` flags are passed, node-gyp will use the
`config.gypi` shipped in the headers distribution to generate build
configurations, which is different from the default mode that would use the
`process.config` object of the running Node.js instance.

Some old versions of Electron shipped malformed `config.gypi` in their headers
distributions, and you might need to pass `--force-process-config` to node-gyp
to work around configuration errors.
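
As a rough sketch, a rebuild against an alternative runtime's headers might look like the following; the header URL and target version are illustrative, so check your runtime's documentation for the exact values:

```bash
# --dist-url points at the runtime's published headers; --target selects its version
node-gyp rebuild --target=22.0.0 --arch=x64 \
  --dist-url=https://electronjs.org/headers
```
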
## How to Use

To compile your native addon, first go to its root directory:

``` bash
cd my_node_addon
```

The next step is to generate the appropriate project build files for the current
platform. Use `configure` for that:

``` bash
node-gyp configure
```

Auto-detection fails for Visual C++ Build Tools 2015, so `--msvs_version=2015`
needs to be added (not needed when run by npm as configured above):
``` bash
node-gyp configure --msvs_version=2015
```

__Note__: The `configure` step looks for a `binding.gyp` file in the current
directory to process. See below for instructions on creating a `binding.gyp` file.

Now you will have either a `Makefile` (on Unix platforms) or a `vcxproj` file
(on Windows) in the `build/` directory. Next, invoke the `build` command:

``` bash
node-gyp build
```

Now you have your compiled `.node` bindings file! The compiled bindings end up
in `build/Debug/` or `build/Release/`, depending on the build mode. At this point,
you can require the `.node` file with Node.js and run your tests!

__Note:__ To create a _Debug_ build of the bindings file, pass the `--debug` (or
`-d`) switch when running either the `configure`, `build` or `rebuild` commands.

## The `binding.gyp` file

A `binding.gyp` file describes the configuration to build your module, in a
JSON-like format. This file gets placed in the root of your package, alongside
`package.json`.

A barebones `gyp` file appropriate for building a Node.js addon could look like:

```python
{
  "targets": [
    {
      "target_name": "binding",
      "sources": [ "src/binding.cc" ]
    }
  ]
}
```
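
With a `binding.gyp` like the one above in place, a quick smoke test could look like this (assuming the `binding` target name and a default Release build):

```bash
node-gyp rebuild                              # clean + configure + build in one go
node -e "require('./build/Release/binding')"  # load the compiled .node file
```
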
## Further reading

The **[docs](./docs/)** directory contains additional documentation on specific node-gyp topics that may be useful if you are experiencing problems installing or building addons using node-gyp.

Some additional resources for Node.js native addons and writing `gyp` configuration files:

* ["Going Native" a nodeschool.io tutorial](http://nodeschool.io/#goingnative)
* ["Hello World" node addon example](https://github.com/nodejs/node/tree/master/test/addons/hello-world)
* [gyp user documentation](https://gyp.gsrc.io/docs/UserDocumentation.md)
* [gyp input format reference](https://gyp.gsrc.io/docs/InputFormatReference.md)
* [*"binding.gyp" files out in the wild* wiki page](./docs/binding.gyp-files-in-the-wild.md)

## Commands

`node-gyp` responds to the following commands:

| **Command**   | **Description**
|:--------------|:---------------------------------------------------------------
| `help`        | Shows the help dialog
| `build`       | Invokes `make`/`msbuild.exe` and builds the native addon
| `clean`       | Removes the `build` directory if it exists
| `configure`   | Generates project build files for the current platform
| `rebuild`     | Runs `clean`, `configure` and `build` all in a row
| `install`     | Installs Node.js header files for the given version
| `list`        | Lists the currently installed Node.js header versions
| `remove`      | Removes the Node.js header files for the given version
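
For example, the header-management commands are often used together like this (the version number is illustrative):

```bash
node-gyp list              # show the Node.js header versions already installed
node-gyp install 18.12.0   # fetch headers for a specific target version
node-gyp remove 18.12.0    # remove them again when no longer needed
```
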
## Command Options

`node-gyp` accepts the following command options:

| **Option**                         | **Description**
|:-----------------------------------|:------------------------------------------
| `-j n`, `--jobs n`                 | Run `make` in parallel. The value `max` will use all available CPU cores
| `--target=v6.2.1`                  | Node.js version to build for (default is `process.version`)
| `--silly`, `--loglevel=silly`      | Log all progress to console
| `--verbose`, `--loglevel=verbose`  | Log most progress to console
| `--silent`, `--loglevel=silent`    | Don't log anything to console
| `-d`, `--debug`                    | Make Debug build (default is `Release`)
| `--release`, `--no-debug`          | Make Release build
| `-C $dir`, `--directory=$dir`      | Run command in different directory
| `--make=$make`                     | Override `make` command (e.g. `gmake`)
| `--thin=yes`                       | Enable thin static libraries
| `--arch=$arch`                     | Set target architecture (e.g. ia32)
| `--tarball=$path`                  | Get headers from a local tarball
| `--devdir=$path`                   | SDK download directory (default is OS cache directory)
| `--ensure`                         | Don't reinstall headers if already present
| `--dist-url=$url`                  | Download header tarball from custom URL
| `--proxy=$url`                     | Set HTTP(S) proxy for downloading header tarball
| `--noproxy=$urls`                  | Set URLs to ignore proxies when downloading header tarball
| `--cafile=$cafile`                 | Override default CA chain (to download tarball)
| `--nodedir=$path`                  | Set the path to the node source code
| `--python=$path`                   | Set path to the Python binary
| `--msvs_version=$version`          | Set Visual Studio version (Windows only)
| `--solution=$solution`             | Set Visual Studio Solution version (Windows only)
| `--force-process-config`           | Force using runtime's `process.config` object to generate `config.gypi` file
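
Options can be combined on a single invocation, for instance (target version and paths illustrative):

```bash
node-gyp rebuild --target=18.12.0 --arch=x64 --devdir=/tmp/.gyp --loglevel=verbose
```
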
## Configuration

### Environment variables

Use the form `npm_config_OPTION_NAME` for any of the command options listed
above (dashes in option names should be replaced by underscores).

For example, to set `devdir` equal to `/tmp/.gyp`, you would:

Run this on Unix:

```bash
export npm_config_devdir=/tmp/.gyp
```

Or this on Windows:

```console
set npm_config_devdir=c:\temp\.gyp
```
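
The same mechanism also works for a single run without exporting anything permanently; a small sketch (values illustrative):

```bash
# applies only to this one invocation
npm_config_target=18.12.0 npm_config_arch=x64 node-gyp rebuild
```
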
### `npm` configuration

Use the form `OPTION_NAME` for any of the command options listed above.

For example, to set `devdir` equal to `/tmp/.gyp`, you would run:

```bash
npm config set [--global] devdir /tmp/.gyp
```

**Note:** Configuration set via `npm` will only be used when `node-gyp`
is run via `npm`, not when `node-gyp` is run directly.

## License

`node-gyp` is available under the MIT license. See the [LICENSE
file](LICENSE) for details.
2
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/SECURITY.md
generated
vendored
Normal file
2
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/SECURITY.md
generated
vendored
Normal file
@@ -0,0 +1,2 @@
If you believe you have found a security issue in the software in this
repository, please consult https://github.com/nodejs/node/blob/HEAD/SECURITY.md.
204
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/addon.gypi
generated
vendored
Normal file
204
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/addon.gypi
generated
vendored
Normal file
@@ -0,0 +1,204 @@
|
||||
{
|
||||
'variables' : {
|
||||
'node_engine_include_dir%': 'deps/v8/include',
|
||||
'node_host_binary%': 'node',
|
||||
'node_with_ltcg%': 'true',
|
||||
},
|
||||
'target_defaults': {
|
||||
'type': 'loadable_module',
|
||||
'win_delay_load_hook': 'true',
|
||||
'product_prefix': '',
|
||||
|
||||
'conditions': [
|
||||
[ 'node_engine=="chakracore"', {
|
||||
'variables': {
|
||||
'node_engine_include_dir%': 'deps/chakrashim/include'
|
||||
},
|
||||
}]
|
||||
],
|
||||
|
||||
'include_dirs': [
|
||||
'<(node_root_dir)/include/node',
|
||||
'<(node_root_dir)/src',
|
||||
'<(node_root_dir)/deps/openssl/config',
|
||||
'<(node_root_dir)/deps/openssl/openssl/include',
|
||||
'<(node_root_dir)/deps/uv/include',
|
||||
'<(node_root_dir)/deps/zlib',
|
||||
'<(node_root_dir)/<(node_engine_include_dir)'
|
||||
],
|
||||
'defines!': [
|
||||
'BUILDING_UV_SHARED=1', # Inherited from common.gypi.
|
||||
'BUILDING_V8_SHARED=1', # Inherited from common.gypi.
|
||||
],
|
||||
'defines': [
|
||||
'NODE_GYP_MODULE_NAME=>(_target_name)',
|
||||
'USING_UV_SHARED=1',
|
||||
'USING_V8_SHARED=1',
|
||||
# Warn when using deprecated V8 APIs.
|
||||
'V8_DEPRECATION_WARNINGS=1'
|
||||
],
|
||||
|
||||
'target_conditions': [
|
||||
['_type=="loadable_module"', {
|
||||
'product_extension': 'node',
|
||||
'defines': [
|
||||
'BUILDING_NODE_EXTENSION'
|
||||
],
|
||||
'xcode_settings': {
|
||||
'OTHER_LDFLAGS': [
|
||||
'-undefined dynamic_lookup'
|
||||
],
|
||||
},
|
||||
}],
|
||||
|
||||
['_type=="static_library"', {
|
||||
# set to `1` to *disable* the -T thin archive 'ld' flag.
|
||||
# older linkers don't support this flag.
|
||||
'standalone_static_library': '<(standalone_static_library)'
|
||||
}],
|
||||
|
||||
['_type!="executable"', {
|
||||
'conditions': [
|
||||
[ 'OS=="android"', {
|
||||
'cflags!': [ '-fPIE' ],
|
||||
}]
|
||||
]
|
||||
}],
|
||||
|
||||
['_win_delay_load_hook=="true"', {
|
||||
# If the addon specifies `'win_delay_load_hook': 'true'` in its
|
||||
# binding.gyp, link a delay-load hook into the DLL. This hook ensures
|
||||
# that the addon will work regardless of whether the node/iojs binary
|
||||
# is named node.exe, iojs.exe, or something else.
|
||||
'conditions': [
|
||||
[ 'OS=="win"', {
|
||||
'defines': [ 'HOST_BINARY=\"<(node_host_binary)<(EXECUTABLE_SUFFIX)\"', ],
|
||||
'sources': [
|
||||
'<(node_gyp_dir)/src/win_delay_load_hook.cc',
|
||||
],
|
||||
'msvs_settings': {
|
||||
'VCLinkerTool': {
|
||||
'DelayLoadDLLs': [ '<(node_host_binary)<(EXECUTABLE_SUFFIX)' ],
|
||||
# Don't print a linker warning when no imports from either .exe
|
||||
# are used.
|
||||
'AdditionalOptions': [ '/ignore:4199' ],
|
||||
},
|
||||
},
|
||||
}],
|
||||
],
|
||||
}],
|
||||
],
|
||||
|
||||
'conditions': [
|
||||
[ 'OS=="mac"', {
|
||||
'defines': [
|
||||
'_DARWIN_USE_64_BIT_INODE=1'
|
||||
],
|
||||
'xcode_settings': {
|
||||
'DYLIB_INSTALL_NAME_BASE': '@rpath'
|
||||
},
|
||||
}],
|
||||
[ 'OS=="aix"', {
|
||||
'ldflags': [
|
||||
'-Wl,-bimport:<(node_exp_file)'
|
||||
],
|
||||
}],
|
||||
[ 'OS=="os400"', {
|
||||
'ldflags': [
|
||||
'-Wl,-bimport:<(node_exp_file)'
|
||||
],
|
||||
}],
|
||||
[ 'OS=="zos"', {
|
||||
'conditions': [
|
||||
[ '"<!(echo $CC)" != "clang" and \
|
||||
"<!(echo $CC)" != "ibm-clang64" and \
|
||||
"<!(echo $CC)" != "ibm-clang"', {
|
||||
'cflags': [
|
||||
'-q64',
|
||||
'-Wc,DLL',
|
||||
'-qlonglong',
|
||||
'-qenum=int',
|
||||
'-qxclang=-fexec-charset=ISO8859-1'
|
||||
],
|
||||
'ldflags': [
|
||||
'-q64',
|
||||
'<(node_exp_file)',
|
||||
],
|
||||
}, {
|
||||
'cflags': [
|
||||
'-m64',
|
||||
],
|
||||
'ldflags': [
|
||||
'-m64',
|
||||
'<(node_exp_file)',
|
||||
],
|
||||
}],
|
||||
],
|
||||
'defines': [
|
||||
'_ALL_SOURCE',
|
||||
'MAP_FAILED=-1',
|
||||
'_UNIX03_SOURCE',
|
||||
],
|
||||
}],
|
||||
[ 'OS=="win"', {
|
||||
'conditions': [
|
||||
['node_engine=="chakracore"', {
|
||||
'library_dirs': [ '<(node_root_dir)/$(ConfigurationName)' ],
|
||||
'libraries': [ '<@(node_engine_libs)' ],
|
||||
}],
|
||||
['node_with_ltcg=="true"', {
|
||||
'msvs_settings': {
|
||||
'VCCLCompilerTool': {
|
||||
'WholeProgramOptimization': 'true' # /GL, whole program optimization, needed for LTCG
|
||||
},
|
||||
'VCLibrarianTool': {
|
||||
'AdditionalOptions': [
|
||||
'/LTCG:INCREMENTAL', # incremental link-time code generation
|
||||
]
|
||||
},
|
||||
'VCLinkerTool': {
|
||||
'OptimizeReferences': 2, # /OPT:REF
|
||||
'EnableCOMDATFolding': 2, # /OPT:ICF
|
||||
'LinkIncremental': 1, # disable incremental linking
|
||||
'AdditionalOptions': [
|
||||
'/LTCG:INCREMENTAL', # incremental link-time code generation
|
||||
]
|
||||
}
|
||||
}
|
||||
}]
|
||||
],
|
||||
'libraries': [
|
||||
'-lkernel32.lib',
|
||||
'-luser32.lib',
|
||||
'-lgdi32.lib',
|
||||
'-lwinspool.lib',
|
||||
'-lcomdlg32.lib',
|
||||
'-ladvapi32.lib',
|
||||
'-lshell32.lib',
|
||||
'-lole32.lib',
|
||||
'-loleaut32.lib',
|
||||
'-luuid.lib',
|
||||
'-lodbc32.lib',
|
||||
'-lDelayImp.lib',
|
||||
'-l"<(node_lib_file)"'
|
||||
],
|
||||
'msvs_disabled_warnings': [
|
||||
# warning C4251: 'node::ObjectWrap::handle_' : class 'v8::Persistent<T>'
|
||||
# needs to have dll-interface to be used by
|
||||
# clients of class 'node::ObjectWrap'
|
||||
4251
|
||||
],
|
||||
}, {
|
||||
# OS!="win"
|
||||
'defines': [
|
||||
'_LARGEFILE_SOURCE',
|
||||
'_FILE_OFFSET_BITS=64'
|
||||
],
|
||||
}],
|
||||
[ 'OS in "freebsd openbsd netbsd solaris android" or \
|
||||
(OS=="linux" and target_arch!="ia32")', {
|
||||
'cflags': [ '-fPIC' ],
|
||||
}],
|
||||
]
|
||||
}
|
||||
}
|
||||
140
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/bin/node-gyp.js
generated
vendored
Executable file
140
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/bin/node-gyp.js
generated
vendored
Executable file
@@ -0,0 +1,140 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
'use strict'
|
||||
|
||||
process.title = 'node-gyp'
|
||||
|
||||
const envPaths = require('env-paths')
|
||||
const gyp = require('../')
|
||||
const log = require('npmlog')
|
||||
const os = require('os')
|
||||
|
||||
/**
|
||||
* Process and execute the selected commands.
|
||||
*/
|
||||
|
||||
const prog = gyp()
|
||||
var completed = false
|
||||
prog.parseArgv(process.argv)
|
||||
prog.devDir = prog.opts.devdir
|
||||
|
||||
var homeDir = os.homedir()
|
||||
if (prog.devDir) {
|
||||
prog.devDir = prog.devDir.replace(/^~/, homeDir)
|
||||
} else if (homeDir) {
|
||||
prog.devDir = envPaths('node-gyp', { suffix: '' }).cache
|
||||
} else {
|
||||
throw new Error(
|
||||
"node-gyp requires that the user's home directory is specified " +
|
||||
'in either of the environmental variables HOME or USERPROFILE. ' +
|
||||
'Overide with: --devdir /path/to/.node-gyp')
|
||||
}
|
||||
|
||||
if (prog.todo.length === 0) {
|
||||
if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) {
|
||||
console.log('v%s', prog.version)
|
||||
} else {
|
||||
console.log('%s', prog.usage())
|
||||
}
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
log.info('it worked if it ends with', 'ok')
|
||||
log.verbose('cli', process.argv)
|
||||
log.info('using', 'node-gyp@%s', prog.version)
|
||||
log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch)
|
||||
|
||||
/**
|
||||
* Change dir if -C/--directory was passed.
|
||||
*/
|
||||
|
||||
var dir = prog.opts.directory
|
||||
if (dir) {
|
||||
var fs = require('fs')
|
||||
try {
|
||||
var stat = fs.statSync(dir)
|
||||
if (stat.isDirectory()) {
|
||||
log.info('chdir', dir)
|
||||
process.chdir(dir)
|
||||
} else {
|
||||
log.warn('chdir', dir + ' is not a directory')
|
||||
}
|
||||
} catch (e) {
|
||||
if (e.code === 'ENOENT') {
|
||||
log.warn('chdir', dir + ' is not a directory')
|
||||
} else {
|
||||
log.warn('chdir', 'error during chdir() "%s"', e.message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function run () {
|
||||
var command = prog.todo.shift()
|
||||
if (!command) {
|
||||
// done!
|
||||
completed = true
|
||||
log.info('ok')
|
||||
return
|
||||
}
|
||||
|
||||
prog.commands[command.name](command.args, function (err) {
|
||||
if (err) {
|
||||
log.error(command.name + ' error')
|
||||
log.error('stack', err.stack)
|
||||
errorMessage()
|
||||
log.error('not ok')
|
||||
return process.exit(1)
|
||||
}
|
||||
if (command.name === 'list') {
|
||||
var versions = arguments[1]
|
||||
if (versions.length > 0) {
|
||||
versions.forEach(function (version) {
|
||||
console.log(version)
|
||||
})
|
||||
} else {
|
||||
console.log('No node development files installed. Use `node-gyp install` to install a version.')
|
||||
}
|
||||
} else if (arguments.length >= 2) {
|
||||
console.log.apply(console, [].slice.call(arguments, 1))
|
||||
}
|
||||
|
||||
// now run the next command in the queue
|
||||
process.nextTick(run)
|
||||
})
|
||||
}
|
||||
|
||||
process.on('exit', function (code) {
|
||||
if (!completed && !code) {
|
||||
log.error('Completion callback never invoked!')
|
||||
issueMessage()
|
||||
process.exit(6)
|
||||
}
|
||||
})
|
||||
|
||||
process.on('uncaughtException', function (err) {
|
||||
log.error('UNCAUGHT EXCEPTION')
|
||||
log.error('stack', err.stack)
|
||||
issueMessage()
|
||||
process.exit(7)
|
||||
})
|
||||
|
||||
function errorMessage () {
|
||||
// copied from npm's lib/utils/error-handler.js
|
||||
var os = require('os')
|
||||
log.error('System', os.type() + ' ' + os.release())
|
||||
log.error('command', process.argv
|
||||
.map(JSON.stringify).join(' '))
|
||||
log.error('cwd', process.cwd())
|
||||
log.error('node -v', process.version)
|
||||
log.error('node-gyp -v', 'v' + prog.package.version)
|
||||
}
|
||||
|
||||
function issueMessage () {
|
||||
errorMessage()
|
||||
log.error('', ['Node-gyp failed to build your package.',
|
||||
'Try to update npm and/or node-gyp and if it does not help file an issue with the package author.'
|
||||
].join('\n'))
|
||||
}
|
||||
|
||||
// start running the given commands!
|
||||
run()
|
||||
94
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/Error-pre-versions-of-node-cannot-be-installed.md
generated
vendored
Normal file
94
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/Error-pre-versions-of-node-cannot-be-installed.md
generated
vendored
Normal file
@@ -0,0 +1,94 @@
When using `node-gyp` you might see an error like this when attempting to compile/install a node.js native addon:

```
$ npm install bcrypt
npm http GET https://registry.npmjs.org/bcrypt/0.7.5
npm http 304 https://registry.npmjs.org/bcrypt/0.7.5
npm http GET https://registry.npmjs.org/bindings/1.0.0
npm http 304 https://registry.npmjs.org/bindings/1.0.0

> bcrypt@0.7.5 install /home/ubuntu/public/song-swap/node_modules/bcrypt
> node-gyp rebuild

gyp ERR! configure error
gyp ERR! stack Error: "pre" versions of node cannot be installed, use the --nodedir flag instead
gyp ERR! stack at install (/usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/install.js:69:16)
gyp ERR! stack at Object.self.commands.(anonymous function) [as install] (/usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/node-gyp.js:56:37)
gyp ERR! stack at getNodeDir (/usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/configure.js:219:20)
gyp ERR! stack at /usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/configure.js:105:9
gyp ERR! stack at ChildProcess.exithandler (child_process.js:630:7)
gyp ERR! stack at ChildProcess.EventEmitter.emit (events.js:99:17)
gyp ERR! stack at maybeClose (child_process.js:730:16)
gyp ERR! stack at Process.ChildProcess._handle.onexit (child_process.js:797:5)
gyp ERR! System Linux 3.5.0-21-generic
gyp ERR! command "node" "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js" "rebuild"
gyp ERR! cwd /home/ubuntu/public/song-swap/node_modules/bcrypt
gyp ERR! node -v v0.11.2-pre
gyp ERR! node-gyp -v v0.9.5
gyp ERR! not ok
npm ERR! bcrypt@0.7.5 install: `node-gyp rebuild`
npm ERR! `sh "-c" "node-gyp rebuild"` failed with 1
npm ERR!
npm ERR! Failed at the bcrypt@0.7.5 install script.
npm ERR! This is most likely a problem with the bcrypt package,
npm ERR! not with npm itself.
npm ERR! Tell the author that this fails on your system:
npm ERR! node-gyp rebuild
npm ERR! You can get their info via:
npm ERR! npm owner ls bcrypt
npm ERR! There is likely additional logging output above.

npm ERR! System Linux 3.5.0-21-generic
npm ERR! command "/usr/local/bin/node" "/usr/local/bin/npm" "install" "bcrypt"
npm ERR! cwd /home/ubuntu/public/song-swap
npm ERR! node -v v0.11.2-pre
npm ERR! npm -v 1.2.18
npm ERR! code ELIFECYCLE
npm ERR!
npm ERR! Additional logging details can be found in:
npm ERR! /home/ubuntu/public/song-swap/npm-debug.log
npm ERR! not ok code 0
```

The main error here is:

```
Error: "pre" versions of node cannot be installed, use the --nodedir flag instead
```

This error is caused when you attempt to compile a native addon using a version of node.js with `-pre` at the end of the version number:

``` bash
$ node -v
v0.10.4-pre
```

## How to avoid (the short answer)

To avoid this error completely, just use a stable release of node.js, e.g. `v0.10.4`, and __not__ `v0.10.4-pre`.

## How to fix (the long answer)

This error happens because `node-gyp` does not know what header files were used to compile your "pre" version of node, and therefore it needs you to specify the node source code directory path using the `--nodedir` flag.

For example, if I compiled my development ("pre") version of node.js using the source code in `/Users/nrajlich/node`, then I could invoke `node-gyp` like:

``` bash
$ node-gyp rebuild --nodedir=/Users/nrajlich/node
```

Or install a native addon through `npm` like:

``` bash
$ npm install bcrypt --nodedir=/Users/nrajlich/node
```

### Always use `--nodedir`

__Note:__ This is for advanced users who use `-pre` versions of node more often than tagged releases.

If you're invoking `node-gyp` through `npm`, then you can leverage `npm`'s configuration system and not have to specify the `--nodedir` flag all the time:

``` bash
$ npm config set nodedir /Users/nrajlich/node
```
47
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/Force-npm-to-use-global-node-gyp.md
generated
vendored
Normal file
47
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/Force-npm-to-use-global-node-gyp.md
generated
vendored
Normal file
@@ -0,0 +1,47 @@
# Force npm to use the globally installed node-gyp

**Note: These instructions only work with npm 6 or older. For a solution that works with npm 8 (or older), see [Updating-npm-bundled-node-gyp.md](Updating-npm-bundled-node-gyp.md).**

[Many issues](https://github.com/nodejs/node-gyp/labels/ERR%21%20node-gyp%20-v%20%3C%3D%20v5.1.0) are opened by users who are
not running a [current version of node-gyp](https://github.com/nodejs/node-gyp/releases).

npm bundles its own, internal, copy of node-gyp, located at `npm/node_modules` within npm's private dependencies, which are separate from *globally* accessible packages. Therefore this internal copy of node-gyp is independent of any globally installed copy of node-gyp that
may have been installed via `npm install -g node-gyp`.

So npm's internal copy of node-gyp **isn't** stored inside the *global* `node_modules` and thus isn't available for use as a standalone package. npm uses its *internal* copy of `node-gyp` to automatically build native addons.

When you install a _new_ version of node-gyp outside of npm, it'll go into your *global* `node_modules`, but not under `npm/node_modules` (where npm's internal copy of node-gyp is stored). So it will get into your `$PATH` and you will be able to use this globally installed version (**but not npm's internal node-gyp**) like any other globally installed package.

The catch is that npm **won't** use the global version unless you tell it to; it'll keep on using the **internal one**. You need to instruct it to by setting the `node_gyp` config variable (which goes into your `~/.npmrc`). You do this by running the `npm config set` command as below. Then npm will use the command at the path you supply whenever it needs to build a native addon.

**Important**: You also need to remember to unset this when you upgrade npm with a newer version of node-gyp, or you have to manually keep your globally installed node-gyp up to date. See "Undo" below.

## Linux and macOS
```
npm install --global node-gyp@latest
npm config set node_gyp $(npm prefix -g)/lib/node_modules/node-gyp/bin/node-gyp.js
```

`sudo` may be required for the first command if you get a permission error.

## Windows

### Windows Command Prompt
```
npm install --global node-gyp@latest
for /f "delims=" %P in ('npm prefix -g') do npm config set node_gyp "%P\node_modules\node-gyp\bin\node-gyp.js"
```

### Powershell
```
npm install --global node-gyp@latest
npm prefix -g | % {npm config set node_gyp "$_\node_modules\node-gyp\bin\node-gyp.js"}
```

## Undo
**Beware:** if you don't unset the `node_gyp` config option, npm will continue to use the globally installed version of node-gyp rather than the one it ships with, which may end up being newer.

```
npm config delete node_gyp
npm uninstall --global node-gyp
```
7
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/Home.md
generated
vendored
Normal file
7
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/Home.md
generated
vendored
Normal file
@@ -0,0 +1,7 @@
Welcome to the node-gyp wiki!

* [["binding.gyp" files out in the wild]]
* [[Linking to OpenSSL]]
* [[Common Issues]]
* [[Updating npm's bundled node-gyp]]
* [[Error: "pre" versions of node cannot be installed]]
86
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/Linking-to-OpenSSL.md
generated
vendored
Normal file
86
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/Linking-to-OpenSSL.md
generated
vendored
Normal file
@@ -0,0 +1,86 @@
A handful of native addons require linking to OpenSSL in one way or another. This introduces a small challenge since node will sometimes bundle OpenSSL statically (the default for node >= v0.8.x), or sometimes dynamically link to the system OpenSSL (default for node <= v0.6.x).

Good native addons should account for both scenarios. It's recommended that you use the `binding.gyp` file provided below as a starting-point for any addon that needs to use OpenSSL:

``` python
{
  'variables': {
    # node v0.6.x doesn't give us its build variables,
    # but on Unix it was only possible to use the system OpenSSL library,
    # so default the variable to "true", v0.8.x node and up will overwrite it.
    'node_shared_openssl%': 'true'
  },
  'targets': [
    {
      'target_name': 'binding',
      'sources': [
        'src/binding.cc'
      ],
      'conditions': [
        ['node_shared_openssl=="false"', {
          # so when "node_shared_openssl" is "false", then OpenSSL has been
          # bundled into the node executable. So we need to include the same
          # header files that were used when building node.
          'include_dirs': [
            '<(node_root_dir)/deps/openssl/openssl/include'
          ],
          "conditions" : [
            ["target_arch=='ia32'", {
              "include_dirs": [ "<(node_root_dir)/deps/openssl/config/piii" ]
            }],
            ["target_arch=='x64'", {
              "include_dirs": [ "<(node_root_dir)/deps/openssl/config/k8" ]
            }],
            ["target_arch=='arm'", {
              "include_dirs": [ "<(node_root_dir)/deps/openssl/config/arm" ]
            }]
          ]
        }]
      ]
    }
  ]
}
```

This ensures that when OpenSSL is statically linked into `node`, the bundled OpenSSL headers are included, but when the system OpenSSL is in use, only those headers will be used.

## Windows?

As you can see, this baseline `binding.gyp` file only accounts for the Unix scenario. Currently on Windows the situation is a little less ideal. On Windows, OpenSSL is _always_ statically compiled into the `node` executable, so ideally it would be possible to use that copy of OpenSSL when building native addons.

Unfortunately it doesn't seem like that is possible at the moment, as there would need to be tweaks made to the generated `node.lib` file to include the openssl glue functions, or a new `openssl.lib` file would need to be created during the node build. I'm not sure which is the easiest/most feasible.

In the meantime, one possible solution is using another copy of OpenSSL, which is what [`node-bcrypt`](https://github.com/ncb000gt/node.bcrypt.js) currently does. Adding something like this to your `binding.gyp` file's `"conditions"` block would enable this:

``` python
[ 'OS=="win"', {
  'conditions': [
    # "openssl_root" is the directory on Windows of the OpenSSL files.
    # Check the "target_arch" variable to set good default values for
    # both 64-bit and 32-bit builds of the module.
    ['target_arch=="x64"', {
      'variables': {
        'openssl_root%': 'C:/OpenSSL-Win64'
      },
    }, {
      'variables': {
        'openssl_root%': 'C:/OpenSSL-Win32'
      },
    }],
  ],
  'libraries': [
    '-l<(openssl_root)/lib/libeay32.lib',
  ],
  'include_dirs': [
    '<(openssl_root)/include',
  ],
}]
```

Now you can direct your users to install OpenSSL on Windows from here (be sure to tell them to install the 64-bit version if they're compiling against a 64-bit version of node): http://slproweb.com/products/Win32OpenSSL.html

Also note that both `node-gyp` and `npm` allow you to overwrite that default `openssl_root` variable on the command line:

``` bash
$ node-gyp rebuild --openssl-root="C:\Users\Nathan\Desktop\openssl"
```
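
Since the doc above notes that `npm` accepts the same override, an install through npm might look like this sketch (the package name and path are illustrative):

```bash
# illustrative: pass the override while installing a package through npm
$ npm install bcrypt --openssl-root="C:\OpenSSL-Win64"
```
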
11
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/README.md
generated
vendored
Normal file
11
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/README.md
generated
vendored
Normal file
@@ -0,0 +1,11 @@
## Versions of `node-gyp` that are earlier than v9.x.x

Please look through your error log for the string `gyp info using node-gyp@` and if that version number is less than the [current release of node-gyp](https://github.com/nodejs/node-gyp/releases) then __please upgrade__ using [these instructions](https://github.com/nodejs/node-gyp/blob/master/docs/Updating-npm-bundled-node-gyp.md) and then try your command again.

## `node-sass` is deprecated

Please be aware that the package [`node-sass` is deprecated](https://github.com/sass/node-sass#node-sass) so you should actively seek alternatives. Please avoid opening new `node-sass` issues on this repo. You can try `npm install --global node-sass@latest` but we [cannot help much](https://github.com/nodejs/node-gyp/issues?q=is%3Aissue+label%3A%22Node+Sass+--%3E+Dart+Sass%22+) here.

## Issues finding the installed Visual Studio

In cmd, [`npm config set msvs_version 20xx`](https://github.com/nodejs/node-gyp#on-windows) with ___xx___ matching your locally installed version of Visual Studio.
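
For example, assuming Visual Studio 2019 is the locally installed version:

```bash
npm config set msvs_version 2019
```
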
72
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/Updating-npm-bundled-node-gyp.md
generated
vendored
Normal file
72
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/Updating-npm-bundled-node-gyp.md
generated
vendored
Normal file
@@ -0,0 +1,72 @@
# Updating the npm-bundled version of node-gyp

**Note: These instructions are (only) tested and known to work with npm 8 and older.**

**Note: These instructions will be undone if you reinstall or upgrade npm or node! For a more permanent (and simpler) solution, see [Force-npm-to-use-global-node-gyp.md](Force-npm-to-use-global-node-gyp.md). (npm 6 or older only!)**

[Many issues](https://github.com/nodejs/node-gyp/issues?q=label%3A"ERR!+node-gyp+-v+<%3D+v9.x.x") are opened by users who are
not running a [current version of node-gyp](https://github.com/nodejs/node-gyp/releases).

`npm` bundles its own, internal, copy of `node-gyp`. This internal copy is independent of any globally installed copy of node-gyp that
may have been installed via `npm install -g node-gyp`.

This means that while `node-gyp` doesn't get installed into your `$PATH` by default, npm still keeps its own copy to invoke when you
attempt to `npm install` a native add-on.

Sometimes, you may need to update npm's internal node-gyp to a newer version than what is installed. A simple `npm install -g node-gyp`
_won't_ do the trick since npm will still continue to use its internal copy over the global one.

So instead:

## Version of npm

We need to start by knowing your version of `npm`:
```bash
npm --version
```

## Linux, macOS, Solaris, etc.

Unix is easy. Just run the following command.

If your npm is version ___7 or 8___, do:
```bash
$ npm explore npm/node_modules/@npmcli/run-script -g -- npm_config_global=false npm install node-gyp@latest
```

Else if your npm is version ___less than 7___, do:
```bash
$ npm explore npm/node_modules/npm-lifecycle -g -- npm install node-gyp@latest
```

If the command fails with a permissions error, please try `sudo` and then the command.

## Windows

Windows is a bit trickier, since `npm` might be installed to the "Program Files" directory, which requires administrator privileges to
modify on current versions of Windows. Therefore, run the following commands __inside a `cmd.exe` started with "Run as Administrator"__:

First we need to find the location of `node`. If you don't already know the location that `node.exe` got installed to, then run:
```bash
$ where node
```

Now `cd` to the directory that `node.exe` is contained in, e.g.:
```bash
$ cd "C:\Program Files\nodejs"
```

If your npm version is ___7 or 8___, do:
```bash
cd node_modules\npm\node_modules\@npmcli\run-script
```

Else if your npm version is ___less than 7___, do:
```bash
cd node_modules\npm\node_modules\npm-lifecycle
```

Finish by running:
```bash
$ npm install node-gyp@latest
```
49
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/binding.gyp-files-in-the-wild.md
generated
vendored
Normal file
49
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/docs/binding.gyp-files-in-the-wild.md
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
This page contains links to some examples of existing `binding.gyp` files that other node modules are using. Take a look at them for inspiration.
|
||||
|
||||
To add to this page, just add the link to the project's `binding.gyp` file below:
|
||||
|
||||
* [ons](https://github.com/XadillaX/aliyun-ons/blob/master/binding.gyp)
|
||||
* [thmclrx](https://github.com/XadillaX/thmclrx/blob/master/binding.gyp)
|
||||
* [libxmljs](https://github.com/polotek/libxmljs/blob/master/binding.gyp)
|
||||
* [node-buffertools](https://github.com/bnoordhuis/node-buffertools/blob/master/binding.gyp)
|
||||
* [node-canvas](https://github.com/LearnBoost/node-canvas/blob/master/binding.gyp)
|
||||
* [node-ffi](https://github.com/rbranson/node-ffi/blob/master/binding.gyp) + [libffi](https://github.com/rbranson/node-ffi/blob/master/deps/libffi/libffi.gyp)
|
||||
* [node-time](https://github.com/TooTallNate/node-time/blob/master/binding.gyp)
|
||||
* [node-sass](https://github.com/sass/node-sass/blob/master/binding.gyp) + [libsass](https://github.com/sass/node-sass/blob/master/src/libsass.gyp)
|
||||
* [node-serialport](https://github.com/voodootikigod/node-serialport/blob/master/binding.gyp)
|
||||
* [node-weak](https://github.com/TooTallNate/node-weak/blob/master/binding.gyp)
|
||||
* [pty.js](https://github.com/chjj/pty.js/blob/master/binding.gyp)
|
||||
* [ref](https://github.com/TooTallNate/ref/blob/master/binding.gyp)
|
||||
* [appjs](https://github.com/milani/appjs/blob/master/binding.gyp)
|
||||
* [nwm](https://github.com/mixu/nwm/blob/master/binding.gyp)
|
||||
* [bcrypt](https://github.com/ncb000gt/node.bcrypt.js/blob/master/binding.gyp)
|
||||
* [nk-mysql](https://github.com/mmod/nodamysql/blob/master/binding.gyp)
|
||||
* [nk-xrm-installer](https://github.com/mmod/nk-xrm-installer/blob/master/binding.gyp) + [includable.gypi](https://github.com/mmod/nk-xrm-installer/blob/master/includable.gypi) + [unpack.py](https://github.com/mmod/nk-xrm-installer/blob/master/unpack.py) + [disburse.py](https://github.com/mmod/nk-xrm-installer/blob/master/disburse.py)
|
||||
<sub>.py files above provide complete reference for examples of fetching source via http, extracting, and moving files.</sub>
|
||||
* [node-memwatch](https://github.com/lloyd/node-memwatch/blob/master/binding.gyp)
|
||||
* [node-ip2location](https://github.com/bolgovr/node-ip2location/blob/master/binding.gyp)
|
||||
* [node-midi](https://github.com/justinlatimer/node-midi/blob/master/binding.gyp)
|
||||
* [node-sqlite3](https://github.com/developmentseed/node-sqlite3/blob/master/binding.gyp) + [libsqlite3](https://github.com/developmentseed/node-sqlite3/blob/master/deps/sqlite3.gyp)
|
||||
* [node-zipfile](https://github.com/mapbox/node-zipfile/blob/master/binding.gyp)
|
||||
* [node-mapnik](https://github.com/mapnik/node-mapnik/blob/master/binding.gyp)
|
||||
* [node-inotify](https://github.com/c4milo/node-inotify/blob/master/binding.gyp)
|
||||
* [v8-profiler](https://github.com/c4milo/v8-profiler/blob/master/binding.gyp)
|
||||
* [airtunes](https://github.com/radioline/node_airtunes/blob/master/binding.gyp)
|
||||
* [node-fann](https://github.com/c4milo/node-fann/blob/master/binding.gyp)
|
||||
* [node-talib](https://github.com/oransel/node-talib/blob/master/binding.gyp)
|
||||
* [node-leveldown](https://github.com/rvagg/node-leveldown/blob/master/binding.gyp) + [leveldb.gyp](https://github.com/rvagg/node-leveldown/blob/master/deps/leveldb/leveldb.gyp) + [snappy.gyp](https://github.com/rvagg/node-leveldown/blob/master/deps/snappy/snappy.gyp)
|
||||
* [node-expat](https://github.com/astro/node-expat/blob/master/binding.gyp) + [libexpat](https://github.com/astro/node-expat/blob/master/deps/libexpat/libexpat.gyp)
|
||||
* [node-openvg-canvas](https://github.com/luismreis/node-openvg-canvas/blob/master/binding.gyp) + [node-openvg](https://github.com/luismreis/node-openvg/blob/master/binding.gyp)
|
||||
* [node-cryptopp](https://github.com/BatikhSouri/node-cryptopp/blob/master/binding.gyp)
|
||||
* [topcube](https://github.com/creationix/topcube/blob/master/binding.gyp)
|
||||
* [node-osmium](https://github.com/osmcode/node-osmium/blob/master/binding.gyp)
|
||||
* [node-osrm](https://github.com/DennisOSRM/node-osrm)
|
||||
* [node-oracle](https://github.com/joeferner/node-oracle/blob/master/binding.gyp)
|
||||
* [node-process-list](https://github.com/ReklatsMasters/node-process-list/blob/master/binding.gyp)
|
||||
* [node-nanomsg](https://github.com/nickdesaulniers/node-nanomsg/blob/master/binding.gyp)
|
||||
* [Ghostscript4JS](https://github.com/NickNaso/ghostscript4js/blob/master/binding.gyp)
|
||||
* [nodecv](https://github.com/xudafeng/nodecv/blob/master/binding.gyp)
|
||||
* [magick-cli](https://github.com/NickNaso/magick-cli/blob/master/binding.gyp)
|
||||
* [sharp](https://github.com/lovell/sharp/blob/master/binding.gyp)
|
||||
* [krb5](https://github.com/adaltas/node-krb5/blob/master/binding.gyp)
|
||||
* [node-heapdump](https://github.com/bnoordhuis/node-heapdump/blob/master/binding.gyp)
|
||||
4
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.flake8
generated
vendored
Normal file
4
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.flake8
generated
vendored
Normal file
@@ -0,0 +1,4 @@
[flake8]
max-complexity = 101
max-line-length = 88
extend-ignore = E203  # whitespace before ':' to agree with psf/black
36
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml
generated
vendored
Normal file
36
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
# TODO: Enable os: windows-latest
|
||||
# TODO: Enable pytest --doctest-modules
|
||||
|
||||
name: Python_tests
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
pull_request:
|
||||
branches: [ main ]
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
Python_tests:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
max-parallel: 8
|
||||
matrix:
|
||||
os: [macos-latest, ubuntu-latest] # , windows-latest]
|
||||
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip setuptools
|
||||
pip install --editable ".[dev]"
|
||||
- run: ./gyp -V && ./gyp --version && gyp -V && gyp --version
|
||||
- name: Lint with flake8
|
||||
run: flake8 . --ignore=E203,W503 --max-complexity=101 --max-line-length=88 --show-source --statistics
|
||||
- name: Test with pytest
|
||||
run: pytest
|
||||
# - name: Run doctests with pytest
|
||||
# run: pytest --doctest-modules
|
||||
45
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml
generated
vendored
Normal file
45
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
name: node-gyp integration
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
pull_request:
|
||||
branches: [ main ]
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
integration:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [macos-latest, ubuntu-latest, windows-latest]
|
||||
python: ["3.7", "3.10"]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Clone gyp-next
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
path: gyp-next
|
||||
- name: Clone nodejs/node-gyp
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: nodejs/node-gyp
|
||||
path: node-gyp
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 14.x
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd node-gyp
|
||||
npm install --no-progress
|
||||
- name: Replace gyp in node-gyp
|
||||
shell: bash
|
||||
run: |
|
||||
rm -rf node-gyp/gyp
|
||||
cp -r gyp-next node-gyp/gyp
|
||||
- name: Run tests
|
||||
run: |
|
||||
cd node-gyp
|
||||
npm test
|
||||
32
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml
generated
vendored
Normal file
32
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
name: Node.js Windows integration
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
pull_request:
|
||||
branches: [ main ]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build-windows:
|
||||
runs-on: windows-2019
|
||||
steps:
|
||||
- name: Clone gyp-next
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
path: gyp-next
|
||||
- name: Clone nodejs/node
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: nodejs/node
|
||||
path: node
|
||||
- name: Install deps
|
||||
run: choco install nasm
|
||||
- name: Replace gyp in Node.js
|
||||
run: |
|
||||
rm -Recurse node/tools/gyp
|
||||
cp -Recurse gyp-next node/tools/gyp
|
||||
- name: Build Node.js
|
||||
run: |
|
||||
cd node
|
||||
./vcbuild.bat
|
||||
16
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/release-please.yml
generated
vendored
Normal file
16
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/release-please.yml
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
name: release-please
|
||||
jobs:
|
||||
release-please:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: google-github-actions/release-please-action@v3
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
release-type: python
|
||||
package-name: gyp-next
|
||||
bump-minor-pre-major: true
|
||||
16
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/AUTHORS
generated
vendored
Normal file
16
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/AUTHORS
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
# Names should be added to this file like so:
|
||||
# Name or Organization <email address>
|
||||
|
||||
Google Inc. <*@google.com>
|
||||
Bloomberg Finance L.P. <*@bloomberg.net>
|
||||
IBM Inc. <*@*.ibm.com>
|
||||
Yandex LLC <*@yandex-team.ru>
|
||||
|
||||
Steven Knight <knight@baldmt.com>
|
||||
Ryan Norton <rnorton10@gmail.com>
|
||||
David J. Sankel <david@sankelsoftware.com>
|
||||
Eric N. Vander Weele <ericvw@gmail.com>
|
||||
Tom Freudenberg <th.freudenberg@gmail.com>
|
||||
Julien Brianceau <jbriance@cisco.com>
|
||||
Refael Ackermann <refack@gmail.com>
|
||||
Ujjwal Sharma <ryzokuken@disroot.org>
|
||||
233
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/CHANGELOG.md
generated
vendored
Normal file
233
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,233 @@
|
||||
# Changelog
|
||||
|
||||
## [0.14.0](https://github.com/nodejs/gyp-next/compare/v0.13.0...v0.14.0) (2022-10-08)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add command line argument for `gyp --version` ([#164](https://github.com/nodejs/gyp-next/issues/164)) ([5c9f4d0](https://github.com/nodejs/gyp-next/commit/5c9f4d05678dd855e18ed2327219e5d18e5374db))
|
||||
* ninja build for iOS ([#174](https://github.com/nodejs/gyp-next/issues/174)) ([b6f2714](https://github.com/nodejs/gyp-next/commit/b6f271424e0033d7ed54d437706695af2ba7a1bf))
|
||||
* **zos:** support IBM Open XL C/C++ & PL/I compilers on z/OS ([#178](https://github.com/nodejs/gyp-next/issues/178)) ([43a7211](https://github.com/nodejs/gyp-next/commit/43a72110ae3fafb13c9625cc7a969624b27cda47))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* lock windows env ([#163](https://github.com/nodejs/gyp-next/issues/163)) ([44bd0dd](https://github.com/nodejs/gyp-next/commit/44bd0ddc93ea0b5770a44dd326a2e4ae62c21442))
|
||||
* move configuration information into pyproject.toml ([#176](https://github.com/nodejs/gyp-next/issues/176)) ([d69d8ec](https://github.com/nodejs/gyp-next/commit/d69d8ece6dbff7af4f2ea073c9fd170baf8cb7f7))
|
||||
* node.js debugger adds stderr (but exit code is 0) -> shouldn't throw ([#179](https://github.com/nodejs/gyp-next/issues/179)) ([1a457d9](https://github.com/nodejs/gyp-next/commit/1a457d9ed08cfd30c9fa551bc5cf0d90fb583787))
|
||||
|
||||
## [0.13.0](https://www.github.com/nodejs/gyp-next/compare/v0.12.1...v0.13.0) (2022-05-11)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add PRODUCT_DIR_ABS variable ([#151](https://www.github.com/nodejs/gyp-next/issues/151)) ([80d2626](https://www.github.com/nodejs/gyp-next/commit/80d26263581db829b61b312a7bdb5cc791df7824))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* execvp: printf: Argument list too long ([#147](https://www.github.com/nodejs/gyp-next/issues/147)) ([c4e14f3](https://www.github.com/nodejs/gyp-next/commit/c4e14f301673fadbac3ab7882d0b5f4d02530cb9))
|
||||
|
||||
### [0.12.1](https://www.github.com/nodejs/gyp-next/compare/v0.12.0...v0.12.1) (2022-04-06)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **msvs:** avoid fixing path for arguments with "=" ([#143](https://www.github.com/nodejs/gyp-next/issues/143)) ([7e8f16e](https://www.github.com/nodejs/gyp-next/commit/7e8f16eb165e042e64bec98fa6c2a0232a42c26b))
|
||||
|
||||
## [0.12.0](https://www.github.com/nodejs/gyp-next/compare/v0.11.0...v0.12.0) (2022-04-04)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* support building shared libraries on z/OS ([#137](https://www.github.com/nodejs/gyp-next/issues/137)) ([293bcfa](https://www.github.com/nodejs/gyp-next/commit/293bcfa4c25c6adb743377adafc45a80fee492c6))
|
||||
|
||||
## [0.11.0](https://www.github.com/nodejs/gyp-next/compare/v0.10.1...v0.11.0) (2022-03-04)

### Features

* Add proper support for IBM i ([#140](https://www.github.com/nodejs/gyp-next/issues/140)) ([fdda4a3](https://www.github.com/nodejs/gyp-next/commit/fdda4a3038b8a7042ad960ce7a223687c24a21b1))

### [0.10.1](https://www.github.com/nodejs/gyp-next/compare/v0.10.0...v0.10.1) (2021-11-24)


### Bug Fixes

* **make:** only generate makefile for multiple toolsets if requested ([#133](https://www.github.com/nodejs/gyp-next/issues/133)) ([f463a77](https://www.github.com/nodejs/gyp-next/commit/f463a77705973289ea38fec1b244c922ac438e26))

## [0.10.0](https://www.github.com/nodejs/gyp-next/compare/v0.9.6...v0.10.0) (2021-08-26)


### Features

* **msvs:** add support for Visual Studio 2022 ([#124](https://www.github.com/nodejs/gyp-next/issues/124)) ([4bd9215](https://www.github.com/nodejs/gyp-next/commit/4bd9215c44d300f06e916aec1d6327c22b78272d))

### [0.9.6](https://www.github.com/nodejs/gyp-next/compare/v0.9.5...v0.9.6) (2021-08-23)


### Bug Fixes

* align flake8 test ([#122](https://www.github.com/nodejs/gyp-next/issues/122)) ([f1faa8d](https://www.github.com/nodejs/gyp-next/commit/f1faa8d3081e1a47e917ff910892f00dff16cf8a))
* **msvs:** fix paths again in action command arguments ([#121](https://www.github.com/nodejs/gyp-next/issues/121)) ([7159dfb](https://www.github.com/nodejs/gyp-next/commit/7159dfbc5758c9ec717e215f2c36daf482c846a1))

### [0.9.5](https://www.github.com/nodejs/gyp-next/compare/v0.9.4...v0.9.5) (2021-08-18)


### Bug Fixes

* add python 3.6 to node-gyp integration test ([3462d4c](https://www.github.com/nodejs/gyp-next/commit/3462d4ce3c31cce747513dc7ca9760c81d57c60e))
* revert for windows compatibility ([d078e7d](https://www.github.com/nodejs/gyp-next/commit/d078e7d7ae080ddae243188f6415f940376a7368))
* support msvs_quote_cmd in ninja generator ([#117](https://www.github.com/nodejs/gyp-next/issues/117)) ([46486ac](https://www.github.com/nodejs/gyp-next/commit/46486ac6e9329529d51061e006a5b39631e46729))

### [0.9.4](https://www.github.com/nodejs/gyp-next/compare/v0.9.3...v0.9.4) (2021-08-09)


### Bug Fixes

* .S is an extension for asm file on Windows ([#115](https://www.github.com/nodejs/gyp-next/issues/115)) ([d2fad44](https://www.github.com/nodejs/gyp-next/commit/d2fad44ef3a79ca8900f1307060153ded57053fc))

### [0.9.3](https://www.github.com/nodejs/gyp-next/compare/v0.9.2...v0.9.3) (2021-07-07)


### Bug Fixes

* build failure with ninja and Python 3 on Windows ([#113](https://www.github.com/nodejs/gyp-next/issues/113)) ([c172d10](https://www.github.com/nodejs/gyp-next/commit/c172d105deff5db4244e583942215918fa80dd3c))

### [0.9.2](https://www.github.com/nodejs/gyp-next/compare/v0.9.1...v0.9.2) (2021-05-21)


### Bug Fixes

* add support of utf8 encoding ([#105](https://www.github.com/nodejs/gyp-next/issues/105)) ([4d0f93c](https://www.github.com/nodejs/gyp-next/commit/4d0f93c249286d1f0c0f665f5fe7346119f98cf1))

### [0.9.1](https://www.github.com/nodejs/gyp-next/compare/v0.9.0...v0.9.1) (2021-05-14)


### Bug Fixes

* py lint ([3b6a8ee](https://www.github.com/nodejs/gyp-next/commit/3b6a8ee7a66193a8a6867eba9e1d2b70bdf04402))

## [0.9.0](https://www.github.com/nodejs/gyp-next/compare/v0.8.1...v0.9.0) (2021-05-13)


### Features

* use LDFLAGS_host for host toolset ([#98](https://www.github.com/nodejs/gyp-next/issues/98)) ([bea5c7b](https://www.github.com/nodejs/gyp-next/commit/bea5c7bd67d6ad32acbdce79767a5481c70675a2))


### Bug Fixes

* msvs.py: remove overindentation ([#102](https://www.github.com/nodejs/gyp-next/issues/102)) ([3f83e99](https://www.github.com/nodejs/gyp-next/commit/3f83e99056d004d9579ceb786e06b624ddc36529))
* update gyp.el to change case to cl-case ([#93](https://www.github.com/nodejs/gyp-next/issues/93)) ([13d5b66](https://www.github.com/nodejs/gyp-next/commit/13d5b66aab35985af9c2fb1174fdc6e1c1407ecc))

### [0.8.1](https://www.github.com/nodejs/gyp-next/compare/v0.8.0...v0.8.1) (2021-02-18)


### Bug Fixes

* update shebang lines from python to python3 ([#94](https://www.github.com/nodejs/gyp-next/issues/94)) ([a1b0d41](https://www.github.com/nodejs/gyp-next/commit/a1b0d4171a8049a4ab7a614202063dec332f2df4))

## [0.8.0](https://www.github.com/nodejs/gyp-next/compare/v0.7.0...v0.8.0) (2021-01-15)


### ⚠ BREAKING CHANGES

* remove support for Python 2


### Bug Fixes

* revert posix build job ([#86](https://www.github.com/nodejs/gyp-next/issues/86)) ([39dc34f](https://www.github.com/nodejs/gyp-next/commit/39dc34f0799c074624005fb9bbccf6e028607f9d))


### gyp

* Remove support for Python 2 ([#88](https://www.github.com/nodejs/gyp-next/issues/88)) ([22e4654](https://www.github.com/nodejs/gyp-next/commit/22e465426fd892403c95534229af819a99c3f8dc))

## [0.7.0](https://www.github.com/nodejs/gyp-next/compare/v0.6.2...v0.7.0) (2020-12-17)


### ⚠ BREAKING CHANGES

* **msvs:** On Windows, arguments passed to the "action" commands are no longer transformed to replace slashes with backslashes.


### Features

* **xcode:** --cross-compiling overrides arch-specific settings ([973bae0](https://www.github.com/nodejs/gyp-next/commit/973bae0b7b08be7b680ecae9565fbd04b3e0787d))


### Bug Fixes

* **msvs:** do not fix paths in action command arguments ([fc22f83](https://www.github.com/nodejs/gyp-next/commit/fc22f8335e2016da4aae4f4233074bd651d2faea))
* cmake on python 3 ([fd61f5f](https://www.github.com/nodejs/gyp-next/commit/fd61f5faa5275ec8fc98e3c7868c0dd46f109540))
* ValueError: invalid mode: 'rU' while trying to load binding.gyp ([d0504e6](https://www.github.com/nodejs/gyp-next/commit/d0504e6700ce48f44957a4d5891b142a60be946f))
* xcode cmake parsing ([eefe8d1](https://www.github.com/nodejs/gyp-next/commit/eefe8d10e99863bc4ac7e2ed32facd608d400d4b))

### [0.6.2](https://www.github.com/nodejs/gyp-next/compare/v0.6.1...v0.6.2) (2020-10-16)


### Bug Fixes

* do not rewrite absolute paths to avoid long paths ([#74](https://www.github.com/nodejs/gyp-next/issues/74)) ([c2ccc1a](https://www.github.com/nodejs/gyp-next/commit/c2ccc1a81f7f94433a94f4d01a2e820db4c4331a))
* only include MARMASM when toolset is target ([5a2794a](https://www.github.com/nodejs/gyp-next/commit/5a2794aefb58f0c00404ff042b61740bc8b8d5cd))

### [0.6.1](https://github.com/nodejs/gyp-next/compare/v0.6.0...v0.6.1) (2020-10-14)


### Bug Fixes

* Correctly rename object files for absolute paths in MSVS generator.

## [0.6.0](https://github.com/nodejs/gyp-next/compare/v0.5.0...v0.6.0) (2020-10-13)


### Features

* The Makefile generator will now output shared libraries directly to the product directory on all platforms (previously only macOS).

## [0.5.0](https://github.com/nodejs/gyp-next/compare/v0.4.0...v0.5.0) (2020-09-30)


### Features

* Extended compile_commands_json generator to consider more file extensions than just `c` and `cc`. `cpp` and `cxx` are now supported.
* Source files with duplicate basenames are now supported.


### Removed

* The `--no-duplicate-basename-check` option was removed.
* The `msvs_enable_marmasm` configuration option was removed in favor of auto-inclusion of the "marmasm" sections for Windows on ARM.

## [0.4.0](https://github.com/nodejs/gyp-next/compare/v0.3.0...v0.4.0) (2020-07-14)


### Features

* Added support for passing arbitrary architectures to Xcode builds, enables `arm64` builds.


### Bug Fixes

* Fixed a bug on Solaris where copying archives failed.

## [0.3.0](https://github.com/nodejs/gyp-next/compare/v0.2.1...v0.3.0) (2020-06-06)


### Features

* Added support for MSVC cross-compilation. This allows compilation on x64 for a Windows ARM target.


### Bug Fixes

* Fixed XCode CLT version detection on macOS Catalina.

### [0.2.1](https://github.com/nodejs/gyp-next/compare/v0.2.0...v0.2.1) (2020-05-05)


### Bug Fixes

* Relicensed to Node.js contributors.
* Fixed Windows bug introduced in v0.2.0.

## [0.2.0](https://github.com/nodejs/gyp-next/releases/tag/v0.2.0) (2020-04-06)

This is the first release of this project, based on https://chromium.googlesource.com/external/gyp with changes made over the years in Node.js and node-gyp.

4 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md (generated, vendored, Normal file)
@@ -0,0 +1,4 @@
# Code of Conduct

* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md)
* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/HEAD/Moderation-Policy.md)

32 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/CONTRIBUTING.md (generated, vendored, Normal file)
@@ -0,0 +1,32 @@
# Contributing to gyp-next

## Code of Conduct

This project is bound to the [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md).

<a id="developers-certificate-of-origin"></a>
## Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

* (a) The contribution was created in whole or in part by me and I
  have the right to submit it under the open source license
  indicated in the file; or

* (b) The contribution is based upon previous work that, to the best
  of my knowledge, is covered under an appropriate open source
  license and I have the right under that license to submit that
  work with modifications, whether created in whole or in part
  by me, under the same open source license (unless I am
  permitted to submit under a different license), as indicated
  in the file; or

* (c) The contribution was provided directly to me by some other
  person who certified (a), (b) or (c) and I have not modified
  it.

* (d) I understand and agree that this project and the contribution
  are public and that a record of the contribution (including all
  personal information I submit with it, including my sign-off) is
  maintained indefinitely and may be redistributed consistent with
  this project or the open source license(s) involved.

28 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/LICENSE (generated, vendored, Normal file)
@@ -0,0 +1,28 @@
Copyright (c) 2020 Node.js contributors. All rights reserved.
Copyright (c) 2009 Google Inc. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

  * Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.
  * Redistributions in binary form must reproduce the above
    copyright notice, this list of conditions and the following disclaimer
    in the documentation and/or other materials provided with the
    distribution.
  * Neither the name of Google Inc. nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

7 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/README.md (generated, vendored, Normal file)
@@ -0,0 +1,7 @@
GYP can Generate Your Projects.
===================================

Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out the ```md-pages``` branch to read those documents offline.

__gyp-next__ is [released](https://github.com/nodejs/gyp-next/releases) to the [__Python Packaging Index__](https://pypi.org/project/gyp-next) (PyPI) and can be installed with the command:
* `python3 -m pip install gyp-next`

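The bundled `gyp_main.py` later in this commit drives the same entry point that the PyPI package exposes. A minimal sketch of invoking gyp-next from Python after the `pip install` above (the module name `gyp` and `gyp.script_main()` are taken from `gyp_main.py` below; everything else is illustrative):

```python
# Minimal sketch: run gyp-next programmatically after `python3 -m pip install gyp-next`.
# gyp.script_main() reads its arguments from sys.argv, so set them before calling.
import sys

import gyp  # provided by the gyp-next package

sys.argv = ["gyp", "--help"]  # illustrative arguments
sys.exit(gyp.script_main())
```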
12 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc (generated, vendored, Normal file)
@@ -0,0 +1,12 @@
// Copyright (c) 2013 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
// then used during the final link for modules that have large PDBs. Otherwise,
// the linker will generate a pdb with a page size of 1KB, which imposes a limit
// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
// (rather than the linker), this limit is avoided. With this in place PDBs may
// grow to 2GB.
//
// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.

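The `msvs_large_pdb` mechanism mentioned in the comment above is enabled per target in gyp input files. A hedged sketch of such a target (all names are illustrative; the `msvs_large_pdb` key is the one consumed by `MSVSUtil.InsertLargePdbShims` later in this commit):

```python
# Hypothetical gyp target (Python-dict syntax used by .gyp/binding.gyp files).
# Setting msvs_large_pdb makes the generator link against an empty 4KB-pagesize
# PDB built from large-pdb-shim.cc before the final link, lifting the 1GB limit.
{
    "targets": [
        {
            "target_name": "big_module",   # illustrative
            "type": "shared_library",
            "sources": ["big_module.cc"],  # illustrative
            "msvs_large_pdb": 1,           # opt in to the shim described above
        }
    ]
}
```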
8 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/gyp (generated, vendored, Executable file)
@@ -0,0 +1,8 @@
#!/bin/sh
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

set -e
base=$(dirname "$0")
exec python "${base}/gyp_main.py" "$@"

5 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/gyp.bat (generated, vendored, Executable file)
@@ -0,0 +1,5 @@
@rem Copyright (c) 2009 Google Inc. All rights reserved.
@rem Use of this source code is governed by a BSD-style license that can be
@rem found in the LICENSE file.

@python "%~dp0gyp_main.py" %*

45 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/gyp_main.py (generated, vendored, Executable file)
@@ -0,0 +1,45 @@
#!/usr/bin/env python3

# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import sys
import subprocess


def IsCygwin():
    # Function copied from pylib/gyp/common.py
    try:
        out = subprocess.Popen(
            "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        stdout, _ = out.communicate()
        return "CYGWIN" in stdout.decode("utf-8")
    except Exception:
        return False


def UnixifyPath(path):
    try:
        if not IsCygwin():
            return path
        out = subprocess.Popen(
            ["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        stdout, _ = out.communicate()
        return stdout.decode("utf-8")
    except Exception:
        return path


# Make sure we're using the version of pylib in this repo, not one installed
# elsewhere on the system. Also convert to Unix style path on Cygwin systems,
# else the 'gyp' library will not be found
path = UnixifyPath(sys.argv[0])
sys.path.insert(0, os.path.join(os.path.dirname(path), "pylib"))
import gyp  # noqa: E402

if __name__ == "__main__":
    sys.exit(gyp.script_main())

367 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py (generated, vendored, Normal file)
@@ -0,0 +1,367 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""New implementation of Visual Studio project generation."""

import hashlib
import os
import random
from operator import attrgetter

import gyp.common


def cmp(x, y):
    return (x > y) - (x < y)


# Initialize random number generator
random.seed()

# GUIDs for project types
ENTRY_TYPE_GUIDS = {
    "project": "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}",
    "folder": "{2150E333-8FDC-42A3-9474-1A3956D46DE8}",
}

# ------------------------------------------------------------------------------
# Helper functions


def MakeGuid(name, seed="msvs_new"):
    """Returns a GUID for the specified target name.

    Args:
      name: Target name.
      seed: Seed for MD5 hash.
    Returns:
      A GUID-line string calculated from the name and seed.

    This generates something which looks like a GUID, but depends only on the
    name and seed. This means the same name/seed will always generate the same
    GUID, so that projects and solutions which refer to each other can explicitly
    determine the GUID to refer to explicitly. It also means that the GUID will
    not change when the project for a target is rebuilt.
    """
    # Calculate a MD5 signature for the seed and name.
    d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
    # Convert most of the signature to GUID form (discard the rest)
    guid = (
        "{"
        + d[:8]
        + "-"
        + d[8:12]
        + "-"
        + d[12:16]
        + "-"
        + d[16:20]
        + "-"
        + d[20:32]
        + "}"
    )
    return guid


# ------------------------------------------------------------------------------


class MSVSSolutionEntry:
    def __cmp__(self, other):
        # Sort by name then guid (so things are in order on vs2008).
        return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))


class MSVSFolder(MSVSSolutionEntry):
    """Folder in a Visual Studio project or solution."""

    def __init__(self, path, name=None, entries=None, guid=None, items=None):
        """Initializes the folder.

        Args:
          path: Full path to the folder.
          name: Name of the folder.
          entries: List of folder entries to nest inside this folder. May contain
              Folder or Project objects. May be None, if the folder is empty.
          guid: GUID to use for folder, if not None.
          items: List of solution items to include in the folder project. May be
              None, if the folder does not directly contain items.
        """
        if name:
            self.name = name
        else:
            # Use last layer.
            self.name = os.path.basename(path)

        self.path = path
        self.guid = guid

        # Copy passed lists (or set to empty lists)
        self.entries = sorted(entries or [], key=attrgetter("path"))
        self.items = list(items or [])

        self.entry_type_guid = ENTRY_TYPE_GUIDS["folder"]

    def get_guid(self):
        if self.guid is None:
            # Use consistent guids for folders (so things don't regenerate).
            self.guid = MakeGuid(self.path, seed="msvs_folder")
        return self.guid


# ------------------------------------------------------------------------------


class MSVSProject(MSVSSolutionEntry):
    """Visual Studio project."""

    def __init__(
        self,
        path,
        name=None,
        dependencies=None,
        guid=None,
        spec=None,
        build_file=None,
        config_platform_overrides=None,
        fixpath_prefix=None,
    ):
        """Initializes the project.

        Args:
          path: Absolute path to the project file.
          name: Name of project. If None, the name will be the same as the base
              name of the project file.
          dependencies: List of other Project objects this project is dependent
              upon, if not None.
          guid: GUID to use for project, if not None.
          spec: Dictionary specifying how to build this project.
          build_file: Filename of the .gyp file that the vcproj file comes from.
          config_platform_overrides: optional dict of configuration platforms to
              used in place of the default for this target.
          fixpath_prefix: the path used to adjust the behavior of _fixpath
        """
        self.path = path
        self.guid = guid
        self.spec = spec
        self.build_file = build_file
        # Use project filename if name not specified
        self.name = name or os.path.splitext(os.path.basename(path))[0]

        # Copy passed lists (or set to empty lists)
        self.dependencies = list(dependencies or [])

        self.entry_type_guid = ENTRY_TYPE_GUIDS["project"]

        if config_platform_overrides:
            self.config_platform_overrides = config_platform_overrides
        else:
            self.config_platform_overrides = {}
        self.fixpath_prefix = fixpath_prefix
        self.msbuild_toolset = None

    def set_dependencies(self, dependencies):
        self.dependencies = list(dependencies or [])

    def get_guid(self):
        if self.guid is None:
            # Set GUID from path
            # TODO(rspangler): This is fragile.
            # 1. We can't just use the project filename sans path, since there could
            #    be multiple projects with the same base name (for example,
            #    foo/unittest.vcproj and bar/unittest.vcproj).
            # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
            #    GUID is the same whether it's included from base/base.sln or
            #    foo/bar/baz/baz.sln.
            # 3. The GUID needs to be the same each time this builder is invoked, so
            #    that we don't need to rebuild the solution when the project changes.
            # 4. We should be able to handle pre-built project files by reading the
            #    GUID from the files.
            self.guid = MakeGuid(self.name)
        return self.guid

    def set_msbuild_toolset(self, msbuild_toolset):
        self.msbuild_toolset = msbuild_toolset


# ------------------------------------------------------------------------------


class MSVSSolution:
    """Visual Studio solution."""

    def __init__(
        self, path, version, entries=None, variants=None, websiteProperties=True
    ):
        """Initializes the solution.

        Args:
          path: Path to solution file.
          version: Format version to emit.
          entries: List of entries in solution. May contain Folder or Project
              objects. May be None, if the folder is empty.
          variants: List of build variant strings. If none, a default list will
              be used.
          websiteProperties: Flag to decide if the website properties section
              is generated.
        """
        self.path = path
        self.websiteProperties = websiteProperties
        self.version = version

        # Copy passed lists (or set to empty lists)
        self.entries = list(entries or [])

        if variants:
            # Copy passed list
            self.variants = variants[:]
        else:
            # Use default
            self.variants = ["Debug|Win32", "Release|Win32"]
        # TODO(rspangler): Need to be able to handle a mapping of solution config
        # to project config. Should we be able to handle variants being a dict,
        # or add a separate variant_map variable? If it's a dict, we can't
        # guarantee the order of variants since dict keys aren't ordered.

        # TODO(rspangler): Automatically write to disk for now; should delay until
        # node-evaluation time.
        self.Write()

    def Write(self, writer=gyp.common.WriteOnDiff):
        """Writes the solution file to disk.

        Raises:
          IndexError: An entry appears multiple times.
        """
        # Walk the entry tree and collect all the folders and projects.
        all_entries = set()
        entries_to_check = self.entries[:]
        while entries_to_check:
            e = entries_to_check.pop(0)

            # If this entry has been visited, nothing to do.
            if e in all_entries:
                continue

            all_entries.add(e)

            # If this is a folder, check its entries too.
            if isinstance(e, MSVSFolder):
                entries_to_check += e.entries

        all_entries = sorted(all_entries, key=attrgetter("path"))

        # Open file and print header
        f = writer(self.path)
        f.write(
            "Microsoft Visual Studio Solution File, "
            "Format Version %s\r\n" % self.version.SolutionVersion()
        )
        f.write("# %s\r\n" % self.version.Description())

        # Project entries
        sln_root = os.path.split(self.path)[0]
        for e in all_entries:
            relative_path = gyp.common.RelativePath(e.path, sln_root)
            # msbuild does not accept an empty folder_name.
            # use '.' in case relative_path is empty.
            folder_name = relative_path.replace("/", "\\") or "."
            f.write(
                'Project("%s") = "%s", "%s", "%s"\r\n'
                % (
                    e.entry_type_guid,  # Entry type GUID
                    e.name,  # Folder name
                    folder_name,  # Folder name (again)
                    e.get_guid(),  # Entry GUID
                )
            )

            # TODO(rspangler): Need a way to configure this stuff
            if self.websiteProperties:
                f.write(
                    "\tProjectSection(WebsiteProperties) = preProject\r\n"
                    '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
                    '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
                    "\tEndProjectSection\r\n"
                )

            if isinstance(e, MSVSFolder):
                if e.items:
                    f.write("\tProjectSection(SolutionItems) = preProject\r\n")
                    for i in e.items:
                        f.write(f"\t\t{i} = {i}\r\n")
                    f.write("\tEndProjectSection\r\n")

            if isinstance(e, MSVSProject):
                if e.dependencies:
                    f.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
                    for d in e.dependencies:
                        f.write(f"\t\t{d.get_guid()} = {d.get_guid()}\r\n")
                    f.write("\tEndProjectSection\r\n")

            f.write("EndProject\r\n")

        # Global section
        f.write("Global\r\n")

        # Configurations (variants)
        f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n")
        for v in self.variants:
            f.write(f"\t\t{v} = {v}\r\n")
        f.write("\tEndGlobalSection\r\n")

        # Sort config guids for easier diffing of solution changes.
        config_guids = []
        config_guids_overrides = {}
        for e in all_entries:
            if isinstance(e, MSVSProject):
                config_guids.append(e.get_guid())
                config_guids_overrides[e.get_guid()] = e.config_platform_overrides
        config_guids.sort()

        f.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n")
        for g in config_guids:
            for v in self.variants:
                nv = config_guids_overrides[g].get(v, v)
                # Pick which project configuration to build for this solution
                # configuration.
                f.write(
                    "\t\t%s.%s.ActiveCfg = %s\r\n"
                    % (
                        g,  # Project GUID
                        v,  # Solution build configuration
                        nv,  # Project build config for that solution config
                    )
                )

                # Enable project in this solution configuration.
                f.write(
                    "\t\t%s.%s.Build.0 = %s\r\n"
                    % (
                        g,  # Project GUID
                        v,  # Solution build configuration
                        nv,  # Project build config for that solution config
                    )
                )
        f.write("\tEndGlobalSection\r\n")

        # TODO(rspangler): Should be able to configure this stuff too (though I've
        # never seen this be any different)
        f.write("\tGlobalSection(SolutionProperties) = preSolution\r\n")
        f.write("\t\tHideSolutionNode = FALSE\r\n")
        f.write("\tEndGlobalSection\r\n")

        # Folder mappings
        # Omit this section if there are no folders
        if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
            f.write("\tGlobalSection(NestedProjects) = preSolution\r\n")
            for e in all_entries:
                if not isinstance(e, MSVSFolder):
                    continue  # Does not apply to projects, only folders
                for subentry in e.entries:
                    f.write(f"\t\t{subentry.get_guid()} = {e.get_guid()}\r\n")
            f.write("\tEndGlobalSection\r\n")

        f.write("EndGlobal\r\n")

        f.close()

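Because `MakeGuid` hashes only the name and seed, the GUID for a target is stable across regenerations, which is the property its docstring describes. A small sketch, assuming the module is importable as `gyp.MSVSNew` (its location under `pylib/gyp/` suggests so):

```python
# Sketch only: MakeGuid is deterministic for a given name/seed pair, so
# regenerating a project does not churn its GUID.
from gyp.MSVSNew import MakeGuid

a = MakeGuid("foo#target")
b = MakeGuid("foo#target")
assert a == b                       # same name/seed -> same GUID
assert a != MakeGuid("bar#target")  # different name -> different GUID
print(a)  # a "{XXXXXXXX-XXXX-...}"-shaped string derived from an MD5 digest
```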
206 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py (generated, vendored, Normal file)
@@ -0,0 +1,206 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio project reader/writer."""

import gyp.easy_xml as easy_xml

# ------------------------------------------------------------------------------


class Tool:
    """Visual Studio tool."""

    def __init__(self, name, attrs=None):
        """Initializes the tool.

        Args:
          name: Tool name.
          attrs: Dict of tool attributes; may be None.
        """
        self._attrs = attrs or {}
        self._attrs["Name"] = name

    def _GetSpecification(self):
        """Creates an element for the tool.

        Returns:
          A new xml.dom.Element for the tool.
        """
        return ["Tool", self._attrs]


class Filter:
    """Visual Studio filter - that is, a virtual folder."""

    def __init__(self, name, contents=None):
        """Initializes the folder.

        Args:
          name: Filter (folder) name.
          contents: List of filenames and/or Filter objects contained.
        """
        self.name = name
        self.contents = list(contents or [])


# ------------------------------------------------------------------------------


class Writer:
    """Visual Studio XML project writer."""

    def __init__(self, project_path, version, name, guid=None, platforms=None):
        """Initializes the project.

        Args:
          project_path: Path to the project file.
          version: Format version to emit.
          name: Name of the project.
          guid: GUID to use for project, if not None.
          platforms: Array of string, the supported platforms. If null, ['Win32']
        """
        self.project_path = project_path
        self.version = version
        self.name = name
        self.guid = guid

        # Default to Win32 for platforms.
        if not platforms:
            platforms = ["Win32"]

        # Initialize the specifications of the various sections.
        self.platform_section = ["Platforms"]
        for platform in platforms:
            self.platform_section.append(["Platform", {"Name": platform}])
        self.tool_files_section = ["ToolFiles"]
        self.configurations_section = ["Configurations"]
        self.files_section = ["Files"]

        # Keep a dict keyed on filename to speed up access.
        self.files_dict = dict()

    def AddToolFile(self, path):
        """Adds a tool file to the project.

        Args:
          path: Relative path from project to tool file.
        """
        self.tool_files_section.append(["ToolFile", {"RelativePath": path}])

    def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
        """Returns the specification for a configuration.

        Args:
          config_type: Type of configuration node.
          config_name: Configuration name.
          attrs: Dict of configuration attributes; may be None.
          tools: List of tools (strings or Tool objects); may be None.
        Returns:
        """
        # Handle defaults
        if not attrs:
            attrs = {}
        if not tools:
            tools = []

        # Add configuration node and its attributes
        node_attrs = attrs.copy()
        node_attrs["Name"] = config_name
        specification = [config_type, node_attrs]

        # Add tool nodes and their attributes
        if tools:
            for t in tools:
                if isinstance(t, Tool):
                    specification.append(t._GetSpecification())
                else:
                    specification.append(Tool(t)._GetSpecification())
        return specification

    def AddConfig(self, name, attrs=None, tools=None):
        """Adds a configuration to the project.

        Args:
          name: Configuration name.
          attrs: Dict of configuration attributes; may be None.
          tools: List of tools (strings or Tool objects); may be None.
        """
        spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools)
        self.configurations_section.append(spec)

    def _AddFilesToNode(self, parent, files):
        """Adds files and/or filters to the parent node.

        Args:
          parent: Destination node
          files: A list of Filter objects and/or relative paths to files.

        Will call itself recursively, if the files list contains Filter objects.
        """
        for f in files:
            if isinstance(f, Filter):
                node = ["Filter", {"Name": f.name}]
                self._AddFilesToNode(node, f.contents)
            else:
                node = ["File", {"RelativePath": f}]
                self.files_dict[f] = node
            parent.append(node)

    def AddFiles(self, files):
        """Adds files to the project.

        Args:
          files: A list of Filter objects and/or relative paths to files.

        This makes a copy of the file/filter tree at the time of this call. If you
        later add files to a Filter object which was passed into a previous call
        to AddFiles(), it will not be reflected in this project.
        """
        self._AddFilesToNode(self.files_section, files)
        # TODO(rspangler) This also doesn't handle adding files to an existing
        # filter. That is, it doesn't merge the trees.

    def AddFileConfig(self, path, config, attrs=None, tools=None):
        """Adds a configuration to a file.

        Args:
          path: Relative path to the file.
          config: Name of configuration to add.
          attrs: Dict of configuration attributes; may be None.
          tools: List of tools (strings or Tool objects); may be None.

        Raises:
          ValueError: Relative path does not match any file added via AddFiles().
        """
        # Find the file node with the right relative path
        parent = self.files_dict.get(path)
        if not parent:
            raise ValueError('AddFileConfig: file "%s" not in project.' % path)

        # Add the config to the file node
        spec = self._GetSpecForConfiguration("FileConfiguration", config, attrs, tools)
        parent.append(spec)

    def WriteIfChanged(self):
        """Writes the project file."""
        # First create XML content definition
        content = [
            "VisualStudioProject",
            {
                "ProjectType": "Visual C++",
                "Version": self.version.ProjectVersion(),
                "Name": self.name,
                "ProjectGUID": self.guid,
                "RootNamespace": self.name,
                "Keyword": "Win32Proj",
            },
            self.platform_section,
            self.tool_files_section,
            self.configurations_section,
            ["References"],  # empty section
            self.files_section,
            ["Globals"],  # empty section
        ]
        easy_xml.WriteXmlIfChanged(content, self.project_path, encoding="Windows-1252")

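A rough usage sketch of the `Writer` above. The stub version object is an assumption standing in for the `MSVSVersion` instance gyp itself would pass (only `ProjectVersion()` is needed here), and the file and configuration names are illustrative:

```python
# Sketch only: drive the vcproj Writer defined above.
from gyp.MSVSProject import Filter, Writer


class _StubVersion:
    # Stands in for gyp.MSVSVersion.VisualStudioVersion; only ProjectVersion()
    # is consulted by Writer.WriteIfChanged().
    def ProjectVersion(self):
        return "9.00"  # placeholder format version


w = Writer("example.vcproj", _StubVersion(), "example")  # names illustrative
w.AddConfig("Debug|Win32", attrs={"ConfigurationType": "1"})
w.AddFiles([Filter("src", contents=["src\\main.cc"])])
w.WriteIfChanged()  # emits Windows-1252 encoded XML via gyp.easy_xml
```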
1270 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py (generated, vendored, Normal file)
File diff suppressed because it is too large

1547 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py (generated, vendored, Executable file)
File diff suppressed because it is too large

59 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py (generated, vendored, Normal file)
@@ -0,0 +1,59 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio project reader/writer."""

import gyp.easy_xml as easy_xml


class Writer:
    """Visual Studio XML tool file writer."""

    def __init__(self, tool_file_path, name):
        """Initializes the tool file.

        Args:
          tool_file_path: Path to the tool file.
          name: Name of the tool file.
        """
        self.tool_file_path = tool_file_path
        self.name = name
        self.rules_section = ["Rules"]

    def AddCustomBuildRule(
        self, name, cmd, description, additional_dependencies, outputs, extensions
    ):
        """Adds a rule to the tool file.

        Args:
          name: Name of the rule.
          description: Description of the rule.
          cmd: Command line of the rule.
          additional_dependencies: other files which may trigger the rule.
          outputs: outputs of the rule.
          extensions: extensions handled by the rule.
        """
        rule = [
            "CustomBuildRule",
            {
                "Name": name,
                "ExecutionDescription": description,
                "CommandLine": cmd,
                "Outputs": ";".join(outputs),
                "FileExtensions": ";".join(extensions),
                "AdditionalDependencies": ";".join(additional_dependencies),
            },
        ]
        self.rules_section.append(rule)

    def WriteIfChanged(self):
        """Writes the tool file."""
        content = [
            "VisualStudioToolFile",
            {"Version": "8.00", "Name": self.name},
            self.rules_section,
        ]
        easy_xml.WriteXmlIfChanged(
            content, self.tool_file_path, encoding="Windows-1252"
        )

153 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py (generated, vendored, Normal file)
@@ -0,0 +1,153 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio user preferences file writer."""

import os
import re
import socket  # for gethostname

import gyp.easy_xml as easy_xml


# ------------------------------------------------------------------------------


def _FindCommandInPath(command):
    """If there are no slashes in the command given, this function
    searches the PATH env to find the given command, and converts it
    to an absolute path. We have to do this because MSVS is looking
    for an actual file to launch a debugger on, not just a command
    line. Note that this happens at GYP time, so anything needing to
    be built needs to have a full path."""
    if "/" in command or "\\" in command:
        # If the command already has path elements (either relative or
        # absolute), then assume it is constructed properly.
        return command
    else:
        # Search through the path list and find an existing file that
        # we can access.
        paths = os.environ.get("PATH", "").split(os.pathsep)
        for path in paths:
            item = os.path.join(path, command)
            if os.path.isfile(item) and os.access(item, os.X_OK):
                return item
    return command


def _QuoteWin32CommandLineArgs(args):
    new_args = []
    for arg in args:
        # Replace all double-quotes with double-double-quotes to escape
        # them for cmd shell, and then quote the whole thing if there
        # are any.
        if arg.find('"') != -1:
            arg = '""'.join(arg.split('"'))
            arg = '"%s"' % arg

        # Otherwise, if there are any spaces, quote the whole arg.
        elif re.search(r"[ \t\n]", arg):
            arg = '"%s"' % arg
        new_args.append(arg)
    return new_args


class Writer:
    """Visual Studio XML user user file writer."""

    def __init__(self, user_file_path, version, name):
        """Initializes the user file.

        Args:
          user_file_path: Path to the user file.
          version: Version info.
          name: Name of the user file.
        """
        self.user_file_path = user_file_path
        self.version = version
        self.name = name
        self.configurations = {}

    def AddConfig(self, name):
        """Adds a configuration to the project.

        Args:
          name: Configuration name.
        """
        self.configurations[name] = ["Configuration", {"Name": name}]

    def AddDebugSettings(
        self, config_name, command, environment={}, working_directory=""
    ):
        """Adds a DebugSettings node to the user file for a particular config.

        Args:
          command: command line to run. First element in the list is the
            executable. All elements of the command will be quoted if
            necessary.
          working_directory: other files which may trigger the rule. (optional)
        """
        command = _QuoteWin32CommandLineArgs(command)

        abs_command = _FindCommandInPath(command[0])

        if environment and isinstance(environment, dict):
            env_list = [f'{key}="{val}"' for (key, val) in environment.items()]
            environment = " ".join(env_list)
        else:
            environment = ""

        n_cmd = [
            "DebugSettings",
            {
                "Command": abs_command,
                "WorkingDirectory": working_directory,
                "CommandArguments": " ".join(command[1:]),
                "RemoteMachine": socket.gethostname(),
                "Environment": environment,
                "EnvironmentMerge": "true",
                # Currently these are all "dummy" values that we're just setting
                # in the default manner that MSVS does it. We could use some of
                # these to add additional capabilities, I suppose, but they might
                # not have parity with other platforms then.
                "Attach": "false",
                "DebuggerType": "3",  # 'auto' debugger
                "Remote": "1",
                "RemoteCommand": "",
                "HttpUrl": "",
                "PDBPath": "",
                "SQLDebugging": "",
                "DebuggerFlavor": "0",
                "MPIRunCommand": "",
                "MPIRunArguments": "",
                "MPIRunWorkingDirectory": "",
                "ApplicationCommand": "",
                "ApplicationArguments": "",
                "ShimCommand": "",
                "MPIAcceptMode": "",
                "MPIAcceptFilter": "",
            },
        ]

        # Find the config, and add it if it doesn't exist.
        if config_name not in self.configurations:
            self.AddConfig(config_name)

        # Add the DebugSettings onto the appropriate config.
        self.configurations[config_name].append(n_cmd)

    def WriteIfChanged(self):
        """Writes the user file."""
        configs = ["Configurations"]
        for config, spec in sorted(self.configurations.items()):
            configs.append(spec)

        content = [
            "VisualStudioUserFile",
            {"Version": self.version.ProjectVersion(), "Name": self.name},
            configs,
        ]
        easy_xml.WriteXmlIfChanged(
            content, self.user_file_path, encoding="Windows-1252"
        )

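The quoting helper at the top of this file is what `AddDebugSettings` relies on before writing the `Command`/`CommandArguments` attributes; a short sketch of its behaviour (the sample arguments are made up):

```python
# Sketch only: embedded quotes are doubled and any argument containing
# whitespace or quotes is wrapped in quotes, per the code above.
from gyp.MSVSUserFile import _QuoteWin32CommandLineArgs

args = ["python", "run tests.py", 'say "hi"']
print(_QuoteWin32CommandLineArgs(args))
# expected, per the implementation above:
# ['python', '"run tests.py"', '"say ""hi"""']
```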
271 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py (generated, vendored, Normal file)
@@ -0,0 +1,271 @@
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions shared amongst the Windows generators."""

import copy
import os


# A dictionary mapping supported target types to extensions.
TARGET_TYPE_EXT = {
    "executable": "exe",
    "loadable_module": "dll",
    "shared_library": "dll",
    "static_library": "lib",
    "windows_driver": "sys",
}


def _GetLargePdbShimCcPath():
    """Returns the path of the large_pdb_shim.cc file."""
    this_dir = os.path.abspath(os.path.dirname(__file__))
    src_dir = os.path.abspath(os.path.join(this_dir, "..", ".."))
    win_data_dir = os.path.join(src_dir, "data", "win")
    large_pdb_shim_cc = os.path.join(win_data_dir, "large-pdb-shim.cc")
    return large_pdb_shim_cc


def _DeepCopySomeKeys(in_dict, keys):
    """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.

    Arguments:
      in_dict: The dictionary to copy.
      keys: The keys to be copied. If a key is in this list and doesn't exist in
          |in_dict| this is not an error.
    Returns:
      The partially deep-copied dictionary.
    """
    d = {}
    for key in keys:
        if key not in in_dict:
            continue
        d[key] = copy.deepcopy(in_dict[key])
    return d


def _SuffixName(name, suffix):
    """Add a suffix to the end of a target.

    Arguments:
      name: name of the target (foo#target)
      suffix: the suffix to be added
    Returns:
      Target name with suffix added (foo_suffix#target)
    """
    parts = name.rsplit("#", 1)
    parts[0] = f"{parts[0]}_{suffix}"
    return "#".join(parts)


def _ShardName(name, number):
    """Add a shard number to the end of a target.

    Arguments:
      name: name of the target (foo#target)
      number: shard number
    Returns:
      Target name with shard added (foo_1#target)
    """
    return _SuffixName(name, str(number))


def ShardTargets(target_list, target_dicts):
    """Shard some targets apart to work around the linkers limits.

    Arguments:
      target_list: List of target pairs: 'base/base.gyp:base'.
      target_dicts: Dict of target properties keyed on target pair.
    Returns:
      Tuple of the new sharded versions of the inputs.
    """
    # Gather the targets to shard, and how many pieces.
    targets_to_shard = {}
    for t in target_dicts:
        shards = int(target_dicts[t].get("msvs_shard", 0))
        if shards:
            targets_to_shard[t] = shards
    # Shard target_list.
    new_target_list = []
    for t in target_list:
        if t in targets_to_shard:
            for i in range(targets_to_shard[t]):
                new_target_list.append(_ShardName(t, i))
        else:
            new_target_list.append(t)
    # Shard target_dict.
    new_target_dicts = {}
    for t in target_dicts:
        if t in targets_to_shard:
            for i in range(targets_to_shard[t]):
                name = _ShardName(t, i)
                new_target_dicts[name] = copy.copy(target_dicts[t])
                new_target_dicts[name]["target_name"] = _ShardName(
                    new_target_dicts[name]["target_name"], i
                )
                sources = new_target_dicts[name].get("sources", [])
                new_sources = []
                for pos in range(i, len(sources), targets_to_shard[t]):
                    new_sources.append(sources[pos])
                new_target_dicts[name]["sources"] = new_sources
        else:
            new_target_dicts[t] = target_dicts[t]
    # Shard dependencies.
    for t in sorted(new_target_dicts):
        for deptype in ("dependencies", "dependencies_original"):
            dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
            new_dependencies = []
            for d in dependencies:
                if d in targets_to_shard:
                    for i in range(targets_to_shard[d]):
                        new_dependencies.append(_ShardName(d, i))
                else:
                    new_dependencies.append(d)
            new_target_dicts[t][deptype] = new_dependencies

    return (new_target_list, new_target_dicts)


def _GetPdbPath(target_dict, config_name, vars):
    """Returns the path to the PDB file that will be generated by a given
    configuration.

    The lookup proceeds as follows:
      - Look for an explicit path in the VCLinkerTool configuration block.
      - Look for an 'msvs_large_pdb_path' variable.
      - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
        specified.
      - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.

    Arguments:
      target_dict: The target dictionary to be searched.
      config_name: The name of the configuration of interest.
      vars: A dictionary of common GYP variables with generator-specific values.
    Returns:
      The path of the corresponding PDB file.
    """
    config = target_dict["configurations"][config_name]
    msvs = config.setdefault("msvs_settings", {})

    linker = msvs.get("VCLinkerTool", {})

    pdb_path = linker.get("ProgramDatabaseFile")
    if pdb_path:
        return pdb_path

    variables = target_dict.get("variables", {})
    pdb_path = variables.get("msvs_large_pdb_path", None)
    if pdb_path:
        return pdb_path

    pdb_base = target_dict.get("product_name", target_dict["target_name"])
    pdb_base = "{}.{}.pdb".format(pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
    pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base

    return pdb_path


def InsertLargePdbShims(target_list, target_dicts, vars):
    """Insert a shim target that forces the linker to use 4KB pagesize PDBs.

    This is a workaround for targets with PDBs greater than 1GB in size, the
    limit for the 1KB pagesize PDBs created by the linker by default.

    Arguments:
      target_list: List of target pairs: 'base/base.gyp:base'.
      target_dicts: Dict of target properties keyed on target pair.
      vars: A dictionary of common GYP variables with generator-specific values.
    Returns:
      Tuple of the shimmed version of the inputs.
    """
    # Determine which targets need shimming.
    targets_to_shim = []
    for t in target_dicts:
        target_dict = target_dicts[t]

        # We only want to shim targets that have msvs_large_pdb enabled.
        if not int(target_dict.get("msvs_large_pdb", 0)):
            continue
        # This is intended for executable, shared_library and loadable_module
        # targets where every configuration is set up to produce a PDB output.
        # If any of these conditions is not true then the shim logic will fail
        # below.
        targets_to_shim.append(t)

    large_pdb_shim_cc = _GetLargePdbShimCcPath()

    for t in targets_to_shim:
        target_dict = target_dicts[t]
        target_name = target_dict.get("target_name")

        base_dict = _DeepCopySomeKeys(
            target_dict, ["configurations", "default_configuration", "toolset"]
        )

        # This is the dict for copying the source file (part of the GYP tree)
        # to the intermediate directory of the project. This is necessary because
        # we can't always build a relative path to the shim source file (on Windows
        # GYP and the project may be on different drives), and Ninja hates absolute
        # paths (it ends up generating the .obj and .obj.d alongside the source
        # file, polluting GYPs tree).
        copy_suffix = "large_pdb_copy"
        copy_target_name = target_name + "_" + copy_suffix
        full_copy_target_name = _SuffixName(t, copy_suffix)
        shim_cc_basename = os.path.basename(large_pdb_shim_cc)
        shim_cc_dir = vars["SHARED_INTERMEDIATE_DIR"] + "/" + copy_target_name
        shim_cc_path = shim_cc_dir + "/" + shim_cc_basename
        copy_dict = copy.deepcopy(base_dict)
        copy_dict["target_name"] = copy_target_name
        copy_dict["type"] = "none"
        copy_dict["sources"] = [large_pdb_shim_cc]
        copy_dict["copies"] = [
            {"destination": shim_cc_dir, "files": [large_pdb_shim_cc]}
        ]

        # This is the dict for the PDB generating shim target. It depends on the
        # copy target.
        shim_suffix = "large_pdb_shim"
        shim_target_name = target_name + "_" + shim_suffix
        full_shim_target_name = _SuffixName(t, shim_suffix)
        shim_dict = copy.deepcopy(base_dict)
        shim_dict["target_name"] = shim_target_name
        shim_dict["type"] = "static_library"
        shim_dict["sources"] = [shim_cc_path]
        shim_dict["dependencies"] = [full_copy_target_name]

        # Set up the shim to output its PDB to the same location as the final linker
        # target.
        for config_name, config in shim_dict.get("configurations").items():
            pdb_path = _GetPdbPath(target_dict, config_name, vars)

            # A few keys that we don't want to propagate.
            for key in ["msvs_precompiled_header", "msvs_precompiled_source", "test"]:
                config.pop(key, None)

            msvs = config.setdefault("msvs_settings", {})

            # Update the compiler directives in the shim target.
            compiler = msvs.setdefault("VCCLCompilerTool", {})
            compiler["DebugInformationFormat"] = "3"
            compiler["ProgramDataBaseFileName"] = pdb_path

            # Set the explicit PDB path in the appropriate configuration of the
            # original target.
            config = target_dict["configurations"][config_name]
            msvs = config.setdefault("msvs_settings", {})
            linker = msvs.setdefault("VCLinkerTool", {})
            linker["GenerateDebugInformation"] = "true"
            linker["ProgramDatabaseFile"] = pdb_path

        # Add the new targets. They must go to the beginning of the list so that
        # the dependency generation works as expected in ninja.
        target_list.insert(0, full_copy_target_name)
        target_list.insert(0, full_shim_target_name)
        target_dicts[full_copy_target_name] = copy_dict
        target_dicts[full_shim_target_name] = shim_dict

        # Update the original target to depend on the shim target.
        target_dict.setdefault("dependencies", []).append(full_shim_target_name)

    return (target_list, target_dicts)

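`ShardTargets` above splits any target that sets `msvs_shard` into that many copies, distributing its sources round-robin; a minimal sketch with made-up target names:

```python
# Sketch only: a target with msvs_shard = 2 becomes big_0/big_1, and its
# sources are dealt out round-robin across the shards.
from gyp.MSVSUtil import ShardTargets

target_list = ["base/base.gyp:big#target"]
target_dicts = {
    "base/base.gyp:big#target": {
        "target_name": "big",
        "msvs_shard": 2,
        "sources": ["a.cc", "b.cc", "c.cc"],
    }
}
new_list, new_dicts = ShardTargets(target_list, target_dicts)
print(new_list)  # ['base/base.gyp:big_0#target', 'base/base.gyp:big_1#target']
print(new_dicts["base/base.gyp:big_0#target"]["sources"])  # ['a.cc', 'c.cc']
```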
574
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
generated
vendored
Normal file
574
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
generated
vendored
Normal file
@@ -0,0 +1,574 @@
|
||||
# Copyright (c) 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Handle version information related to Visual Stuio."""
|
||||
|
||||
import errno
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import glob
|
||||
|
||||
|
||||
def JoinPath(*args):
|
||||
return os.path.normpath(os.path.join(*args))
|
||||
|
||||
|
||||
class VisualStudioVersion:
|
||||
"""Information regarding a version of Visual Studio."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
short_name,
|
||||
description,
|
||||
solution_version,
|
||||
project_version,
|
||||
flat_sln,
|
||||
uses_vcxproj,
|
||||
path,
|
||||
sdk_based,
|
||||
default_toolset=None,
|
||||
compatible_sdks=None,
|
||||
):
|
||||
self.short_name = short_name
|
||||
self.description = description
|
||||
self.solution_version = solution_version
|
||||
self.project_version = project_version
|
||||
self.flat_sln = flat_sln
|
||||
self.uses_vcxproj = uses_vcxproj
|
||||
self.path = path
|
||||
self.sdk_based = sdk_based
|
||||
self.default_toolset = default_toolset
|
||||
compatible_sdks = compatible_sdks or []
|
||||
compatible_sdks.sort(key=lambda v: float(v.replace("v", "")), reverse=True)
|
||||
self.compatible_sdks = compatible_sdks
|
||||
|
||||
def ShortName(self):
|
||||
return self.short_name
|
||||
|
||||
def Description(self):
|
||||
"""Get the full description of the version."""
|
||||
return self.description
|
||||
|
||||
def SolutionVersion(self):
|
||||
"""Get the version number of the sln files."""
|
||||
return self.solution_version
|
||||
|
||||
def ProjectVersion(self):
|
||||
"""Get the version number of the vcproj or vcxproj files."""
|
||||
return self.project_version
|
||||
|
||||
def FlatSolution(self):
|
||||
return self.flat_sln
|
||||
|
||||
def UsesVcxproj(self):
|
||||
"""Returns true if this version uses a vcxproj file."""
|
||||
return self.uses_vcxproj
|
||||
|
||||
def ProjectExtension(self):
|
||||
"""Returns the file extension for the project."""
|
||||
return self.uses_vcxproj and ".vcxproj" or ".vcproj"
|
||||
|
||||
def Path(self):
|
||||
"""Returns the path to Visual Studio installation."""
|
||||
return self.path
|
||||
|
||||
def ToolPath(self, tool):
|
||||
"""Returns the path to a given compiler tool. """
|
||||
return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
|
||||
|
||||
def DefaultToolset(self):
|
||||
"""Returns the msbuild toolset version that will be used in the absence
|
||||
of a user override."""
|
||||
return self.default_toolset
|
||||
|
||||
def _SetupScriptInternal(self, target_arch):
|
||||
"""Returns a command (with arguments) to be used to set up the
|
||||
environment."""
|
||||
assert target_arch in ("x86", "x64"), "target_arch not supported"
|
||||
# If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
|
||||
# depot_tools build tools and should run SetEnv.Cmd to set up the
|
||||
# environment. The check for WindowsSDKDir alone is not sufficient because
|
||||
# this is set by running vcvarsall.bat.
|
||||
sdk_dir = os.environ.get("WindowsSDKDir", "")
|
||||
setup_path = JoinPath(sdk_dir, "Bin", "SetEnv.Cmd")
|
||||
if self.sdk_based and sdk_dir and os.path.exists(setup_path):
|
||||
return [setup_path, "/" + target_arch]
|
||||
|
||||
is_host_arch_x64 = (
|
||||
os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64"
|
||||
or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64"
|
||||
)
|
||||
|
||||
# For VS2017 (and newer) it's fairly easy
|
||||
if self.short_name >= "2017":
|
||||
script_path = JoinPath(
|
||||
self.path, "VC", "Auxiliary", "Build", "vcvarsall.bat"
|
||||
)
|
||||
|
||||
# Always use a native executable, cross-compiling if necessary.
|
||||
host_arch = "amd64" if is_host_arch_x64 else "x86"
|
||||
msvc_target_arch = "amd64" if target_arch == "x64" else "x86"
|
||||
arg = host_arch
|
||||
if host_arch != msvc_target_arch:
|
||||
arg += "_" + msvc_target_arch
|
||||
|
||||
return [script_path, arg]
|
||||
|
||||
# We try to find the best version of the env setup batch.
|
||||
vcvarsall = JoinPath(self.path, "VC", "vcvarsall.bat")
|
||||
if target_arch == "x86":
|
||||
if (
|
||||
self.short_name >= "2013"
|
||||
and self.short_name[-1] != "e"
|
||||
and is_host_arch_x64
|
||||
):
|
||||
# VS2013 and later, non-Express have a x64-x86 cross that we want
|
||||
# to prefer.
|
||||
return [vcvarsall, "amd64_x86"]
|
||||
else:
|
||||
# Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat
|
||||
# for x86 because vcvarsall calls vcvars32, which it can only find if
|
||||
# VS??COMNTOOLS is set, which isn't guaranteed.
|
||||
return [JoinPath(self.path, "Common7", "Tools", "vsvars32.bat")]
|
||||
elif target_arch == "x64":
|
||||
arg = "x86_amd64"
|
||||
# Use the 64-on-64 compiler if we're not using an express edition and
|
||||
# we're running on a 64bit OS.
|
||||
if self.short_name[-1] != "e" and is_host_arch_x64:
|
||||
arg = "amd64"
|
||||
return [vcvarsall, arg]
|
||||
|
||||
def SetupScript(self, target_arch):
|
||||
script_data = self._SetupScriptInternal(target_arch)
|
||||
script_path = script_data[0]
|
||||
if not os.path.exists(script_path):
|
||||
raise Exception(
|
||||
"%s is missing - make sure VC++ tools are installed." % script_path
|
||||
)
|
||||
return script_data
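# Illustrative usage sketch (assumes a Windows host with the VC++ tools
# installed; the variable names below are hypothetical): callers typically run
# the returned command through the shell and capture the environment it sets.
#
#   import subprocess
#   setup_cmd = version.SetupScript("x64") + ["&&", "set"]
#   env_dump = subprocess.check_output(" ".join(setup_cmd), shell=True)
#   # env_dump now holds "NAME=value" lines suitable for parsing.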
|
||||
|
||||
|
||||
def _RegistryQueryBase(sysdir, key, value):
|
||||
"""Use reg.exe to read a particular key.
|
||||
|
||||
While ideally we might use the win32 module, we would like gyp to be
|
||||
Python neutral; for instance, Cygwin Python lacks this module.
|
||||
|
||||
Arguments:
|
||||
sysdir: The system subdirectory to attempt to launch reg.exe from.
|
||||
key: The registry key to read from.
|
||||
value: The particular value to read.
|
||||
Return:
|
||||
stdout from reg.exe, or None for failure.
|
||||
"""
|
||||
# Skip if not on Windows or Python Win32 setup issue
|
||||
if sys.platform not in ("win32", "cygwin"):
|
||||
return None
|
||||
# Setup params to pass to and attempt to launch reg.exe
|
||||
cmd = [os.path.join(os.environ.get("WINDIR", ""), sysdir, "reg.exe"), "query", key]
|
||||
if value:
|
||||
cmd.extend(["/v", value])
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
|
||||
# Note that the error text may be in [1] in some cases
|
||||
text = p.communicate()[0].decode("utf-8")
|
||||
# Check return code from reg.exe; officially 0==success and 1==error
|
||||
if p.returncode:
|
||||
return None
|
||||
return text
|
||||
|
||||
|
||||
def _RegistryQuery(key, value=None):
|
||||
r"""Use reg.exe to read a particular key through _RegistryQueryBase.
|
||||
|
||||
First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
|
||||
that fails, it falls back to System32. Sysnative is available on Vista and
|
||||
up and available on Windows Server 2003 and XP through KB patch 942589. Note
|
||||
that Sysnative will always fail when using 64-bit Python, because it is a
virtual directory; in that case System32 works correctly in the first place.
|
||||
|
||||
KB 942589 - http://support.microsoft.com/kb/942589/en-us.
|
||||
|
||||
Arguments:
|
||||
key: The registry key.
|
||||
value: The particular registry value to read (optional).
|
||||
Return:
|
||||
stdout from reg.exe, or None for failure.
|
||||
"""
|
||||
text = None
|
||||
try:
|
||||
text = _RegistryQueryBase("Sysnative", key, value)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
text = _RegistryQueryBase("System32", key, value)
|
||||
else:
|
||||
raise
|
||||
return text
|
||||
|
||||
|
||||
def _RegistryGetValueUsingWinReg(key, value):
|
||||
"""Use the _winreg module to obtain the value of a registry key.
|
||||
|
||||
Args:
|
||||
key: The registry key.
|
||||
value: The particular registry value to read.
|
||||
Return:
|
||||
contents of the registry key's value, or None on failure. Throws
|
||||
ImportError if winreg is unavailable.
|
||||
"""
|
||||
from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
|
||||
try:
|
||||
root, subkey = key.split("\\", 1)
|
||||
assert root == "HKLM" # Only need HKLM for now.
|
||||
with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
|
||||
return QueryValueEx(hkey, value)[0]
|
||||
except OSError:
|
||||
return None
|
||||
|
||||
|
||||
def _RegistryGetValue(key, value):
|
||||
"""Use _winreg or reg.exe to obtain the value of a registry key.
|
||||
|
||||
Using _winreg is preferable because it solves an issue on some corporate
|
||||
environments where access to reg.exe is locked down. However, we still need
|
||||
to fall back to reg.exe for the case where the _winreg module is not available
|
||||
(for example in cygwin python).
|
||||
|
||||
Args:
|
||||
key: The registry key.
|
||||
value: The particular registry value to read.
|
||||
Return:
|
||||
contents of the registry key's value, or None on failure.
|
||||
"""
|
||||
try:
|
||||
return _RegistryGetValueUsingWinReg(key, value)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Fallback to reg.exe if we fail to import _winreg.
|
||||
text = _RegistryQuery(key, value)
|
||||
if not text:
|
||||
return None
|
||||
# Extract value.
|
||||
match = re.search(r"REG_\w+\s+([^\r]+)\r\n", text)
|
||||
if not match:
|
||||
return None
|
||||
return match.group(1)
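# Illustrative example (Windows-only; the result depends on the local install):
# the SxS\VC7 key queried later in this file can be read the same way.
#
#   vc_dir = _RegistryGetValue(r"HKLM\Software\Microsoft\VisualStudio\SxS\VC7", "15.0")
#   # vc_dir is the install path string, or None if the key/value is absent.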
|
||||
|
||||
|
||||
def _CreateVersion(name, path, sdk_based=False):
|
||||
"""Sets up MSVS project generation.
|
||||
|
||||
Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
|
||||
autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
|
||||
passed in that doesn't match a value in versions, Python will raise an error.
|
||||
"""
|
||||
if path:
|
||||
path = os.path.normpath(path)
|
||||
versions = {
|
||||
"2022": VisualStudioVersion(
|
||||
"2022",
|
||||
"Visual Studio 2022",
|
||||
solution_version="12.00",
|
||||
project_version="17.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v143",
|
||||
compatible_sdks=["v8.1", "v10.0"],
|
||||
),
|
||||
"2019": VisualStudioVersion(
|
||||
"2019",
|
||||
"Visual Studio 2019",
|
||||
solution_version="12.00",
|
||||
project_version="16.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v142",
|
||||
compatible_sdks=["v8.1", "v10.0"],
|
||||
),
|
||||
"2017": VisualStudioVersion(
|
||||
"2017",
|
||||
"Visual Studio 2017",
|
||||
solution_version="12.00",
|
||||
project_version="15.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v141",
|
||||
compatible_sdks=["v8.1", "v10.0"],
|
||||
),
|
||||
"2015": VisualStudioVersion(
|
||||
"2015",
|
||||
"Visual Studio 2015",
|
||||
solution_version="12.00",
|
||||
project_version="14.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v140",
|
||||
),
|
||||
"2013": VisualStudioVersion(
|
||||
"2013",
|
||||
"Visual Studio 2013",
|
||||
solution_version="13.00",
|
||||
project_version="12.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v120",
|
||||
),
|
||||
"2013e": VisualStudioVersion(
|
||||
"2013e",
|
||||
"Visual Studio 2013",
|
||||
solution_version="13.00",
|
||||
project_version="12.0",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v120",
|
||||
),
|
||||
"2012": VisualStudioVersion(
|
||||
"2012",
|
||||
"Visual Studio 2012",
|
||||
solution_version="12.00",
|
||||
project_version="4.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v110",
|
||||
),
|
||||
"2012e": VisualStudioVersion(
|
||||
"2012e",
|
||||
"Visual Studio 2012",
|
||||
solution_version="12.00",
|
||||
project_version="4.0",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v110",
|
||||
),
|
||||
"2010": VisualStudioVersion(
|
||||
"2010",
|
||||
"Visual Studio 2010",
|
||||
solution_version="11.00",
|
||||
project_version="4.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2010e": VisualStudioVersion(
|
||||
"2010e",
|
||||
"Visual C++ Express 2010",
|
||||
solution_version="11.00",
|
||||
project_version="4.0",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2008": VisualStudioVersion(
|
||||
"2008",
|
||||
"Visual Studio 2008",
|
||||
solution_version="10.00",
|
||||
project_version="9.00",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2008e": VisualStudioVersion(
|
||||
"2008e",
|
||||
"Visual Studio 2008",
|
||||
solution_version="10.00",
|
||||
project_version="9.00",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2005": VisualStudioVersion(
|
||||
"2005",
|
||||
"Visual Studio 2005",
|
||||
solution_version="9.00",
|
||||
project_version="8.00",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2005e": VisualStudioVersion(
|
||||
"2005e",
|
||||
"Visual Studio 2005",
|
||||
solution_version="9.00",
|
||||
project_version="8.00",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
}
|
||||
return versions[str(name)]
|
||||
|
||||
|
||||
def _ConvertToCygpath(path):
|
||||
"""Convert to cygwin path if we are using cygwin."""
|
||||
if sys.platform == "cygwin":
|
||||
p = subprocess.Popen(["cygpath", path], stdout=subprocess.PIPE)
|
||||
path = p.communicate()[0].decode("utf-8").strip()
|
||||
return path
|
||||
|
||||
|
||||
def _DetectVisualStudioVersions(versions_to_check, force_express):
|
||||
"""Collect the list of installed visual studio versions.
|
||||
|
||||
Returns:
|
||||
A list of visual studio versions installed in descending order of
|
||||
usage preference.
|
||||
Base this on the registry and a quick check if devenv.exe exists.
|
||||
Possibilities are:
|
||||
2005(e) - Visual Studio 2005 (8)
|
||||
2008(e) - Visual Studio 2008 (9)
|
||||
2010(e) - Visual Studio 2010 (10)
|
||||
2012(e) - Visual Studio 2012 (11)
|
||||
2013(e) - Visual Studio 2013 (12)
|
||||
2015 - Visual Studio 2015 (14)
|
||||
2017 - Visual Studio 2017 (15)
|
||||
2019 - Visual Studio 2019 (16)
|
||||
2022 - Visual Studio 2022 (17)
|
||||
Where (e) is e for express editions of MSVS and blank otherwise.
|
||||
"""
|
||||
version_to_year = {
|
||||
"8.0": "2005",
|
||||
"9.0": "2008",
|
||||
"10.0": "2010",
|
||||
"11.0": "2012",
|
||||
"12.0": "2013",
|
||||
"14.0": "2015",
|
||||
"15.0": "2017",
|
||||
"16.0": "2019",
|
||||
"17.0": "2022",
|
||||
}
|
||||
versions = []
|
||||
for version in versions_to_check:
|
||||
# Old method of searching for which VS version is installed
|
||||
# We don't use the 2010-encouraged-way because we also want to get the
|
||||
# path to the binaries, which it doesn't offer.
|
||||
keys = [
|
||||
r"HKLM\Software\Microsoft\VisualStudio\%s" % version,
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s" % version,
|
||||
r"HKLM\Software\Microsoft\VCExpress\%s" % version,
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s" % version,
|
||||
]
|
||||
for key in keys:
path = _RegistryGetValue(key, "InstallDir")
|
||||
if not path:
|
||||
continue
|
||||
path = _ConvertToCygpath(path)
|
||||
# Check for full.
|
||||
full_path = os.path.join(path, "devenv.exe")
|
||||
express_path = os.path.join(path, "*express.exe")
|
||||
if not force_express and os.path.exists(full_path):
|
||||
# Add this one.
|
||||
versions.append(
|
||||
_CreateVersion(
|
||||
version_to_year[version], os.path.join(path, "..", "..")
|
||||
)
|
||||
)
|
||||
# Check for express.
|
||||
elif glob.glob(express_path):
|
||||
# Add this one.
|
||||
versions.append(
|
||||
_CreateVersion(
|
||||
version_to_year[version] + "e", os.path.join(path, "..", "..")
|
||||
)
|
||||
)
|
||||
|
||||
# The old method above does not work when only SDK is installed.
|
||||
keys = [
|
||||
r"HKLM\Software\Microsoft\VisualStudio\SxS\VC7",
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7",
|
||||
r"HKLM\Software\Microsoft\VisualStudio\SxS\VS7",
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7",
|
||||
]
|
||||
for key in keys:
path = _RegistryGetValue(key, version)
|
||||
if not path:
|
||||
continue
|
||||
path = _ConvertToCygpath(path)
|
||||
if version == "15.0":
|
||||
if os.path.exists(path):
|
||||
versions.append(_CreateVersion("2017", path))
|
||||
elif version != "14.0": # There is no Express edition for 2015.
|
||||
versions.append(
|
||||
_CreateVersion(
|
||||
version_to_year[version] + "e",
|
||||
os.path.join(path, ".."),
|
||||
sdk_based=True,
|
||||
)
|
||||
)
|
||||
|
||||
return versions
|
||||
|
||||
|
||||
def SelectVisualStudioVersion(version="auto", allow_fallback=True):
|
||||
"""Select which version of Visual Studio projects to generate.
|
||||
|
||||
Arguments:
|
||||
version: Hook to allow caller to force a particular version (vs auto).
|
||||
Returns:
|
||||
An object representing a visual studio project format version.
|
||||
"""
|
||||
# In auto mode, check environment variable for override.
|
||||
if version == "auto":
|
||||
version = os.environ.get("GYP_MSVS_VERSION", "auto")
|
||||
version_map = {
|
||||
"auto": ("17.0", "16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"),
|
||||
"2005": ("8.0",),
|
||||
"2005e": ("8.0",),
|
||||
"2008": ("9.0",),
|
||||
"2008e": ("9.0",),
|
||||
"2010": ("10.0",),
|
||||
"2010e": ("10.0",),
|
||||
"2012": ("11.0",),
|
||||
"2012e": ("11.0",),
|
||||
"2013": ("12.0",),
|
||||
"2013e": ("12.0",),
|
||||
"2015": ("14.0",),
|
||||
"2017": ("15.0",),
|
||||
"2019": ("16.0",),
|
||||
"2022": ("17.0",),
|
||||
}
|
||||
override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH")
|
||||
if override_path:
|
||||
msvs_version = os.environ.get("GYP_MSVS_VERSION")
|
||||
if not msvs_version:
|
||||
raise ValueError(
|
||||
"GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be "
|
||||
"set to a particular version (e.g. 2010e)."
|
||||
)
|
||||
return _CreateVersion(msvs_version, override_path, sdk_based=True)
|
||||
version = str(version)
|
||||
versions = _DetectVisualStudioVersions(version_map[version], "e" in version)
|
||||
if not versions:
|
||||
if not allow_fallback:
|
||||
raise ValueError("Could not locate Visual Studio installation.")
|
||||
if version == "auto":
|
||||
# Default to 2005 if we couldn't find anything
|
||||
return _CreateVersion("2005", None)
|
||||
else:
|
||||
return _CreateVersion(version, None)
|
||||
return versions[0]
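# Illustrative usage sketch (only meaningful on a machine with Visual Studio
# installed, or with GYP_MSVS_OVERRIDE_PATH and GYP_MSVS_VERSION set):
#
#   vs = SelectVisualStudioVersion("auto")
#   print(vs.Description(), vs.DefaultToolset(), vs.ProjectExtension())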
|
||||
690
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
generated
vendored
Executable file
@@ -0,0 +1,690 @@
#!/usr/bin/env python3

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


import copy
import gyp.input
import argparse
import os.path
import re
import shlex
import sys
import traceback
from gyp.common import GypError
|
||||
|
||||
# Default debug modes for GYP
|
||||
debug = {}
|
||||
|
||||
# List of "official" debug modes, but you can use anything you like.
|
||||
DEBUG_GENERAL = "general"
|
||||
DEBUG_VARIABLES = "variables"
|
||||
DEBUG_INCLUDES = "includes"
|
||||
|
||||
|
||||
def DebugOutput(mode, message, *args):
|
||||
if "all" in gyp.debug or mode in gyp.debug:
|
||||
ctx = ("unknown", 0, "unknown")
|
||||
try:
|
||||
f = traceback.extract_stack(limit=2)
|
||||
if f:
|
||||
ctx = f[0][:3]
|
||||
except Exception:
|
||||
pass
|
||||
if args:
|
||||
message %= args
|
||||
print(
|
||||
"%s:%s:%d:%s %s"
|
||||
% (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message)
|
||||
)
|
||||
|
||||
|
||||
def FindBuildFiles():
|
||||
extension = ".gyp"
|
||||
files = os.listdir(os.getcwd())
|
||||
build_files = []
|
||||
for file in files:
|
||||
if file.endswith(extension):
|
||||
build_files.append(file)
|
||||
return build_files
|
||||
|
||||
|
||||
def Load(
|
||||
build_files,
|
||||
format,
|
||||
default_variables={},
|
||||
includes=[],
|
||||
depth=".",
|
||||
params=None,
|
||||
check=False,
|
||||
circular_check=True,
|
||||
):
|
||||
"""
|
||||
Loads one or more specified build files.
|
||||
default_variables and includes will be copied before use.
|
||||
Returns the generator for the specified format and the
|
||||
data returned by loading the specified build files.
|
||||
"""
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
if "-" in format:
|
||||
format, params["flavor"] = format.split("-", 1)
|
||||
|
||||
default_variables = copy.copy(default_variables)
|
||||
|
||||
# Default variables provided by this program and its modules should be
|
||||
# named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
|
||||
# avoiding collisions with user and automatic variables.
|
||||
default_variables["GENERATOR"] = format
|
||||
default_variables["GENERATOR_FLAVOR"] = params.get("flavor", "")
|
||||
|
||||
# Format can be a custom python file, or by default the name of a module
|
||||
# within gyp.generator.
|
||||
if format.endswith(".py"):
|
||||
generator_name = os.path.splitext(format)[0]
|
||||
path, generator_name = os.path.split(generator_name)
|
||||
|
||||
# Make sure the path to the custom generator is in sys.path
|
||||
# Don't worry about removing it once we are done. Keeping the path
|
||||
# to each generator that is used in sys.path is likely harmless and
|
||||
# arguably a good idea.
|
||||
path = os.path.abspath(path)
|
||||
if path not in sys.path:
|
||||
sys.path.insert(0, path)
|
||||
else:
|
||||
generator_name = "gyp.generator." + format
|
||||
|
||||
# These parameters are passed in order (as opposed to by key)
|
||||
# because ActivePython cannot handle key parameters to __import__.
|
||||
generator = __import__(generator_name, globals(), locals(), generator_name)
|
||||
for (key, val) in generator.generator_default_variables.items():
|
||||
default_variables.setdefault(key, val)
|
||||
|
||||
output_dir = params["options"].generator_output or params["options"].toplevel_dir
|
||||
if default_variables["GENERATOR"] == "ninja":
|
||||
default_variables.setdefault(
|
||||
"PRODUCT_DIR_ABS",
|
||||
os.path.join(output_dir, "out", default_variables["build_type"]),
|
||||
)
|
||||
else:
|
||||
default_variables.setdefault(
|
||||
"PRODUCT_DIR_ABS",
|
||||
os.path.join(output_dir, default_variables["CONFIGURATION_NAME"]),
|
||||
)
|
||||
|
||||
# Give the generator the opportunity to set additional variables based on
|
||||
# the params it will receive in the output phase.
|
||||
if getattr(generator, "CalculateVariables", None):
|
||||
generator.CalculateVariables(default_variables, params)
|
||||
|
||||
# Give the generator the opportunity to set generator_input_info based on
|
||||
# the params it will receive in the output phase.
|
||||
if getattr(generator, "CalculateGeneratorInputInfo", None):
|
||||
generator.CalculateGeneratorInputInfo(params)
|
||||
|
||||
# Fetch the generator specific info that gets fed to input, we use getattr
|
||||
# so we can default things and the generators only have to provide what
|
||||
# they need.
|
||||
generator_input_info = {
|
||||
"non_configuration_keys": getattr(
|
||||
generator, "generator_additional_non_configuration_keys", []
|
||||
),
|
||||
"path_sections": getattr(generator, "generator_additional_path_sections", []),
|
||||
"extra_sources_for_rules": getattr(
|
||||
generator, "generator_extra_sources_for_rules", []
|
||||
),
|
||||
"generator_supports_multiple_toolsets": getattr(
|
||||
generator, "generator_supports_multiple_toolsets", False
|
||||
),
|
||||
"generator_wants_static_library_dependencies_adjusted": getattr(
|
||||
generator, "generator_wants_static_library_dependencies_adjusted", True
|
||||
),
|
||||
"generator_wants_sorted_dependencies": getattr(
|
||||
generator, "generator_wants_sorted_dependencies", False
|
||||
),
|
||||
"generator_filelist_paths": getattr(
|
||||
generator, "generator_filelist_paths", None
|
||||
),
|
||||
}
|
||||
|
||||
# Process the input specific to this generator.
|
||||
result = gyp.input.Load(
|
||||
build_files,
|
||||
default_variables,
|
||||
includes[:],
|
||||
depth,
|
||||
generator_input_info,
|
||||
check,
|
||||
circular_check,
|
||||
params["parallel"],
|
||||
params["root_targets"],
|
||||
)
|
||||
return [generator] + result
|
||||
|
||||
|
||||
def NameValueListToDict(name_value_list):
|
||||
"""
|
||||
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
|
||||
of the pairs. If a string is simply NAME, then the value in the dictionary
|
||||
is set to True. If VALUE can be converted to an integer, it is.
|
||||
"""
|
||||
result = {}
|
||||
for item in name_value_list:
|
||||
tokens = item.split("=", 1)
|
||||
if len(tokens) == 2:
|
||||
# If we can make it an int, use that, otherwise, use the string.
|
||||
try:
|
||||
token_value = int(tokens[1])
|
||||
except ValueError:
|
||||
token_value = tokens[1]
|
||||
# Set the variable to the supplied value.
|
||||
result[tokens[0]] = token_value
|
||||
else:
|
||||
# No value supplied, treat it as a boolean and set it.
|
||||
result[tokens[0]] = True
|
||||
return result
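# Example of the conversion performed above:
#
#   >>> NameValueListToDict(["OS=win", "werror=0", "clang"])
#   {'OS': 'win', 'werror': 0, 'clang': True}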
|
||||
|
||||
|
||||
def ShlexEnv(env_name):
|
||||
flags = os.environ.get(env_name, [])
|
||||
if flags:
|
||||
flags = shlex.split(flags)
|
||||
return flags
|
||||
|
||||
|
||||
def FormatOpt(opt, value):
|
||||
if opt.startswith("--"):
|
||||
return f"{opt}={value}"
|
||||
return opt + value
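# Examples of the formatting rule: long options get "=", short options are
# concatenated directly.
#
#   >>> FormatOpt("--depth", "src")
#   '--depth=src'
#   >>> FormatOpt("-D", "OS=linux")
#   '-DOS=linux'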
|
||||
|
||||
|
||||
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
|
||||
"""Regenerate a list of command line flags, for an option of action='append'.
|
||||
|
||||
The |env_name|, if given, is checked in the environment and used to generate
|
||||
an initial list of options, then the options that were specified on the
|
||||
command line (given in |values|) are appended. This matches the handling of
|
||||
environment variables and command line flags where command line flags override
|
||||
the environment, while not requiring the environment to be set when the flags
|
||||
are used again.
|
||||
"""
|
||||
flags = []
|
||||
if options.use_environment and env_name:
|
||||
for flag_value in ShlexEnv(env_name):
|
||||
value = FormatOpt(flag, predicate(flag_value))
|
||||
if value in flags:
|
||||
flags.remove(value)
|
||||
flags.append(value)
|
||||
if values:
|
||||
for flag_value in values:
|
||||
flags.append(FormatOpt(flag, predicate(flag_value)))
|
||||
return flags
|
||||
|
||||
|
||||
def RegenerateFlags(options):
|
||||
"""Given a parsed options object, and taking the environment variables into
|
||||
account, returns a list of flags that should regenerate an equivalent options
|
||||
object (even in the absence of the environment variables.)
|
||||
|
||||
Any path options will be normalized relative to depth.
|
||||
|
||||
The format flag is not included, as it is assumed the calling generator will
|
||||
set that as appropriate.
|
||||
"""
|
||||
|
||||
def FixPath(path):
|
||||
path = gyp.common.FixIfRelativePath(path, options.depth)
|
||||
if not path:
|
||||
return os.path.curdir
|
||||
return path
|
||||
|
||||
def Noop(value):
|
||||
return value
|
||||
|
||||
# We always want to ignore the environment when regenerating, to avoid
|
||||
# duplicate or changed flags in the environment at the time of regeneration.
|
||||
flags = ["--ignore-environment"]
|
||||
for name, metadata in options._regeneration_metadata.items():
|
||||
opt = metadata["opt"]
|
||||
value = getattr(options, name)
|
||||
value_predicate = FixPath if metadata["type"] == "path" else Noop
|
||||
action = metadata["action"]
|
||||
env_name = metadata["env_name"]
|
||||
if action == "append":
|
||||
flags.extend(
|
||||
RegenerateAppendFlag(opt, value, value_predicate, env_name, options)
|
||||
)
|
||||
elif action in ("store", None): # None is a synonym for 'store'.
|
||||
if value:
|
||||
flags.append(FormatOpt(opt, value_predicate(value)))
|
||||
elif options.use_environment and env_name and os.environ.get(env_name):
|
||||
flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
|
||||
elif action in ("store_true", "store_false"):
|
||||
if (action == "store_true" and value) or (
|
||||
action == "store_false" and not value
|
||||
):
|
||||
flags.append(opt)
|
||||
elif options.use_environment and env_name:
|
||||
print(
|
||||
"Warning: environment regeneration unimplemented "
|
||||
"for %s flag %r env_name %r" % (action, opt, env_name),
|
||||
file=sys.stderr,
|
||||
)
|
||||
else:
|
||||
print(
|
||||
"Warning: regeneration unimplemented for action %r "
|
||||
"flag %r" % (action, opt),
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
return flags
|
||||
|
||||
|
||||
class RegeneratableOptionParser(argparse.ArgumentParser):
|
||||
def __init__(self, usage):
|
||||
self.__regeneratable_options = {}
|
||||
argparse.ArgumentParser.__init__(self, usage=usage)
|
||||
|
||||
def add_argument(self, *args, **kw):
|
||||
"""Add an option to the parser.
|
||||
|
||||
This accepts the same arguments as ArgumentParser.add_argument, plus the
|
||||
following:
|
||||
regenerate: can be set to False to prevent this option from being included
|
||||
in regeneration.
|
||||
env_name: name of environment variable that additional values for this
|
||||
option come from.
|
||||
type: adds type='path', to tell the regenerator that the values of
|
||||
this option need to be made relative to options.depth
|
||||
"""
|
||||
env_name = kw.pop("env_name", None)
|
||||
if "dest" in kw and kw.pop("regenerate", True):
|
||||
dest = kw["dest"]
|
||||
|
||||
# The path type is needed for regenerating; for argparse we can just treat
|
||||
# it as a string.
|
||||
type = kw.get("type")
|
||||
if type == "path":
|
||||
kw["type"] = str
|
||||
|
||||
self.__regeneratable_options[dest] = {
|
||||
"action": kw.get("action"),
|
||||
"type": type,
|
||||
"env_name": env_name,
|
||||
"opt": args[0],
|
||||
}
|
||||
|
||||
argparse.ArgumentParser.add_argument(self, *args, **kw)
|
||||
|
||||
def parse_args(self, *args):
|
||||
values, args = argparse.ArgumentParser.parse_known_args(self, *args)
|
||||
values._regeneration_metadata = self.__regeneratable_options
|
||||
return values, args
|
||||
|
||||
|
||||
def gyp_main(args):
|
||||
my_name = os.path.basename(sys.argv[0])
|
||||
usage = "usage: %(prog)s [options ...] [build_file ...]"
|
||||
|
||||
parser = RegeneratableOptionParser(usage=usage.replace("%s", "%(prog)s"))
|
||||
parser.add_argument(
|
||||
"--build",
|
||||
dest="configs",
|
||||
action="append",
|
||||
help="configuration for build after project generation",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--check", dest="check", action="store_true", help="check format of gyp files"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--config-dir",
|
||||
dest="config_dir",
|
||||
action="store",
|
||||
env_name="GYP_CONFIG_DIR",
|
||||
default=None,
|
||||
help="The location for configuration files like " "include.gypi.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--debug",
|
||||
dest="debug",
|
||||
metavar="DEBUGMODE",
|
||||
action="append",
|
||||
default=[],
|
||||
help="turn on a debugging "
|
||||
'mode for debugging GYP. Supported modes are "variables", '
|
||||
'"includes" and "general" or "all" for all of them.',
|
||||
)
|
||||
parser.add_argument(
|
||||
"-D",
|
||||
dest="defines",
|
||||
action="append",
|
||||
metavar="VAR=VAL",
|
||||
env_name="GYP_DEFINES",
|
||||
help="sets variable VAR to value VAL",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--depth",
|
||||
dest="depth",
|
||||
metavar="PATH",
|
||||
type="path",
|
||||
help="set DEPTH gyp variable to a relative path to PATH",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-f",
|
||||
"--format",
|
||||
dest="formats",
|
||||
action="append",
|
||||
env_name="GYP_GENERATORS",
|
||||
regenerate=False,
|
||||
help="output formats to generate",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-G",
|
||||
dest="generator_flags",
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="FLAG=VAL",
|
||||
env_name="GYP_GENERATOR_FLAGS",
|
||||
help="sets generator flag FLAG to VAL",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--generator-output",
|
||||
dest="generator_output",
|
||||
action="store",
|
||||
default=None,
|
||||
metavar="DIR",
|
||||
type="path",
|
||||
env_name="GYP_GENERATOR_OUTPUT",
|
||||
help="puts generated build files under DIR",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--ignore-environment",
|
||||
dest="use_environment",
|
||||
action="store_false",
|
||||
default=True,
|
||||
regenerate=False,
|
||||
help="do not read options from environment variables",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-I",
|
||||
"--include",
|
||||
dest="includes",
|
||||
action="append",
|
||||
metavar="INCLUDE",
|
||||
type="path",
|
||||
help="files to include in all loaded .gyp files",
|
||||
)
|
||||
# --no-circular-check disables the check for circular relationships between
|
||||
# .gyp files. These relationships should not exist, but they've only been
|
||||
# observed to be harmful with the Xcode generator. Chromium's .gyp files
|
||||
# currently have some circular relationships on non-Mac platforms, so this
|
||||
# option allows the strict behavior to be used on Macs and the lenient
|
||||
# behavior to be used elsewhere.
|
||||
# TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
|
||||
parser.add_argument(
|
||||
"--no-circular-check",
|
||||
dest="circular_check",
|
||||
action="store_false",
|
||||
default=True,
|
||||
regenerate=False,
|
||||
help="don't check for circular relationships between files",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-parallel",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Disable multiprocessing",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-S",
|
||||
"--suffix",
|
||||
dest="suffix",
|
||||
default="",
|
||||
help="suffix to add to generated files",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--toplevel-dir",
|
||||
dest="toplevel_dir",
|
||||
action="store",
|
||||
default=None,
|
||||
metavar="DIR",
|
||||
type="path",
|
||||
help="directory to use as the root of the source tree",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-R",
|
||||
"--root-target",
|
||||
dest="root_targets",
|
||||
action="append",
|
||||
metavar="TARGET",
|
||||
help="include only TARGET and its deep dependencies",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-V",
|
||||
"--version",
|
||||
dest="version",
|
||||
action="store_true",
|
||||
help="Show the version and exit.",
|
||||
)
|
||||
|
||||
options, build_files_arg = parser.parse_args(args)
|
||||
if options.version:
|
||||
import pkg_resources
|
||||
print(f"v{pkg_resources.get_distribution('gyp-next').version}")
|
||||
return 0
|
||||
build_files = build_files_arg
|
||||
|
||||
# Set up the configuration directory (defaults to ~/.gyp)
|
||||
if not options.config_dir:
|
||||
home = None
|
||||
home_dot_gyp = None
|
||||
if options.use_environment:
|
||||
home_dot_gyp = os.environ.get("GYP_CONFIG_DIR", None)
|
||||
if home_dot_gyp:
|
||||
home_dot_gyp = os.path.expanduser(home_dot_gyp)
|
||||
|
||||
if not home_dot_gyp:
|
||||
home_vars = ["HOME"]
|
||||
if sys.platform in ("cygwin", "win32"):
|
||||
home_vars.append("USERPROFILE")
|
||||
for home_var in home_vars:
|
||||
home = os.getenv(home_var)
|
||||
if home:
|
||||
home_dot_gyp = os.path.join(home, ".gyp")
|
||||
if not os.path.exists(home_dot_gyp):
|
||||
home_dot_gyp = None
|
||||
else:
|
||||
break
|
||||
else:
|
||||
home_dot_gyp = os.path.expanduser(options.config_dir)
|
||||
|
||||
if home_dot_gyp and not os.path.exists(home_dot_gyp):
|
||||
home_dot_gyp = None
|
||||
|
||||
if not options.formats:
|
||||
# If no format was given on the command line, then check the env variable.
|
||||
generate_formats = []
|
||||
if options.use_environment:
|
||||
generate_formats = os.environ.get("GYP_GENERATORS", [])
|
||||
if generate_formats:
|
||||
generate_formats = re.split(r"[\s,]", generate_formats)
|
||||
if generate_formats:
|
||||
options.formats = generate_formats
|
||||
else:
|
||||
# Nothing in the variable, default based on platform.
|
||||
if sys.platform == "darwin":
|
||||
options.formats = ["xcode"]
|
||||
elif sys.platform in ("win32", "cygwin"):
|
||||
options.formats = ["msvs"]
|
||||
else:
|
||||
options.formats = ["make"]
|
||||
|
||||
if not options.generator_output and options.use_environment:
|
||||
g_o = os.environ.get("GYP_GENERATOR_OUTPUT")
|
||||
if g_o:
|
||||
options.generator_output = g_o
|
||||
|
||||
options.parallel = not options.no_parallel
|
||||
|
||||
for mode in options.debug:
|
||||
gyp.debug[mode] = 1
|
||||
|
||||
# Do an extra check to avoid work when we're not debugging.
|
||||
if DEBUG_GENERAL in gyp.debug:
|
||||
DebugOutput(DEBUG_GENERAL, "running with these options:")
|
||||
for option, value in sorted(options.__dict__.items()):
|
||||
if option[0] == "_":
|
||||
continue
|
||||
if isinstance(value, str):
|
||||
DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
|
||||
else:
|
||||
DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
|
||||
|
||||
if not build_files:
|
||||
build_files = FindBuildFiles()
|
||||
if not build_files:
|
||||
raise GypError((usage + "\n\n%s: error: no build_file") % (my_name, my_name))
|
||||
|
||||
# TODO(mark): Chromium-specific hack!
|
||||
# For Chromium, the gyp "depth" variable should always be a relative path
|
||||
# to Chromium's top-level "src" directory. If no depth variable was set
|
||||
# on the command line, try to find a "src" directory by looking at the
|
||||
# absolute path to each build file's directory. The first "src" component
|
||||
# found will be treated as though it were the path used for --depth.
|
||||
if not options.depth:
|
||||
for build_file in build_files:
|
||||
build_file_dir = os.path.abspath(os.path.dirname(build_file))
|
||||
build_file_dir_components = build_file_dir.split(os.path.sep)
|
||||
components_len = len(build_file_dir_components)
|
||||
for index in range(components_len - 1, -1, -1):
|
||||
if build_file_dir_components[index] == "src":
|
||||
options.depth = os.path.sep.join(build_file_dir_components)
|
||||
break
|
||||
del build_file_dir_components[index]
|
||||
|
||||
# If the inner loop found something, break without advancing to another
|
||||
# build file.
|
||||
if options.depth:
|
||||
break
|
||||
|
||||
if not options.depth:
|
||||
raise GypError(
|
||||
"Could not automatically locate src directory. This is"
|
||||
"a temporary Chromium feature that will be removed. Use"
|
||||
"--depth as a workaround."
|
||||
)
|
||||
|
||||
# If toplevel-dir is not set, we assume that depth is the root of our source
|
||||
# tree.
|
||||
if not options.toplevel_dir:
|
||||
options.toplevel_dir = options.depth
|
||||
|
||||
# -D on the command line sets variable defaults - D isn't just for define,
|
||||
# it's for default. Perhaps there should be a way to force (-F?) a
|
||||
# variable's value so that it can't be overridden by anything else.
|
||||
cmdline_default_variables = {}
|
||||
defines = []
|
||||
if options.use_environment:
|
||||
defines += ShlexEnv("GYP_DEFINES")
|
||||
if options.defines:
|
||||
defines += options.defines
|
||||
cmdline_default_variables = NameValueListToDict(defines)
|
||||
if DEBUG_GENERAL in gyp.debug:
|
||||
DebugOutput(
|
||||
DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables
|
||||
)
|
||||
|
||||
# Set up includes.
|
||||
includes = []
|
||||
|
||||
# If ~/.gyp/include.gypi exists, it'll be forcibly included into every
|
||||
# .gyp file that's loaded, before anything else is included.
|
||||
if home_dot_gyp:
|
||||
default_include = os.path.join(home_dot_gyp, "include.gypi")
|
||||
if os.path.exists(default_include):
|
||||
print("Using overrides found in " + default_include)
|
||||
includes.append(default_include)
|
||||
|
||||
# Command-line --include files come after the default include.
|
||||
if options.includes:
|
||||
includes.extend(options.includes)
|
||||
|
||||
# Generator flags should be prefixed with the target generator since they
|
||||
# are global across all generator runs.
|
||||
gen_flags = []
|
||||
if options.use_environment:
|
||||
gen_flags += ShlexEnv("GYP_GENERATOR_FLAGS")
|
||||
if options.generator_flags:
|
||||
gen_flags += options.generator_flags
|
||||
generator_flags = NameValueListToDict(gen_flags)
|
||||
if DEBUG_GENERAL in gyp.debug.keys():
|
||||
DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
|
||||
|
||||
# Generate all requested formats (use a set in case we got one format request
|
||||
# twice)
|
||||
for format in set(options.formats):
|
||||
params = {
|
||||
"options": options,
|
||||
"build_files": build_files,
|
||||
"generator_flags": generator_flags,
|
||||
"cwd": os.getcwd(),
|
||||
"build_files_arg": build_files_arg,
|
||||
"gyp_binary": sys.argv[0],
|
||||
"home_dot_gyp": home_dot_gyp,
|
||||
"parallel": options.parallel,
|
||||
"root_targets": options.root_targets,
|
||||
"target_arch": cmdline_default_variables.get("target_arch", ""),
|
||||
}
|
||||
|
||||
# Start with the default variables from the command line.
|
||||
[generator, flat_list, targets, data] = Load(
|
||||
build_files,
|
||||
format,
|
||||
cmdline_default_variables,
|
||||
includes,
|
||||
options.depth,
|
||||
params,
|
||||
options.check,
|
||||
options.circular_check,
|
||||
)
|
||||
|
||||
# TODO(mark): Pass |data| for now because the generator needs a list of
|
||||
# build files that came in. In the future, maybe it should just accept
|
||||
# a list, and not the whole data dict.
|
||||
# NOTE: flat_list is the flattened dependency graph specifying the order
|
||||
# that targets may be built. Build systems that operate serially or that
|
||||
# need to have dependencies defined before dependents reference them should
|
||||
# generate targets in the order specified in flat_list.
|
||||
generator.GenerateOutput(flat_list, targets, data, params)
|
||||
|
||||
if options.configs:
|
||||
valid_configs = targets[flat_list[0]]["configurations"]
|
||||
for conf in options.configs:
|
||||
if conf not in valid_configs:
|
||||
raise GypError("Invalid config specified via --build: %s" % conf)
|
||||
generator.PerformBuild(data, options.configs, params)
|
||||
|
||||
# Done
|
||||
return 0
|
||||
|
||||
|
||||
def main(args):
|
||||
try:
|
||||
return gyp_main(args)
|
||||
except GypError as e:
|
||||
sys.stderr.write("gyp: %s\n" % e)
|
||||
return 1
|
||||
|
||||
|
||||
# NOTE: setuptools generated console_scripts calls function with no arguments
|
||||
def script_main():
|
||||
return main(sys.argv[1:])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(script_main())
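# Illustrative command line (the "gyp" executable name assumes the setuptools
# console script mentioned above is installed; the .gyp path is just an example):
#
#   gyp -f ninja --depth=. -D target_arch=x64 build/all.gyp
#
# which generates ninja files for build/all.gyp with target_arch=x64 as a
# default variable.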
|
||||
661
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/common.py
generated
vendored
Normal file
@@ -0,0 +1,661 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import errno
import filecmp
import os.path
import re
import tempfile
import sys
import subprocess

from collections.abc import MutableSet
|
||||
|
||||
# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
|
||||
# among other "problems".
|
||||
class memoize:
|
||||
def __init__(self, func):
|
||||
self.func = func
|
||||
self.cache = {}
|
||||
|
||||
def __call__(self, *args):
|
||||
try:
|
||||
return self.cache[args]
|
||||
except KeyError:
|
||||
result = self.func(*args)
|
||||
self.cache[args] = result
|
||||
return result
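# Illustrative use of the decorator: results are cached per positional-argument
# tuple, so the arguments must be hashable.
#
#   @memoize
#   def Fib(n):
#       return n if n < 2 else Fib(n - 1) + Fib(n - 2)
#
#   Fib(30)  # computed once; repeated calls hit self.cache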
|
||||
|
||||
|
||||
class GypError(Exception):
|
||||
"""Error class representing an error, which is to be presented
|
||||
to the user. The main entry point will catch and display this.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def ExceptionAppend(e, msg):
|
||||
"""Append a message to the given exception's message."""
|
||||
if not e.args:
|
||||
e.args = (msg,)
|
||||
elif len(e.args) == 1:
|
||||
e.args = (str(e.args[0]) + " " + msg,)
|
||||
else:
|
||||
e.args = (str(e.args[0]) + " " + msg,) + e.args[1:]
|
||||
|
||||
|
||||
def FindQualifiedTargets(target, qualified_list):
|
||||
"""
|
||||
Given a list of qualified targets, return the qualified targets for the
|
||||
specified |target|.
|
||||
"""
|
||||
return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
|
||||
|
||||
|
||||
def ParseQualifiedTarget(target):
|
||||
# Splits a qualified target into a build file, target name and toolset.
|
||||
|
||||
# NOTE: rsplit is used to disambiguate the Windows drive letter separator.
|
||||
target_split = target.rsplit(":", 1)
|
||||
if len(target_split) == 2:
|
||||
[build_file, target] = target_split
|
||||
else:
|
||||
build_file = None
|
||||
|
||||
target_split = target.rsplit("#", 1)
|
||||
if len(target_split) == 2:
|
||||
[target, toolset] = target_split
|
||||
else:
|
||||
toolset = None
|
||||
|
||||
return [build_file, target, toolset]
|
||||
|
||||
|
||||
def ResolveTarget(build_file, target, toolset):
|
||||
# This function resolves a target into a canonical form:
|
||||
# - a fully defined build file, either absolute or relative to the current
|
||||
# directory
|
||||
# - a target name
|
||||
# - a toolset
|
||||
#
|
||||
# build_file is the file relative to which 'target' is defined.
|
||||
# target is the qualified target.
|
||||
# toolset is the default toolset for that target.
|
||||
[parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
|
||||
|
||||
if parsed_build_file:
|
||||
if build_file:
|
||||
# If a relative path, parsed_build_file is relative to the directory
|
||||
# containing build_file. If build_file is not in the current directory,
|
||||
# parsed_build_file is not a usable path as-is. Resolve it by
|
||||
# interpreting it as relative to build_file. If parsed_build_file is
|
||||
# absolute, it is usable as a path regardless of the current directory,
|
||||
# and os.path.join will return it as-is.
|
||||
build_file = os.path.normpath(
|
||||
os.path.join(os.path.dirname(build_file), parsed_build_file)
|
||||
)
|
||||
# Further (to handle cases like ../cwd), make it relative to cwd.
|
||||
if not os.path.isabs(build_file):
|
||||
build_file = RelativePath(build_file, ".")
|
||||
else:
|
||||
build_file = parsed_build_file
|
||||
|
||||
if parsed_toolset:
|
||||
toolset = parsed_toolset
|
||||
|
||||
return [build_file, target, toolset]
|
||||
|
||||
|
||||
def BuildFile(fully_qualified_target):
|
||||
# Extracts the build file from the fully qualified target.
|
||||
return ParseQualifiedTarget(fully_qualified_target)[0]
|
||||
|
||||
|
||||
def GetEnvironFallback(var_list, default):
|
||||
"""Look up a key in the environment, with fallback to secondary keys
|
||||
and finally falling back to a default value."""
|
||||
for var in var_list:
|
||||
if var in os.environ:
|
||||
return os.environ[var]
|
||||
return default
|
||||
|
||||
|
||||
def QualifiedTarget(build_file, target, toolset):
|
||||
# "Qualified" means the file that a target was defined in and the target
|
||||
# name, separated by a colon, suffixed by a # and the toolset name:
|
||||
# /path/to/file.gyp:target_name#toolset
|
||||
fully_qualified = build_file + ":" + target
|
||||
if toolset:
|
||||
fully_qualified = fully_qualified + "#" + toolset
|
||||
return fully_qualified
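# Example of the qualified-target format described above, and its inverse:
#
#   >>> QualifiedTarget("foo/bar.gyp", "baz", "host")
#   'foo/bar.gyp:baz#host'
#   >>> ParseQualifiedTarget("foo/bar.gyp:baz#host")
#   ['foo/bar.gyp', 'baz', 'host']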
|
||||
|
||||
|
||||
@memoize
|
||||
def RelativePath(path, relative_to, follow_path_symlink=True):
|
||||
# Assuming both |path| and |relative_to| are relative to the current
|
||||
# directory, returns a relative path that identifies path relative to
|
||||
# relative_to.
|
||||
# If |follow_path_symlink| is true (default) and |path| is a symlink, then
|
||||
# this method returns a path to the real file represented by |path|. If it is
|
||||
# false, this method returns a path to the symlink. If |path| is not a
|
||||
# symlink, this option has no effect.
|
||||
|
||||
# Convert to normalized (and therefore absolute paths).
|
||||
if follow_path_symlink:
|
||||
path = os.path.realpath(path)
|
||||
else:
|
||||
path = os.path.abspath(path)
|
||||
relative_to = os.path.realpath(relative_to)
|
||||
|
||||
# On Windows, we can't create a relative path to a different drive, so just
|
||||
# use the absolute path.
|
||||
if sys.platform == "win32":
|
||||
if (
|
||||
os.path.splitdrive(path)[0].lower()
|
||||
!= os.path.splitdrive(relative_to)[0].lower()
|
||||
):
|
||||
return path
|
||||
|
||||
# Split the paths into components.
|
||||
path_split = path.split(os.path.sep)
|
||||
relative_to_split = relative_to.split(os.path.sep)
|
||||
|
||||
# Determine how much of the prefix the two paths share.
|
||||
prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
|
||||
|
||||
# Put enough ".." components to back up out of relative_to to the common
|
||||
# prefix, and then append the part of path_split after the common prefix.
|
||||
relative_split = [os.path.pardir] * (
|
||||
len(relative_to_split) - prefix_len
|
||||
) + path_split[prefix_len:]
|
||||
|
||||
if len(relative_split) == 0:
|
||||
# The paths were the same.
|
||||
return ""
|
||||
|
||||
# Turn it back into a string and we're done.
|
||||
return os.path.join(*relative_split)
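# Illustrative results (assuming no symlinks are involved, so realpath does not
# rewrite the inputs):
#
#   >>> RelativePath("src/foo/bar.h", "src")
#   'foo/bar.h'
#   >>> RelativePath("src", "src/foo")
#   '..'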
|
||||
|
||||
|
||||
@memoize
|
||||
def InvertRelativePath(path, toplevel_dir=None):
|
||||
"""Given a path like foo/bar that is relative to toplevel_dir, return
|
||||
the inverse relative path back to the toplevel_dir.
|
||||
|
||||
E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
|
||||
should always produce the empty string, unless the path contains symlinks.
|
||||
"""
|
||||
if not path:
|
||||
return path
|
||||
toplevel_dir = "." if toplevel_dir is None else toplevel_dir
|
||||
return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
|
||||
|
||||
|
||||
def FixIfRelativePath(path, relative_to):
|
||||
# Like RelativePath but returns |path| unchanged if it is absolute.
|
||||
if os.path.isabs(path):
|
||||
return path
|
||||
return RelativePath(path, relative_to)
|
||||
|
||||
|
||||
def UnrelativePath(path, relative_to):
|
||||
# Assuming that |relative_to| is relative to the current directory, and |path|
|
||||
# is a path relative to the dirname of |relative_to|, returns a path that
|
||||
# identifies |path| relative to the current directory.
|
||||
rel_dir = os.path.dirname(relative_to)
|
||||
return os.path.normpath(os.path.join(rel_dir, path))
|
||||
|
||||
|
||||
# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
|
||||
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
|
||||
# and the documentation for various shells.
|
||||
|
||||
# _quote is a pattern that should match any argument that needs to be quoted
|
||||
# with double-quotes by EncodePOSIXShellArgument. It matches the following
|
||||
# characters appearing anywhere in an argument:
|
||||
# \t, \n, space parameter separators
|
||||
# # comments
|
||||
# $ expansions (quoted to always expand within one argument)
|
||||
# % called out by IEEE 1003.1 XCU.2.2
|
||||
# & job control
|
||||
# ' quoting
|
||||
# (, ) subshell execution
|
||||
# *, ?, [ pathname expansion
|
||||
# ; command delimiter
|
||||
# <, >, | redirection
|
||||
# = assignment
|
||||
# {, } brace expansion (bash)
|
||||
# ~ tilde expansion
|
||||
# It also matches the empty string, because "" (or '') is the only way to
|
||||
# represent an empty string literal argument to a POSIX shell.
|
||||
#
|
||||
# This does not match the characters in _escape, because those need to be
|
||||
# backslash-escaped regardless of whether they appear in a double-quoted
|
||||
# string.
|
||||
_quote = re.compile("[\t\n #$%&'()*;<=>?[{|}~]|^$")
|
||||
|
||||
# _escape is a pattern that should match any character that needs to be
|
||||
# escaped with a backslash, whether or not the argument matched the _quote
|
||||
# pattern. _escape is used with re.sub to backslash anything in _escape's
|
||||
# first match group, hence the (parentheses) in the regular expression.
|
||||
#
|
||||
# _escape matches the following characters appearing anywhere in an argument:
|
||||
# " to prevent POSIX shells from interpreting this character for quoting
|
||||
# \ to prevent POSIX shells from interpreting this character for escaping
|
||||
# ` to prevent POSIX shells from interpreting this character for command
|
||||
# substitution
|
||||
# Missing from this list is $, because the desired behavior of
|
||||
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
|
||||
#
|
||||
# Also missing from this list is !, which bash will interpret as the history
|
||||
# expansion character when history is enabled. bash does not enable history
|
||||
# by default in non-interactive shells, so this is not thought to be a problem.
|
||||
# ! was omitted from this list because bash interprets "\!" as a literal string
|
||||
# including the backslash character (avoiding history expansion but retaining
|
||||
# the backslash), which would not be correct for argument encoding. Handling
|
||||
# this case properly would also be problematic because bash allows the history
|
||||
# character to be changed with the histchars shell variable. Fortunately,
|
||||
# as history is not enabled in non-interactive shells and
|
||||
# EncodePOSIXShellArgument is only expected to encode for non-interactive
|
||||
# shells, there is no room for error here by ignoring !.
|
||||
_escape = re.compile(r'(["\\`])')
|
||||
|
||||
|
||||
def EncodePOSIXShellArgument(argument):
|
||||
"""Encodes |argument| suitably for consumption by POSIX shells.
|
||||
|
||||
argument may be quoted and escaped as necessary to ensure that POSIX shells
|
||||
treat the returned value as a literal representing the argument passed to
|
||||
this function. Parameter (variable) expansions beginning with $ are allowed
|
||||
to remain intact without escaping the $, to allow the argument to contain
|
||||
references to variables to be expanded by the shell.
|
||||
"""
|
||||
|
||||
if not isinstance(argument, str):
|
||||
argument = str(argument)
|
||||
|
||||
if _quote.search(argument):
|
||||
quote = '"'
|
||||
else:
|
||||
quote = ""
|
||||
|
||||
encoded = quote + re.sub(_escape, r"\\\1", argument) + quote
|
||||
|
||||
return encoded
|
||||
|
||||
|
||||
def EncodePOSIXShellList(list):
|
||||
"""Encodes |list| suitably for consumption by POSIX shells.
|
||||
|
||||
Returns EncodePOSIXShellArgument for each item in list, and joins them
|
||||
together using the space character as an argument separator.
|
||||
"""
|
||||
|
||||
encoded_arguments = []
|
||||
for argument in list:
|
||||
encoded_arguments.append(EncodePOSIXShellArgument(argument))
|
||||
return " ".join(encoded_arguments)
|
||||
|
||||
|
||||
def DeepDependencyTargets(target_dicts, roots):
|
||||
"""Returns the recursive list of target dependencies."""
|
||||
dependencies = set()
|
||||
pending = set(roots)
|
||||
while pending:
|
||||
# Pluck out one.
|
||||
r = pending.pop()
|
||||
# Skip if visited already.
|
||||
if r in dependencies:
|
||||
continue
|
||||
# Add it.
|
||||
dependencies.add(r)
|
||||
# Add its children.
|
||||
spec = target_dicts[r]
|
||||
pending.update(set(spec.get("dependencies", [])))
|
||||
pending.update(set(spec.get("dependencies_original", [])))
|
||||
return list(dependencies - set(roots))
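# Example: given
#   target_dicts = {"a.gyp:app#target": {"dependencies": ["a.gyp:lib#target"]},
#                   "a.gyp:lib#target": {}}
# DeepDependencyTargets(target_dicts, ["a.gyp:app#target"]) returns
# ["a.gyp:lib#target"]; the roots themselves are excluded from the result.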
|
||||
|
||||
|
||||
def BuildFileTargets(target_list, build_file):
|
||||
"""From a target_list, returns the subset from the specified build_file.
|
||||
"""
|
||||
return [p for p in target_list if BuildFile(p) == build_file]
|
||||
|
||||
|
||||
def AllTargets(target_list, target_dicts, build_file):
|
||||
"""Returns all targets (direct and dependencies) for the specified build_file.
|
||||
"""
|
||||
bftargets = BuildFileTargets(target_list, build_file)
|
||||
deptargets = DeepDependencyTargets(target_dicts, bftargets)
|
||||
return bftargets + deptargets
|
||||
|
||||
|
||||
def WriteOnDiff(filename):
|
||||
"""Write to a file only if the new contents differ.
|
||||
|
||||
Arguments:
|
||||
filename: name of the file to potentially write to.
|
||||
Returns:
|
||||
A file like object which will write to temporary file and only overwrite
|
||||
the target if it differs (on close).
|
||||
"""
|
||||
|
||||
class Writer:
|
||||
"""Wrapper around file which only covers the target if it differs."""
|
||||
|
||||
def __init__(self):
|
||||
# On Cygwin remove the "dir" argument
|
||||
# `C:` prefixed paths are treated as relative,
|
||||
# consequently ending up with current dir "/cygdrive/c/..."
|
||||
# being prefixed to those, which was
|
||||
# obviously a non-existent path,
|
||||
# for example: "/cygdrive/c/<some folder>/C:\<my win style abs path>".
|
||||
# For more details see:
|
||||
# https://docs.python.org/2/library/tempfile.html#tempfile.mkstemp
|
||||
base_temp_dir = "" if IsCygwin() else os.path.dirname(filename)
|
||||
# Pick temporary file.
|
||||
tmp_fd, self.tmp_path = tempfile.mkstemp(
|
||||
suffix=".tmp",
|
||||
prefix=os.path.split(filename)[1] + ".gyp.",
|
||||
dir=base_temp_dir,
|
||||
)
|
||||
try:
|
||||
self.tmp_file = os.fdopen(tmp_fd, "wb")
|
||||
except Exception:
|
||||
# Don't leave turds behind.
|
||||
os.unlink(self.tmp_path)
|
||||
raise
|
||||
|
||||
def __getattr__(self, attrname):
|
||||
# Delegate everything else to self.tmp_file
|
||||
return getattr(self.tmp_file, attrname)
|
||||
|
||||
def close(self):
|
||||
try:
|
||||
# Close tmp file.
|
||||
self.tmp_file.close()
|
||||
# Determine if different.
|
||||
same = False
|
||||
try:
|
||||
same = filecmp.cmp(self.tmp_path, filename, False)
|
||||
except OSError as e:
|
||||
if e.errno != errno.ENOENT:
|
||||
raise
|
||||
|
||||
if same:
|
||||
# The new file is identical to the old one, just get rid of the new
|
||||
# one.
|
||||
os.unlink(self.tmp_path)
|
||||
else:
|
||||
# The new file is different from the old one,
|
||||
# or there is no old one.
|
||||
# Rename the new file to the permanent name.
|
||||
#
|
||||
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
|
||||
# file that can only be read by the owner, regardless of the umask.
|
||||
# There's no reason to not respect the umask here,
|
||||
# which means that an extra hoop is required
|
||||
# to fetch it and reset the new file's mode.
|
||||
#
|
||||
# No way to get the umask without setting a new one? Set a safe one
|
||||
# and then set it back to the old value.
|
||||
umask = os.umask(0o77)
|
||||
os.umask(umask)
|
||||
os.chmod(self.tmp_path, 0o666 & ~umask)
|
||||
if sys.platform == "win32" and os.path.exists(filename):
|
||||
# NOTE: on windows (but not cygwin) rename will not replace an
|
||||
# existing file, so it must be preceded with a remove.
|
||||
# Sadly there is no way to make the switch atomic.
|
||||
os.remove(filename)
|
||||
os.rename(self.tmp_path, filename)
|
||||
except Exception:
|
||||
# Don't leave turds behind.
|
||||
os.unlink(self.tmp_path)
|
||||
raise
|
||||
|
||||
def write(self, s):
|
||||
self.tmp_file.write(s.encode("utf-8"))
|
||||
|
||||
return Writer()
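# Minimal usage sketch (illustrative, not from the original source; the path is
# hypothetical): the returned object behaves like a writable file, but close()
# only replaces the target when the generated contents actually changed, which
# keeps timestamps stable for incremental builds.
def _example_write_on_diff():  # pragma: no cover - example only
    EnsureDirExists("build/example.ninja")  # helper defined below
    out = WriteOnDiff("build/example.ninja")
    out.write("rule cc\n")  # encoded to UTF-8 and written to a temp file
    out.close()  # compares against any existing file before renaming over it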
|
||||
|
||||
|
||||
def EnsureDirExists(path):
|
||||
"""Make sure the directory for |path| exists."""
|
||||
try:
|
||||
os.makedirs(os.path.dirname(path))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
def GetFlavor(params):
|
||||
"""Returns |params.flavor| if it's set, the system's default flavor else."""
|
||||
flavors = {
|
||||
"cygwin": "win",
|
||||
"win32": "win",
|
||||
"darwin": "mac",
|
||||
}
|
||||
|
||||
if "flavor" in params:
|
||||
return params["flavor"]
|
||||
if sys.platform in flavors:
|
||||
return flavors[sys.platform]
|
||||
if sys.platform.startswith("sunos"):
|
||||
return "solaris"
|
||||
if sys.platform.startswith(("dragonfly", "freebsd")):
|
||||
return "freebsd"
|
||||
if sys.platform.startswith("openbsd"):
|
||||
return "openbsd"
|
||||
if sys.platform.startswith("netbsd"):
|
||||
return "netbsd"
|
||||
if sys.platform.startswith("aix"):
|
||||
return "aix"
|
||||
if sys.platform.startswith(("os390", "zos")):
|
||||
return "zos"
|
||||
if sys.platform == "os400":
|
||||
return "os400"
|
||||
|
||||
return "linux"
|
||||
|
||||
|
||||
def CopyTool(flavor, out_path, generator_flags={}):
|
||||
"""Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
|
||||
to |out_path|."""
|
||||
# aix and solaris just need flock emulation. mac and win use more complicated
|
||||
# support scripts.
|
||||
prefix = {
|
||||
"aix": "flock",
|
||||
"os400": "flock",
|
||||
"solaris": "flock",
|
||||
"mac": "mac",
|
||||
"ios": "mac",
|
||||
"win": "win",
|
||||
}.get(flavor, None)
|
||||
if not prefix:
|
||||
return
|
||||
|
||||
# Slurp input file.
|
||||
source_path = os.path.join(
|
||||
os.path.dirname(os.path.abspath(__file__)), "%s_tool.py" % prefix
|
||||
)
|
||||
with open(source_path) as source_file:
|
||||
source = source_file.readlines()
|
||||
|
||||
# Set custom header flags.
|
||||
header = "# Generated by gyp. Do not edit.\n"
|
||||
mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
|
||||
if flavor == "mac" and mac_toolchain_dir:
|
||||
header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" % mac_toolchain_dir
|
||||
|
||||
# Add header and write it out.
|
||||
tool_path = os.path.join(out_path, "gyp-%s-tool" % prefix)
|
||||
with open(tool_path, "w") as tool_file:
|
||||
tool_file.write("".join([source[0], header] + source[1:]))
|
||||
|
||||
# Make file executable.
|
||||
os.chmod(tool_path, 0o755)
|
||||
|
||||
|
||||
# From Alex Martelli,
|
||||
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
|
||||
# ASPN: Python Cookbook: Remove duplicates from a sequence
|
||||
# First comment, dated 2001/10/13.
|
||||
# (Also in the printed Python Cookbook.)
|
||||
|
||||
|
||||
def uniquer(seq, idfun=lambda x: x):
|
||||
seen = {}
|
||||
result = []
|
||||
for item in seq:
|
||||
marker = idfun(item)
|
||||
if marker in seen:
|
||||
continue
|
||||
seen[marker] = 1
|
||||
result.append(item)
|
||||
return result
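# Illustrative sketch (not part of the original module): uniquer() keeps the
# first occurrence of each item and preserves input order; idfun lets callers
# deduplicate by a derived key.
def _example_uniquer():  # pragma: no cover - example only
    assert uniquer([3, 1, 3, 2, 1]) == [3, 1, 2]
    # Case-insensitive dedup: the first spelling seen is the one kept.
    assert uniquer(["A", "a", "b"], idfun=str.lower) == ["A", "b"]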
|
||||
|
||||
|
||||
# Based on http://code.activestate.com/recipes/576694/.
|
||||
class OrderedSet(MutableSet):
|
||||
def __init__(self, iterable=None):
|
||||
self.end = end = []
|
||||
end += [None, end, end] # sentinel node for doubly linked list
|
||||
self.map = {} # key --> [key, prev, next]
|
||||
if iterable is not None:
|
||||
self |= iterable
|
||||
|
||||
def __len__(self):
|
||||
return len(self.map)
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self.map
|
||||
|
||||
def add(self, key):
|
||||
if key not in self.map:
|
||||
end = self.end
|
||||
curr = end[1]
|
||||
curr[2] = end[1] = self.map[key] = [key, curr, end]
|
||||
|
||||
def discard(self, key):
|
||||
if key in self.map:
|
||||
key, prev_item, next_item = self.map.pop(key)
|
||||
prev_item[2] = next_item
|
||||
next_item[1] = prev_item
|
||||
|
||||
def __iter__(self):
|
||||
end = self.end
|
||||
curr = end[2]
|
||||
while curr is not end:
|
||||
yield curr[0]
|
||||
curr = curr[2]
|
||||
|
||||
def __reversed__(self):
|
||||
end = self.end
|
||||
curr = end[1]
|
||||
while curr is not end:
|
||||
yield curr[0]
|
||||
curr = curr[1]
|
||||
|
||||
# The second argument is an addition that causes a pylint warning.
|
||||
def pop(self, last=True): # pylint: disable=W0221
|
||||
if not self:
|
||||
raise KeyError("set is empty")
|
||||
key = self.end[1][0] if last else self.end[2][0]
|
||||
self.discard(key)
|
||||
return key
|
||||
|
||||
def __repr__(self):
|
||||
if not self:
|
||||
return f"{self.__class__.__name__}()"
|
||||
return f"{self.__class__.__name__}({list(self)!r})"
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, OrderedSet):
|
||||
return len(self) == len(other) and list(self) == list(other)
|
||||
return set(self) == set(other)
|
||||
|
||||
# Extensions to the recipe.
|
||||
def update(self, iterable):
|
||||
for i in iterable:
|
||||
if i not in self:
|
||||
self.add(i)
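# Illustrative sketch (not part of the original module): OrderedSet behaves
# like a set but remembers insertion order, which keeps generated build files
# deterministic across runs.
def _example_ordered_set():  # pragma: no cover - example only
    s = OrderedSet(["b", "a", "b", "c"])
    s.update(["d", "a"])
    assert list(s) == ["b", "a", "c", "d"]
    s.discard("a")
    assert s.pop() == "d"            # pops from the end by default
    assert s.pop(last=False) == "b"  # or from the front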
|
||||
|
||||
|
||||
class CycleError(Exception):
|
||||
"""An exception raised when an unexpected cycle is detected."""
|
||||
|
||||
def __init__(self, nodes):
|
||||
self.nodes = nodes
|
||||
|
||||
def __str__(self):
|
||||
return "CycleError: cycle involving: " + str(self.nodes)
|
||||
|
||||
|
||||
def TopologicallySorted(graph, get_edges):
|
||||
r"""Topologically sort based on a user provided edge definition.
|
||||
|
||||
Args:
|
||||
graph: A list of node names.
|
||||
get_edges: A function mapping from node name to a hashable collection
|
||||
of node names which this node has outgoing edges to.
|
||||
Returns:
|
||||
A list containing all of the nodes in the graph in topological order.
|
||||
It is assumed that calling get_edges once for each node and caching is
|
||||
cheaper than repeatedly calling get_edges.
|
||||
Raises:
|
||||
CycleError in the event of a cycle.
|
||||
Example:
|
||||
graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
|
||||
def GetEdges(node):
|
||||
return re.findall(r'\$\(([^)]+)\)', graph[node])
|
||||
print(TopologicallySorted(graph.keys(), GetEdges))
|
||||
==>
|
||||
['a', 'c', 'b']
|
||||
"""
|
||||
get_edges = memoize(get_edges)
|
||||
visited = set()
|
||||
visiting = set()
|
||||
ordered_nodes = []
|
||||
|
||||
def Visit(node):
|
||||
if node in visiting:
|
||||
raise CycleError(visiting)
|
||||
if node in visited:
|
||||
return
|
||||
visited.add(node)
|
||||
visiting.add(node)
|
||||
for neighbor in get_edges(node):
|
||||
Visit(neighbor)
|
||||
visiting.remove(node)
|
||||
ordered_nodes.insert(0, node)
|
||||
|
||||
for node in sorted(graph):
|
||||
Visit(node)
|
||||
return ordered_nodes
|
||||
|
||||
|
||||
def CrossCompileRequested():
|
||||
# TODO: figure out how to not build extra host objects in the
|
||||
# non-cross-compile case when this is enabled, and enable unconditionally.
|
||||
return (
|
||||
os.environ.get("GYP_CROSSCOMPILE")
|
||||
or os.environ.get("AR_host")
|
||||
or os.environ.get("CC_host")
|
||||
or os.environ.get("CXX_host")
|
||||
or os.environ.get("AR_target")
|
||||
or os.environ.get("CC_target")
|
||||
or os.environ.get("CXX_target")
|
||||
)
|
||||
|
||||
|
||||
def IsCygwin():
|
||||
try:
|
||||
out = subprocess.Popen(
|
||||
"uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
stdout = out.communicate()[0].decode("utf-8")
|
||||
return "CYGWIN" in str(stdout)
|
||||
except Exception:
|
||||
return False
|
||||
78
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
generated
vendored
Executable file
78
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
generated
vendored
Executable file
@@ -0,0 +1,78 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Unit tests for the common.py file."""
|
||||
|
||||
import gyp.common
|
||||
import unittest
|
||||
import sys
|
||||
|
||||
|
||||
class TestTopologicallySorted(unittest.TestCase):
|
||||
def test_Valid(self):
|
||||
"""Test that sorting works on a valid graph with one possible order."""
|
||||
graph = {
|
||||
"a": ["b", "c"],
|
||||
"b": [],
|
||||
"c": ["d"],
|
||||
"d": ["b"],
|
||||
}
|
||||
|
||||
def GetEdge(node):
|
||||
return tuple(graph[node])
|
||||
|
||||
self.assertEqual(
|
||||
gyp.common.TopologicallySorted(graph.keys(), GetEdge), ["a", "c", "d", "b"]
|
||||
)
|
||||
|
||||
def test_Cycle(self):
|
||||
"""Test that an exception is thrown on a cyclic graph."""
|
||||
graph = {
|
||||
"a": ["b"],
|
||||
"b": ["c"],
|
||||
"c": ["d"],
|
||||
"d": ["a"],
|
||||
}
|
||||
|
||||
def GetEdge(node):
|
||||
return tuple(graph[node])
|
||||
|
||||
self.assertRaises(
|
||||
gyp.common.CycleError, gyp.common.TopologicallySorted, graph.keys(), GetEdge
|
||||
)
|
||||
|
||||
|
||||
class TestGetFlavor(unittest.TestCase):
|
||||
"""Test that gyp.common.GetFlavor works as intended"""
|
||||
|
||||
original_platform = ""
|
||||
|
||||
def setUp(self):
|
||||
self.original_platform = sys.platform
|
||||
|
||||
def tearDown(self):
|
||||
sys.platform = self.original_platform
|
||||
|
||||
def assertFlavor(self, expected, argument, param):
|
||||
sys.platform = argument
|
||||
self.assertEqual(expected, gyp.common.GetFlavor(param))
|
||||
|
||||
def test_platform_default(self):
|
||||
self.assertFlavor("freebsd", "freebsd9", {})
|
||||
self.assertFlavor("freebsd", "freebsd10", {})
|
||||
self.assertFlavor("openbsd", "openbsd5", {})
|
||||
self.assertFlavor("solaris", "sunos5", {})
|
||||
self.assertFlavor("solaris", "sunos", {})
|
||||
self.assertFlavor("linux", "linux2", {})
|
||||
self.assertFlavor("linux", "linux3", {})
|
||||
self.assertFlavor("linux", "linux", {})
|
||||
|
||||
def test_param(self):
|
||||
self.assertFlavor("foobar", "linux2", {"flavor": "foobar"})
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
165
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
generated
vendored
Normal file
165
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
generated
vendored
Normal file
@@ -0,0 +1,165 @@
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import sys
|
||||
import re
|
||||
import os
|
||||
import locale
|
||||
from functools import reduce
|
||||
|
||||
|
||||
def XmlToString(content, encoding="utf-8", pretty=False):
|
||||
""" Writes the XML content to disk, touching the file only if it has changed.
|
||||
|
||||
Visual Studio files have a lot of pre-defined structures. This function makes
|
||||
it easy to represent these structures as Python data structures, instead of
|
||||
having to create a lot of function calls.
|
||||
|
||||
Each XML element of the content is represented as a list composed of:
|
||||
1. The name of the element, a string,
|
||||
2. The attributes of the element, a dictionary (optional), and
|
||||
3+. The content of the element, if any. Strings are simple text nodes and
|
||||
lists are child elements.
|
||||
|
||||
Example 1:
|
||||
<test/>
|
||||
becomes
|
||||
['test']
|
||||
|
||||
Example 2:
|
||||
<myelement a='value1' b='value2'>
|
||||
<childtype>This is</childtype>
|
||||
<childtype>it!</childtype>
|
||||
</myelement>
|
||||
|
||||
becomes
|
||||
['myelement', {'a':'value1', 'b':'value2'},
|
||||
['childtype', 'This is'],
|
||||
['childtype', 'it!'],
|
||||
]
|
||||
|
||||
Args:
|
||||
content: The structured content to be converted.
|
||||
encoding: The encoding to report on the first XML line.
|
||||
pretty: True if we want pretty printing with indents and new lines.
|
||||
|
||||
Returns:
|
||||
The XML content as a string.
|
||||
"""
|
||||
# We create a huge list of all the elements of the file.
|
||||
xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
|
||||
if pretty:
|
||||
xml_parts.append("\n")
|
||||
_ConstructContentList(xml_parts, content, pretty)
|
||||
|
||||
# Convert it to a string
|
||||
return "".join(xml_parts)
|
||||
|
||||
|
||||
def _ConstructContentList(xml_parts, specification, pretty, level=0):
|
||||
""" Appends the XML parts corresponding to the specification.
|
||||
|
||||
Args:
|
||||
xml_parts: A list of XML parts to be appended to.
|
||||
specification: The specification of the element. See EasyXml docs.
|
||||
pretty: True if we want pretty printing with indents and new lines.
|
||||
level: Indentation level.
|
||||
"""
|
||||
# The first item in a specification is the name of the element.
|
||||
if pretty:
|
||||
indentation = " " * level
|
||||
new_line = "\n"
|
||||
else:
|
||||
indentation = ""
|
||||
new_line = ""
|
||||
name = specification[0]
|
||||
if not isinstance(name, str):
|
||||
raise Exception(
|
||||
"The first item of an EasyXml specification should be "
|
||||
"a string. Specification was " + str(specification)
|
||||
)
|
||||
xml_parts.append(indentation + "<" + name)
|
||||
|
||||
# Optionally in second position is a dictionary of the attributes.
|
||||
rest = specification[1:]
|
||||
if rest and isinstance(rest[0], dict):
|
||||
for at, val in sorted(rest[0].items()):
|
||||
xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"')
|
||||
rest = rest[1:]
|
||||
if rest:
|
||||
xml_parts.append(">")
|
||||
all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
|
||||
multi_line = not all_strings
|
||||
if multi_line and new_line:
|
||||
xml_parts.append(new_line)
|
||||
for child_spec in rest:
|
||||
# If it's a string, append a text node.
|
||||
# Otherwise recurse over that child definition
|
||||
if isinstance(child_spec, str):
|
||||
xml_parts.append(_XmlEscape(child_spec))
|
||||
else:
|
||||
_ConstructContentList(xml_parts, child_spec, pretty, level + 1)
|
||||
if multi_line and indentation:
|
||||
xml_parts.append(indentation)
|
||||
xml_parts.append(f"</{name}>{new_line}")
|
||||
else:
|
||||
xml_parts.append("/>%s" % new_line)
|
||||
|
||||
|
||||
def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
|
||||
win32=(sys.platform == "win32")):
|
||||
""" Writes the XML content to disk, touching the file only if it has changed.
|
||||
|
||||
Args:
|
||||
content: The structured content to be written.
|
||||
path: Location of the file.
|
||||
encoding: The encoding to report on the first line of the XML file.
|
||||
pretty: True if we want pretty printing with indents and new lines.
|
||||
"""
|
||||
xml_string = XmlToString(content, encoding, pretty)
|
||||
if win32 and os.linesep != "\r\n":
|
||||
xml_string = xml_string.replace("\n", "\r\n")
|
||||
|
||||
default_encoding = locale.getdefaultlocale()[1]
|
||||
if default_encoding and default_encoding.upper() != encoding.upper():
|
||||
xml_string = xml_string.encode(encoding)
|
||||
|
||||
# Get the old content
|
||||
try:
|
||||
with open(path) as file:
|
||||
existing = file.read()
|
||||
except OSError:
|
||||
existing = None
|
||||
|
||||
# It has changed, write it
|
||||
if existing != xml_string:
|
||||
with open(path, "wb") as file:
|
||||
file.write(xml_string)
|
||||
|
||||
|
||||
_xml_escape_map = {
    '"': "&quot;",
    "'": "&apos;",
    "<": "&lt;",
    ">": "&gt;",
    "&": "&amp;",
    "\n": "&#xA;",
    "\r": "&#xD;",
}
|
||||
|
||||
|
||||
_xml_escape_re = re.compile("(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
|
||||
|
||||
|
||||
def _XmlEscape(value, attr=False):
|
||||
""" Escape a string for inclusion in XML."""
|
||||
|
||||
def replace(match):
|
||||
m = match.string[match.start() : match.end()]
|
||||
# don't replace single quotes in attrs
|
||||
if attr and m == "'":
|
||||
return m
|
||||
return _xml_escape_map[m]
|
||||
|
||||
return _xml_escape_re.sub(replace, value)
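# Illustrative sketch (not part of the original module): text nodes escape the
# full map above, while attribute values keep single quotes as-is.
def _example_xml_escape():  # pragma: no cover - example only
    assert _XmlEscape("a<b & 'c'") == "a&lt;b &amp; &apos;c&apos;"
    assert _XmlEscape("a<b & 'c'", attr=True) == "a&lt;b &amp; 'c'"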
|
||||
109
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
generated
vendored
Executable file
109
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
generated
vendored
Executable file
@@ -0,0 +1,109 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the easy_xml.py file. """
|
||||
|
||||
import gyp.easy_xml as easy_xml
|
||||
import unittest
|
||||
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class TestSequenceFunctions(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.stderr = StringIO()
|
||||
|
||||
def test_EasyXml_simple(self):
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test"]),
|
||||
'<?xml version="1.0" encoding="utf-8"?><test/>',
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test"], encoding="Windows-1252"),
|
||||
'<?xml version="1.0" encoding="Windows-1252"?><test/>',
|
||||
)
|
||||
|
||||
def test_EasyXml_simple_with_attributes(self):
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test2", {"a": "value1", "b": "value2"}]),
|
||||
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>',
|
||||
)
|
||||
|
||||
def test_EasyXml_escaping(self):
|
||||
original = "<test>'\"\r&\nfoo"
|
||||
converted = "<test>'"
&
foo"
|
||||
converted_apos = converted.replace("'", "'")
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test3", {"a": original}, original]),
|
||||
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>'
|
||||
% (converted, converted_apos),
|
||||
)
|
||||
|
||||
def test_EasyXml_pretty(self):
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(
|
||||
["test3", ["GrandParent", ["Parent1", ["Child"]], ["Parent2"]]],
|
||||
pretty=True,
|
||||
),
|
||||
'<?xml version="1.0" encoding="utf-8"?>\n'
|
||||
"<test3>\n"
|
||||
" <GrandParent>\n"
|
||||
" <Parent1>\n"
|
||||
" <Child/>\n"
|
||||
" </Parent1>\n"
|
||||
" <Parent2/>\n"
|
||||
" </GrandParent>\n"
|
||||
"</test3>\n",
|
||||
)
|
||||
|
||||
def test_EasyXml_complex(self):
|
||||
# We want to create:
|
||||
target = (
|
||||
'<?xml version="1.0" encoding="utf-8"?>'
|
||||
"<Project>"
|
||||
'<PropertyGroup Label="Globals">'
|
||||
"<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>"
|
||||
"<Keyword>Win32Proj</Keyword>"
|
||||
"<RootNamespace>automated_ui_tests</RootNamespace>"
|
||||
"</PropertyGroup>"
|
||||
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
|
||||
"<PropertyGroup "
|
||||
"Condition=\"'$(Configuration)|$(Platform)'=="
|
||||
'\'Debug|Win32\'" Label="Configuration">'
|
||||
"<ConfigurationType>Application</ConfigurationType>"
|
||||
"<CharacterSet>Unicode</CharacterSet>"
|
||||
"</PropertyGroup>"
|
||||
"</Project>"
|
||||
)
|
||||
|
||||
xml = easy_xml.XmlToString(
|
||||
[
|
||||
"Project",
|
||||
[
|
||||
"PropertyGroup",
|
||||
{"Label": "Globals"},
|
||||
["ProjectGuid", "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}"],
|
||||
["Keyword", "Win32Proj"],
|
||||
["RootNamespace", "automated_ui_tests"],
|
||||
],
|
||||
["Import", {"Project": "$(VCTargetsPath)\\Microsoft.Cpp.props"}],
|
||||
[
|
||||
"PropertyGroup",
|
||||
{
|
||||
"Condition": "'$(Configuration)|$(Platform)'=='Debug|Win32'",
|
||||
"Label": "Configuration",
|
||||
},
|
||||
["ConfigurationType", "Application"],
|
||||
["CharacterSet", "Unicode"],
|
||||
],
|
||||
]
|
||||
)
|
||||
self.assertEqual(xml, target)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
55
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
generated
vendored
Executable file
55
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
generated
vendored
Executable file
@@ -0,0 +1,55 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""These functions are executed via gyp-flock-tool when using the Makefile
|
||||
generator. Used on systems that don't have a built-in flock."""
|
||||
|
||||
import fcntl
|
||||
import os
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def main(args):
|
||||
executor = FlockTool()
|
||||
executor.Dispatch(args)
|
||||
|
||||
|
||||
class FlockTool:
|
||||
"""This class emulates the 'flock' command."""
|
||||
|
||||
def Dispatch(self, args):
|
||||
"""Dispatches a string command to a method."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
method = "Exec%s" % self._CommandifyName(args[0])
|
||||
getattr(self, method)(*args[1:])
|
||||
|
||||
def _CommandifyName(self, name_string):
|
||||
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
|
||||
return name_string.title().replace("-", "")
|
||||
|
||||
def ExecFlock(self, lockfile, *cmd_list):
|
||||
"""Emulates the most basic behavior of Linux's flock(1)."""
|
||||
# Rely on exception handling to report errors.
|
||||
# Note that the stock python on SunOS has a bug
|
||||
# where fcntl.flock(fd, LOCK_EX) always fails
|
||||
# with EBADF, that's why we use this F_SETLK
|
||||
# hack instead.
|
||||
fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
|
||||
if sys.platform.startswith("aix") or sys.platform == "os400":
|
||||
# Python on AIX is compiled with LARGEFILE support, which changes the
|
||||
# struct size.
|
||||
op = struct.pack("hhIllqq", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
|
||||
else:
|
||||
op = struct.pack("hhllhhl", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
|
||||
fcntl.fcntl(fd, fcntl.F_SETLK, op)
|
||||
return subprocess.call(cmd_list)
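# Illustrative sketch (not part of the original script): the Makefile generator
# invokes the copied tool roughly as
#   gyp-flock-tool flock <lockfile> <command> [args...]
# which Dispatch() routes to ExecFlock(). A direct, in-process equivalent
# (POSIX only; the arguments are hypothetical):
def _example_flock(lockfile, *cmd):  # pragma: no cover - example only
    return FlockTool().ExecFlock(lockfile, *cmd)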
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
0
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
generated
vendored
Normal file
0
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
generated
vendored
Normal file
808
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
generated
vendored
Normal file
808
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
generated
vendored
Normal file
@@ -0,0 +1,808 @@
|
||||
# Copyright (c) 2014 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""
|
||||
This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
|
||||
the generator flag config_path) the path of a json file that dictates the files
|
||||
and targets to search for. The following keys are supported:
|
||||
files: list of paths (relative) of the files to search for.
|
||||
test_targets: unqualified target names to search for. Any target in this list
|
||||
that depends upon a file in |files| is output regardless of the type of target
|
||||
or chain of dependencies.
|
||||
additional_compile_targets: Unqualified targets to search for in addition to
|
||||
test_targets. Targets in the combined list that depend upon a file in |files|
|
||||
are not necessarily output. For example, if the target is of type none then the
|
||||
target is not output (but one of the descendants of the target will be).
|
||||
|
||||
The following is output:
|
||||
error: only supplied if there is an error.
|
||||
compile_targets: minimal set of targets that directly or indirectly (for
|
||||
targets of type none) depend on the files in |files| and is one of the
|
||||
supplied targets or a target that one of the supplied targets depends on.
|
||||
The expectation is this set of targets is passed into a build step. This list
|
||||
always contains the output of test_targets as well.
|
||||
test_targets: set of targets from the supplied |test_targets| that either
|
||||
directly or indirectly depend upon a file in |files|. This list is useful
|
||||
if additional processing needs to be done for certain targets after the
|
||||
build, such as running tests.
|
||||
status: outputs one of three values: none of the supplied files were found,
|
||||
one of the include files changed so that it should be assumed everything
|
||||
changed (in this case test_targets and compile_targets are not output) or at
|
||||
least one file was found.
|
||||
invalid_targets: list of supplied targets that were not found.
|
||||
|
||||
Example:
|
||||
Consider a graph like the following:
|
||||
A D
|
||||
/ \
|
||||
B C
|
||||
A depends upon both B and C, A is of type none and B and C are executables.
|
||||
D is an executable, has no dependencies and nothing depends on it.
|
||||
If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
|
||||
files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
|
||||
the following is output:
|
||||
|compile_targets| = ["B"] B must built as it depends upon the changed file b.cc
|
||||
and the supplied target A depends upon it. A is not output as a build_target
|
||||
as it is of type none with no rules and actions.
|
||||
|test_targets| = ["B"] B directly depends upon the change file b.cc.
|
||||
|
||||
Even though the file d.cc, which D depends upon, has changed D is not output
|
||||
as it was not supplied by way of |additional_compile_targets| or |test_targets|.
|
||||
|
||||
If the generator flag analyzer_output_path is specified, output is written
|
||||
there. Otherwise output is written to stdout.
|
||||
|
||||
In Gyp the "all" target is shorthand for the root targets in the files passed
|
||||
to gyp. For example, if file "a.gyp" contains targets "a1" and
|
||||
"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
|
||||
on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
|
||||
Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
|
||||
directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
|
||||
then the "all" target includes "b1" and "b2".
|
||||
"""
|
||||
|
||||
|
||||
import gyp.common
|
||||
import json
|
||||
import os
|
||||
import posixpath
|
||||
|
||||
debug = False
|
||||
|
||||
found_dependency_string = "Found dependency"
|
||||
no_dependency_string = "No dependencies"
|
||||
# Status when it should be assumed that everything has changed.
|
||||
all_changed_string = "Found dependency (all)"
|
||||
|
||||
# MatchStatus is used to indicate if and how a target depends upon the supplied
|
||||
# sources.
|
||||
# The target's sources contain one of the supplied paths.
|
||||
MATCH_STATUS_MATCHES = 1
|
||||
# The target has a dependency on another target that contains one of the
|
||||
# supplied paths.
|
||||
MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
|
||||
# The target's sources weren't in the supplied paths and none of the target's
|
||||
# dependencies depend upon a target that matched.
|
||||
MATCH_STATUS_DOESNT_MATCH = 3
|
||||
# The target doesn't contain the source, but the dependent targets have not yet
|
||||
# been visited to determine a more specific status yet.
|
||||
MATCH_STATUS_TBD = 4
|
||||
|
||||
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
|
||||
|
||||
generator_wants_static_library_dependencies_adjusted = False
|
||||
|
||||
generator_default_variables = {}
|
||||
for dirname in [
|
||||
"INTERMEDIATE_DIR",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"LIB_DIR",
|
||||
"SHARED_LIB_DIR",
|
||||
]:
|
||||
generator_default_variables[dirname] = "!!!"
|
||||
|
||||
for unused in [
|
||||
"RULE_INPUT_PATH",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"CONFIGURATION_NAME",
|
||||
]:
|
||||
generator_default_variables[unused] = ""
|
||||
|
||||
|
||||
def _ToGypPath(path):
|
||||
"""Converts a path to the format used by gyp."""
|
||||
if os.sep == "\\" and os.altsep == "/":
|
||||
return path.replace("\\", "/")
|
||||
return path
|
||||
|
||||
|
||||
def _ResolveParent(path, base_path_components):
|
||||
"""Resolves |path|, which starts with at least one '../'. Returns an empty
|
||||
string if the path shouldn't be considered. See _AddSources() for a
|
||||
description of |base_path_components|."""
|
||||
depth = 0
|
||||
while path.startswith("../"):
|
||||
depth += 1
|
||||
path = path[3:]
|
||||
# Relative includes may go outside the source tree. For example, an action may
|
||||
# have inputs in /usr/include, which are not in the source tree.
|
||||
if depth > len(base_path_components):
|
||||
return ""
|
||||
if depth == len(base_path_components):
|
||||
return path
|
||||
return (
|
||||
"/".join(base_path_components[0 : len(base_path_components) - depth])
|
||||
+ "/"
|
||||
+ path
|
||||
)
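# Illustrative sketch (not part of the original module; paths are hypothetical):
# '..' components are resolved against |base_path_components|, and paths that
# escape the source tree collapse to "".
def _example_resolve_parent():  # pragma: no cover - example only
    assert _ResolveParent("../gen/x.cc", ["src", "foo", "bar"]) == "src/foo/gen/x.cc"
    assert _ResolveParent("../../../../x.cc", ["src", "foo", "bar"]) == ""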
|
||||
|
||||
|
||||
def _AddSources(sources, base_path, base_path_components, result):
|
||||
"""Extracts valid sources from |sources| and adds them to |result|. Each
|
||||
source file is relative to |base_path|, but may contain '..'. To make
|
||||
resolving '..' easier |base_path_components| contains each of the
|
||||
directories in |base_path|. Additionally each source may contain variables.
|
||||
Such sources are ignored as it is assumed dependencies on them are expressed
|
||||
and tracked by some other means."""
|
||||
# NOTE: gyp paths are always posix style.
|
||||
for source in sources:
|
||||
if not len(source) or source.startswith("!!!") or source.startswith("$"):
|
||||
continue
|
||||
# variable expansion may lead to //.
|
||||
org_source = source
|
||||
source = source[0] + source[1:].replace("//", "/")
|
||||
if source.startswith("../"):
|
||||
source = _ResolveParent(source, base_path_components)
|
||||
if len(source):
|
||||
result.append(source)
|
||||
continue
|
||||
result.append(base_path + source)
|
||||
if debug:
|
||||
print("AddSource", org_source, result[len(result) - 1])
|
||||
|
||||
|
||||
def _ExtractSourcesFromAction(action, base_path, base_path_components, results):
|
||||
if "inputs" in action:
|
||||
_AddSources(action["inputs"], base_path, base_path_components, results)
|
||||
|
||||
|
||||
def _ToLocalPath(toplevel_dir, path):
|
||||
"""Converts |path| to a path relative to |toplevel_dir|."""
|
||||
if path == toplevel_dir:
|
||||
return ""
|
||||
if path.startswith(toplevel_dir + "/"):
|
||||
return path[len(toplevel_dir) + len("/") :]
|
||||
return path
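# Illustrative sketch (not part of the original module; paths are hypothetical).
def _example_to_local_path():  # pragma: no cover - example only
    assert _ToLocalPath("/src", "/src/foo/bar.cc") == "foo/bar.cc"
    assert _ToLocalPath("/src", "/elsewhere/bar.cc") == "/elsewhere/bar.cc"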
|
||||
|
||||
|
||||
def _ExtractSources(target, target_dict, toplevel_dir):
|
||||
# |target| is either absolute or relative and in the format of the OS. Gyp
|
||||
# source paths are always posix. Convert |target| to a posix path relative to
|
||||
# |toplevel_dir_|. This is done to make it easy to build source paths.
|
||||
base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
|
||||
base_path_components = base_path.split("/")
|
||||
|
||||
# Add a trailing '/' so that _AddSources() can easily build paths.
|
||||
if len(base_path):
|
||||
base_path += "/"
|
||||
|
||||
if debug:
|
||||
print("ExtractSources", target, base_path)
|
||||
|
||||
results = []
|
||||
if "sources" in target_dict:
|
||||
_AddSources(target_dict["sources"], base_path, base_path_components, results)
|
||||
# Include the inputs from any actions. Any changes to these affect the
|
||||
# resulting output.
|
||||
if "actions" in target_dict:
|
||||
for action in target_dict["actions"]:
|
||||
_ExtractSourcesFromAction(action, base_path, base_path_components, results)
|
||||
if "rules" in target_dict:
|
||||
for rule in target_dict["rules"]:
|
||||
_ExtractSourcesFromAction(rule, base_path, base_path_components, results)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
class Target:
|
||||
"""Holds information about a particular target:
|
||||
deps: set of Targets this Target depends upon. This is not recursive, only the
|
||||
direct dependent Targets.
|
||||
match_status: one of the MatchStatus values.
|
||||
back_deps: set of Targets that have a dependency on this Target.
|
||||
visited: used during iteration to indicate whether we've visited this target.
|
||||
This is used for two iterations, once in building the set of Targets and
|
||||
again in _GetBuildTargets().
|
||||
name: fully qualified name of the target.
|
||||
requires_build: True if the target type is such that it needs to be built.
|
||||
See _DoesTargetTypeRequireBuild for details.
|
||||
added_to_compile_targets: used when determining if the target was added to the
|
||||
set of targets that needs to be built.
|
||||
in_roots: true if this target is a descendant of one of the root nodes.
|
||||
is_executable: true if the type of target is executable.
|
||||
is_static_library: true if the type of target is static_library.
|
||||
is_or_has_linked_ancestor: true if the target does a link (eg executable), or
|
||||
if there is a target in back_deps that does a link."""
|
||||
|
||||
def __init__(self, name):
|
||||
self.deps = set()
|
||||
self.match_status = MATCH_STATUS_TBD
|
||||
self.back_deps = set()
|
||||
self.name = name
|
||||
# TODO(sky): I don't like hanging this off Target. This state is specific
|
||||
# to certain functions and should be isolated there.
|
||||
self.visited = False
|
||||
self.requires_build = False
|
||||
self.added_to_compile_targets = False
|
||||
self.in_roots = False
|
||||
self.is_executable = False
|
||||
self.is_static_library = False
|
||||
self.is_or_has_linked_ancestor = False
|
||||
|
||||
|
||||
class Config:
|
||||
"""Details what we're looking for
|
||||
files: set of files to search for
|
||||
targets: see file description for details."""
|
||||
|
||||
def __init__(self):
|
||||
self.files = []
|
||||
self.targets = set()
|
||||
self.additional_compile_target_names = set()
|
||||
self.test_target_names = set()
|
||||
|
||||
def Init(self, params):
|
||||
"""Initializes Config. This is a separate method as it raises an exception
|
||||
if there is a parse error."""
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
config_path = generator_flags.get("config_path", None)
|
||||
if not config_path:
|
||||
return
|
||||
try:
|
||||
f = open(config_path)
|
||||
config = json.load(f)
|
||||
f.close()
|
||||
except OSError:
|
||||
raise Exception("Unable to open file " + config_path)
|
||||
except ValueError as e:
|
||||
raise Exception("Unable to parse config file " + config_path + str(e))
|
||||
if not isinstance(config, dict):
|
||||
raise Exception("config_path must be a JSON file containing a dictionary")
|
||||
self.files = config.get("files", [])
|
||||
self.additional_compile_target_names = set(
|
||||
config.get("additional_compile_targets", [])
|
||||
)
|
||||
self.test_target_names = set(config.get("test_targets", []))
|
||||
|
||||
|
||||
def _WasBuildFileModified(build_file, data, files, toplevel_dir):
|
||||
"""Returns true if the build file |build_file| is either in |files| or
|
||||
one of the files included by |build_file| is in |files|. |toplevel_dir| is
|
||||
the root of the source tree."""
|
||||
if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
|
||||
if debug:
|
||||
print("gyp file modified", build_file)
|
||||
return True
|
||||
|
||||
# First element of included_files is the file itself.
|
||||
if len(data[build_file]["included_files"]) <= 1:
|
||||
return False
|
||||
|
||||
for include_file in data[build_file]["included_files"][1:]:
|
||||
# |included_files| are relative to the directory of the |build_file|.
|
||||
rel_include_file = _ToGypPath(
|
||||
gyp.common.UnrelativePath(include_file, build_file)
|
||||
)
|
||||
if _ToLocalPath(toplevel_dir, rel_include_file) in files:
|
||||
if debug:
|
||||
print(
|
||||
"included gyp file modified, gyp_file=",
|
||||
build_file,
|
||||
"included file=",
|
||||
rel_include_file,
|
||||
)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _GetOrCreateTargetByName(targets, target_name):
|
||||
"""Creates or returns the Target at targets[target_name]. If there is no
|
||||
Target for |target_name| one is created. Returns a tuple of whether a new
|
||||
Target was created and the Target."""
|
||||
if target_name in targets:
|
||||
return False, targets[target_name]
|
||||
target = Target(target_name)
|
||||
targets[target_name] = target
|
||||
return True, target
|
||||
|
||||
|
||||
def _DoesTargetTypeRequireBuild(target_dict):
|
||||
"""Returns true if the target type is such that it needs to be built."""
|
||||
# If a 'none' target has rules or actions we assume it requires a build.
|
||||
return bool(
|
||||
target_dict["type"] != "none"
|
||||
or target_dict.get("actions")
|
||||
or target_dict.get("rules")
|
||||
)
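# Illustrative sketch (not part of the original module): a 'none' target only
# needs a build when it carries actions or rules.
def _example_requires_build():  # pragma: no cover - example only
    assert _DoesTargetTypeRequireBuild({"type": "executable"})
    assert not _DoesTargetTypeRequireBuild({"type": "none"})
    assert _DoesTargetTypeRequireBuild({"type": "none", "actions": [{}]})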
|
||||
|
||||
|
||||
def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files):
|
||||
"""Returns a tuple of the following:
|
||||
. A dictionary mapping from fully qualified name to Target.
|
||||
. A list of the targets that have a source file in |files|.
|
||||
. Targets that constitute the 'all' target. See description at top of file
|
||||
for details on the 'all' target.
|
||||
This sets the |match_status| of the targets that contain any of the source
|
||||
files in |files| to MATCH_STATUS_MATCHES.
|
||||
|toplevel_dir| is the root of the source tree."""
|
||||
# Maps from target name to Target.
|
||||
name_to_target = {}
|
||||
|
||||
# Targets that matched.
|
||||
matching_targets = []
|
||||
|
||||
# Queue of targets to visit.
|
||||
targets_to_visit = target_list[:]
|
||||
|
||||
# Maps from build file to a boolean indicating whether the build file is in
|
||||
# |files|.
|
||||
build_file_in_files = {}
|
||||
|
||||
# Root targets across all files.
|
||||
roots = set()
|
||||
|
||||
# Set of Targets in |build_files|.
|
||||
build_file_targets = set()
|
||||
|
||||
while len(targets_to_visit) > 0:
|
||||
target_name = targets_to_visit.pop()
|
||||
created_target, target = _GetOrCreateTargetByName(name_to_target, target_name)
|
||||
if created_target:
|
||||
roots.add(target)
|
||||
elif target.visited:
|
||||
continue
|
||||
|
||||
target.visited = True
|
||||
target.requires_build = _DoesTargetTypeRequireBuild(target_dicts[target_name])
|
||||
target_type = target_dicts[target_name]["type"]
|
||||
target.is_executable = target_type == "executable"
|
||||
target.is_static_library = target_type == "static_library"
|
||||
target.is_or_has_linked_ancestor = (
|
||||
target_type == "executable" or target_type == "shared_library"
|
||||
)
|
||||
|
||||
build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
|
||||
if build_file not in build_file_in_files:
|
||||
build_file_in_files[build_file] = _WasBuildFileModified(
|
||||
build_file, data, files, toplevel_dir
|
||||
)
|
||||
|
||||
if build_file in build_files:
|
||||
build_file_targets.add(target)
|
||||
|
||||
# If a build file (or any of its included files) is modified we assume all
|
||||
# targets in the file are modified.
|
||||
if build_file_in_files[build_file]:
|
||||
print("matching target from modified build file", target_name)
|
||||
target.match_status = MATCH_STATUS_MATCHES
|
||||
matching_targets.append(target)
|
||||
else:
|
||||
sources = _ExtractSources(
|
||||
target_name, target_dicts[target_name], toplevel_dir
|
||||
)
|
||||
for source in sources:
|
||||
if _ToGypPath(os.path.normpath(source)) in files:
|
||||
print("target", target_name, "matches", source)
|
||||
target.match_status = MATCH_STATUS_MATCHES
|
||||
matching_targets.append(target)
|
||||
break
|
||||
|
||||
# Add dependencies to visit as well as updating back pointers for deps.
|
||||
for dep in target_dicts[target_name].get("dependencies", []):
|
||||
targets_to_visit.append(dep)
|
||||
|
||||
created_dep_target, dep_target = _GetOrCreateTargetByName(
|
||||
name_to_target, dep
|
||||
)
|
||||
if not created_dep_target:
|
||||
roots.discard(dep_target)
|
||||
|
||||
target.deps.add(dep_target)
|
||||
dep_target.back_deps.add(target)
|
||||
|
||||
return name_to_target, matching_targets, roots & build_file_targets
|
||||
|
||||
|
||||
def _GetUnqualifiedToTargetMapping(all_targets, to_find):
|
||||
"""Returns a tuple of the following:
|
||||
. mapping (dictionary) from unqualified name to Target for all the
|
||||
Targets in |to_find|.
|
||||
. any target names not found. If this is empty all targets were found."""
|
||||
result = {}
|
||||
if not to_find:
|
||||
return {}, []
|
||||
to_find = set(to_find)
|
||||
for target_name in all_targets.keys():
|
||||
extracted = gyp.common.ParseQualifiedTarget(target_name)
|
||||
if len(extracted) > 1 and extracted[1] in to_find:
|
||||
to_find.remove(extracted[1])
|
||||
result[extracted[1]] = all_targets[target_name]
|
||||
if not to_find:
|
||||
return result, []
|
||||
return result, [x for x in to_find]
|
||||
|
||||
|
||||
def _DoesTargetDependOnMatchingTargets(target):
|
||||
"""Returns true if |target| or any of its dependencies is one of the
|
||||
targets containing the files supplied as input to analyzer. This updates
|
||||
|matches| of the Targets as it recurses.
|
||||
target: the Target to look for."""
|
||||
if target.match_status == MATCH_STATUS_DOESNT_MATCH:
|
||||
return False
|
||||
if (
|
||||
target.match_status == MATCH_STATUS_MATCHES
|
||||
or target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY
|
||||
):
|
||||
return True
|
||||
for dep in target.deps:
|
||||
if _DoesTargetDependOnMatchingTargets(dep):
|
||||
target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
|
||||
print("\t", target.name, "matches by dep", dep.name)
|
||||
return True
|
||||
target.match_status = MATCH_STATUS_DOESNT_MATCH
|
||||
return False
|
||||
|
||||
|
||||
def _GetTargetsDependingOnMatchingTargets(possible_targets):
|
||||
"""Returns the list of Targets in |possible_targets| that depend (either
|
||||
directly or indirectly) on at least one of the targets containing the files
|
||||
supplied as input to analyzer.
|
||||
possible_targets: targets to search from."""
|
||||
found = []
|
||||
print("Targets that matched by dependency:")
|
||||
for target in possible_targets:
|
||||
if _DoesTargetDependOnMatchingTargets(target):
|
||||
found.append(target)
|
||||
return found
|
||||
|
||||
|
||||
def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
|
||||
"""Recurses through all targets that depend on |target|, adding all targets
|
||||
that need to be built (and are in |roots|) to |result|.
|
||||
roots: set of root targets.
|
||||
add_if_no_ancestor: If true and there are no ancestors of |target| then add
|
||||
|target| to |result|. |target| must still be in |roots|.
|
||||
result: targets that need to be built are added here."""
|
||||
if target.visited:
|
||||
return
|
||||
|
||||
target.visited = True
|
||||
target.in_roots = target in roots
|
||||
|
||||
for back_dep_target in target.back_deps:
|
||||
_AddCompileTargets(back_dep_target, roots, False, result)
|
||||
target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
|
||||
target.in_roots |= back_dep_target.in_roots
|
||||
target.is_or_has_linked_ancestor |= back_dep_target.is_or_has_linked_ancestor
|
||||
|
||||
# Always add 'executable' targets. Even though they may be built by other
|
||||
# targets that depend upon them it makes detection of what is going to be
|
||||
# built easier.
|
||||
# And always add static_libraries that have no dependencies on them from
|
||||
# linkables. This is necessary as the other dependencies on them may be
|
||||
# static libraries themselves, which are not compile time dependencies.
|
||||
if target.in_roots and (
|
||||
target.is_executable
|
||||
or (
|
||||
not target.added_to_compile_targets
|
||||
and (add_if_no_ancestor or target.requires_build)
|
||||
)
|
||||
or (
|
||||
target.is_static_library
|
||||
and add_if_no_ancestor
|
||||
and not target.is_or_has_linked_ancestor
|
||||
)
|
||||
):
|
||||
print(
|
||||
"\t\tadding to compile targets",
|
||||
target.name,
|
||||
"executable",
|
||||
target.is_executable,
|
||||
"added_to_compile_targets",
|
||||
target.added_to_compile_targets,
|
||||
"add_if_no_ancestor",
|
||||
add_if_no_ancestor,
|
||||
"requires_build",
|
||||
target.requires_build,
|
||||
"is_static_library",
|
||||
target.is_static_library,
|
||||
"is_or_has_linked_ancestor",
|
||||
target.is_or_has_linked_ancestor,
|
||||
)
|
||||
result.add(target)
|
||||
target.added_to_compile_targets = True
|
||||
|
||||
|
||||
def _GetCompileTargets(matching_targets, supplied_targets):
|
||||
"""Returns the set of Targets that require a build.
|
||||
matching_targets: targets that changed and need to be built.
|
||||
supplied_targets: set of targets supplied to analyzer to search from."""
|
||||
result = set()
|
||||
for target in matching_targets:
|
||||
print("finding compile targets for match", target.name)
|
||||
_AddCompileTargets(target, supplied_targets, True, result)
|
||||
return result
|
||||
|
||||
|
||||
def _WriteOutput(params, **values):
|
||||
"""Writes the output, either to stdout or a file is specified."""
|
||||
if "error" in values:
|
||||
print("Error:", values["error"])
|
||||
if "status" in values:
|
||||
print(values["status"])
|
||||
if "targets" in values:
|
||||
values["targets"].sort()
|
||||
print("Supplied targets that depend on changed files:")
|
||||
for target in values["targets"]:
|
||||
print("\t", target)
|
||||
if "invalid_targets" in values:
|
||||
values["invalid_targets"].sort()
|
||||
print("The following targets were not found:")
|
||||
for target in values["invalid_targets"]:
|
||||
print("\t", target)
|
||||
if "build_targets" in values:
|
||||
values["build_targets"].sort()
|
||||
print("Targets that require a build:")
|
||||
for target in values["build_targets"]:
|
||||
print("\t", target)
|
||||
if "compile_targets" in values:
|
||||
values["compile_targets"].sort()
|
||||
print("Targets that need to be built:")
|
||||
for target in values["compile_targets"]:
|
||||
print("\t", target)
|
||||
if "test_targets" in values:
|
||||
values["test_targets"].sort()
|
||||
print("Test targets:")
|
||||
for target in values["test_targets"]:
|
||||
print("\t", target)
|
||||
|
||||
output_path = params.get("generator_flags", {}).get("analyzer_output_path", None)
|
||||
if not output_path:
|
||||
print(json.dumps(values))
|
||||
return
|
||||
try:
|
||||
f = open(output_path, "w")
|
||||
f.write(json.dumps(values) + "\n")
|
||||
f.close()
|
||||
except OSError as e:
|
||||
print("Error writing to output file", output_path, str(e))
|
||||
|
||||
|
||||
def _WasGypIncludeFileModified(params, files):
|
||||
"""Returns true if one of the files in |files| is in the set of included
|
||||
files."""
|
||||
if params["options"].includes:
|
||||
for include in params["options"].includes:
|
||||
if _ToGypPath(os.path.normpath(include)) in files:
|
||||
print("Include file modified, assuming all changed", include)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _NamesNotIn(names, mapping):
|
||||
"""Returns a list of the values in |names| that are not in |mapping|."""
|
||||
return [name for name in names if name not in mapping]
|
||||
|
||||
|
||||
def _LookupTargets(names, mapping):
|
||||
"""Returns a list of the mapping[name] for each value in |names| that is in
|
||||
|mapping|."""
|
||||
return [mapping[name] for name in names if name in mapping]
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
"""Calculate additional variables for use in the build (called by gyp)."""
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "mac":
|
||||
default_variables.setdefault("OS", "mac")
|
||||
elif flavor == "win":
|
||||
default_variables.setdefault("OS", "win")
|
||||
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
|
||||
else:
|
||||
operating_system = flavor
|
||||
if flavor == "android":
|
||||
operating_system = "linux" # Keep this legacy behavior for now.
|
||||
default_variables.setdefault("OS", operating_system)
|
||||
|
||||
|
||||
class TargetCalculator:
|
||||
"""Calculates the matching test_targets and matching compile_targets."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
files,
|
||||
additional_compile_target_names,
|
||||
test_target_names,
|
||||
data,
|
||||
target_list,
|
||||
target_dicts,
|
||||
toplevel_dir,
|
||||
build_files,
|
||||
):
|
||||
self._additional_compile_target_names = set(additional_compile_target_names)
|
||||
self._test_target_names = set(test_target_names)
|
||||
(
|
||||
self._name_to_target,
|
||||
self._changed_targets,
|
||||
self._root_targets,
|
||||
) = _GenerateTargets(
|
||||
data, target_list, target_dicts, toplevel_dir, frozenset(files), build_files
|
||||
)
|
||||
(
|
||||
self._unqualified_mapping,
|
||||
self.invalid_targets,
|
||||
) = _GetUnqualifiedToTargetMapping(
|
||||
self._name_to_target, self._supplied_target_names_no_all()
|
||||
)
|
||||
|
||||
def _supplied_target_names(self):
|
||||
return self._additional_compile_target_names | self._test_target_names
|
||||
|
||||
def _supplied_target_names_no_all(self):
|
||||
"""Returns the supplied test targets without 'all'."""
|
||||
result = self._supplied_target_names()
|
||||
result.discard("all")
|
||||
return result
|
||||
|
||||
def is_build_impacted(self):
|
||||
"""Returns true if the supplied files impact the build at all."""
|
||||
return self._changed_targets
|
||||
|
||||
def find_matching_test_target_names(self):
|
||||
"""Returns the set of output test targets."""
|
||||
assert self.is_build_impacted()
|
||||
# Find the test targets first. 'all' is special cased to mean all the
|
||||
# root targets. To deal with 'all', the supplied |test_targets| are expanded
|
||||
# to include the root targets during lookup. If any of the root targets
|
||||
# match, we remove it and replace it with 'all'.
|
||||
test_target_names_no_all = set(self._test_target_names)
|
||||
test_target_names_no_all.discard("all")
|
||||
test_targets_no_all = _LookupTargets(
|
||||
test_target_names_no_all, self._unqualified_mapping
|
||||
)
|
||||
test_target_names_contains_all = "all" in self._test_target_names
|
||||
if test_target_names_contains_all:
|
||||
test_targets = [
|
||||
x for x in (set(test_targets_no_all) | set(self._root_targets))
|
||||
]
|
||||
else:
|
||||
test_targets = [x for x in test_targets_no_all]
|
||||
print("supplied test_targets")
|
||||
for target_name in self._test_target_names:
|
||||
print("\t", target_name)
|
||||
print("found test_targets")
|
||||
for target in test_targets:
|
||||
print("\t", target.name)
|
||||
print("searching for matching test targets")
|
||||
matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
|
||||
matching_test_targets_contains_all = test_target_names_contains_all and set(
|
||||
matching_test_targets
|
||||
) & set(self._root_targets)
|
||||
if matching_test_targets_contains_all:
|
||||
# Remove any of the targets for all that were not explicitly supplied,
|
||||
# 'all' is subsequently added to the matching names below.
|
||||
matching_test_targets = [
|
||||
x for x in (set(matching_test_targets) & set(test_targets_no_all))
|
||||
]
|
||||
print("matched test_targets")
|
||||
for target in matching_test_targets:
|
||||
print("\t", target.name)
|
||||
matching_target_names = [
|
||||
gyp.common.ParseQualifiedTarget(target.name)[1]
|
||||
for target in matching_test_targets
|
||||
]
|
||||
if matching_test_targets_contains_all:
|
||||
matching_target_names.append("all")
|
||||
print("\tall")
|
||||
return matching_target_names
|
||||
|
||||
def find_matching_compile_target_names(self):
|
||||
"""Returns the set of output compile targets."""
|
||||
assert self.is_build_impacted()
|
||||
# Compile targets are found by searching up from changed targets.
|
||||
# Reset the visited status for _GetBuildTargets.
|
||||
for target in self._name_to_target.values():
|
||||
target.visited = False
|
||||
|
||||
supplied_targets = _LookupTargets(
|
||||
self._supplied_target_names_no_all(), self._unqualified_mapping
|
||||
)
|
||||
if "all" in self._supplied_target_names():
|
||||
supplied_targets = [
|
||||
x for x in (set(supplied_targets) | set(self._root_targets))
|
||||
]
|
||||
print("Supplied test_targets & compile_targets")
|
||||
for target in supplied_targets:
|
||||
print("\t", target.name)
|
||||
print("Finding compile targets")
|
||||
compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets)
|
||||
return [
|
||||
gyp.common.ParseQualifiedTarget(target.name)[1]
|
||||
for target in compile_targets
|
||||
]
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
"""Called by gyp as the final stage. Outputs results."""
|
||||
config = Config()
|
||||
try:
|
||||
config.Init(params)
|
||||
|
||||
if not config.files:
|
||||
raise Exception(
|
||||
"Must specify files to analyze via config_path generator " "flag"
|
||||
)
|
||||
|
||||
toplevel_dir = _ToGypPath(os.path.abspath(params["options"].toplevel_dir))
|
||||
if debug:
|
||||
print("toplevel_dir", toplevel_dir)
|
||||
|
||||
if _WasGypIncludeFileModified(params, config.files):
|
||||
result_dict = {
|
||||
"status": all_changed_string,
|
||||
"test_targets": list(config.test_target_names),
|
||||
"compile_targets": list(
|
||||
config.additional_compile_target_names | config.test_target_names
|
||||
),
|
||||
}
|
||||
_WriteOutput(params, **result_dict)
|
||||
return
|
||||
|
||||
calculator = TargetCalculator(
|
||||
config.files,
|
||||
config.additional_compile_target_names,
|
||||
config.test_target_names,
|
||||
data,
|
||||
target_list,
|
||||
target_dicts,
|
||||
toplevel_dir,
|
||||
params["build_files"],
|
||||
)
|
||||
if not calculator.is_build_impacted():
|
||||
result_dict = {
|
||||
"status": no_dependency_string,
|
||||
"test_targets": [],
|
||||
"compile_targets": [],
|
||||
}
|
||||
if calculator.invalid_targets:
|
||||
result_dict["invalid_targets"] = calculator.invalid_targets
|
||||
_WriteOutput(params, **result_dict)
|
||||
return
|
||||
|
||||
test_target_names = calculator.find_matching_test_target_names()
|
||||
compile_target_names = calculator.find_matching_compile_target_names()
|
||||
found_at_least_one_target = compile_target_names or test_target_names
|
||||
result_dict = {
|
||||
"test_targets": test_target_names,
|
||||
"status": found_dependency_string
|
||||
if found_at_least_one_target
|
||||
else no_dependency_string,
|
||||
"compile_targets": list(set(compile_target_names) | set(test_target_names)),
|
||||
}
|
||||
if calculator.invalid_targets:
|
||||
result_dict["invalid_targets"] = calculator.invalid_targets
|
||||
_WriteOutput(params, **result_dict)
|
||||
|
||||
except Exception as e:
|
||||
_WriteOutput(params, error=str(e))
|
||||
1173
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
generated
vendored
Normal file
1173
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
generated
vendored
Normal file
File diff suppressed because it is too large
1321
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
generated
vendored
Normal file
1321
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
generated
vendored
Normal file
File diff suppressed because it is too large
120
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
generated
vendored
Normal file
120
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
generated
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
# Copyright (c) 2016 Ben Noordhuis <info@bnoordhuis.nl>. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import gyp.common
|
||||
import gyp.xcode_emulation
|
||||
import json
|
||||
import os
|
||||
|
||||
generator_additional_non_configuration_keys = []
|
||||
generator_additional_path_sections = []
|
||||
generator_extra_sources_for_rules = []
|
||||
generator_filelist_paths = None
|
||||
generator_supports_multiple_toolsets = True
|
||||
generator_wants_sorted_dependencies = False
|
||||
|
||||
# Lifted from make.py. The actual values don't matter much.
|
||||
generator_default_variables = {
|
||||
"CONFIGURATION_NAME": "$(BUILDTYPE)",
|
||||
"EXECUTABLE_PREFIX": "",
|
||||
"EXECUTABLE_SUFFIX": "",
|
||||
"INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni",
|
||||
"PRODUCT_DIR": "$(builddir)",
|
||||
"RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s",
|
||||
"RULE_INPUT_EXT": "$(suffix $<)",
|
||||
"RULE_INPUT_NAME": "$(notdir $<)",
|
||||
"RULE_INPUT_PATH": "$(abspath $<)",
|
||||
"RULE_INPUT_ROOT": "%(INPUT_ROOT)s",
|
||||
"SHARED_INTERMEDIATE_DIR": "$(obj)/gen",
|
||||
"SHARED_LIB_PREFIX": "lib",
|
||||
"STATIC_LIB_PREFIX": "lib",
|
||||
"STATIC_LIB_SUFFIX": ".a",
|
||||
}
|
||||
|
||||
|
||||
def IsMac(params):
|
||||
return "mac" == gyp.common.GetFlavor(params)
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
default_variables.setdefault("OS", gyp.common.GetFlavor(params))
|
||||
|
||||
|
||||
def AddCommandsForTarget(cwd, target, params, per_config_commands):
|
||||
output_dir = params["generator_flags"].get("output_dir", "out")
|
||||
for configuration_name, configuration in target["configurations"].items():
|
||||
if IsMac(params):
|
||||
xcode_settings = gyp.xcode_emulation.XcodeSettings(target)
|
||||
cflags = xcode_settings.GetCflags(configuration_name)
|
||||
cflags_c = xcode_settings.GetCflagsC(configuration_name)
|
||||
cflags_cc = xcode_settings.GetCflagsCC(configuration_name)
|
||||
else:
|
||||
cflags = configuration.get("cflags", [])
|
||||
cflags_c = configuration.get("cflags_c", [])
|
||||
cflags_cc = configuration.get("cflags_cc", [])
|
||||
|
||||
cflags_c = cflags + cflags_c
|
||||
cflags_cc = cflags + cflags_cc
|
||||
|
||||
defines = configuration.get("defines", [])
|
||||
defines = ["-D" + s for s in defines]
|
||||
|
||||
# TODO(bnoordhuis) Handle generated source files.
|
||||
extensions = (".c", ".cc", ".cpp", ".cxx")
|
||||
sources = [s for s in target.get("sources", []) if s.endswith(extensions)]
|
||||
|
||||
def resolve(filename):
|
||||
return os.path.abspath(os.path.join(cwd, filename))
|
||||
|
||||
# TODO(bnoordhuis) Handle generated header files.
|
||||
include_dirs = configuration.get("include_dirs", [])
|
||||
include_dirs = [s for s in include_dirs if not s.startswith("$(obj)")]
|
||||
includes = ["-I" + resolve(s) for s in include_dirs]
|
||||
|
||||
defines = gyp.common.EncodePOSIXShellList(defines)
|
||||
includes = gyp.common.EncodePOSIXShellList(includes)
|
||||
cflags_c = gyp.common.EncodePOSIXShellList(cflags_c)
|
||||
cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc)
|
||||
|
||||
commands = per_config_commands.setdefault(configuration_name, [])
|
||||
for source in sources:
|
||||
file = resolve(source)
|
||||
isc = source.endswith(".c")
|
||||
cc = "cc" if isc else "c++"
|
||||
cflags = cflags_c if isc else cflags_cc
|
||||
command = " ".join(
|
||||
(
|
||||
cc,
|
||||
defines,
|
||||
includes,
|
||||
cflags,
|
||||
"-c",
|
||||
gyp.common.EncodePOSIXShellArgument(file),
|
||||
)
|
||||
)
|
||||
commands.append(dict(command=command, directory=output_dir, file=file))
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
per_config_commands = {}
|
||||
for qualified_target, target in target_dicts.items():
|
||||
build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
|
||||
qualified_target
|
||||
)
|
||||
if IsMac(params):
|
||||
settings = data[build_file]
|
||||
gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target)
|
||||
cwd = os.path.dirname(build_file)
|
||||
AddCommandsForTarget(cwd, target, params, per_config_commands)
|
||||
|
||||
output_dir = params["generator_flags"].get("output_dir", "out")
|
||||
for configuration_name, commands in per_config_commands.items():
|
||||
filename = os.path.join(output_dir, configuration_name, "compile_commands.json")
|
||||
gyp.common.EnsureDirExists(filename)
|
||||
fp = open(filename, "w")
|
||||
json.dump(commands, fp=fp, indent=0, check_circular=False)
|
||||
|
||||
|
||||
def PerformBuild(data, configurations, params):
|
||||
pass
|
||||
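
# --- Editorial sketch (not part of the vendored file) ----------------------
# Minimal example of consuming the compile_commands.json written above. The
# path below assumes output_dir "out" and a configuration named "Release";
# each entry carries exactly the keys written by AddCommandsForTarget:
# "command", "directory" and "file".
import json

with open("out/Release/compile_commands.json") as fp:
    entries = json.load(fp)

for entry in entries:
    compiler = entry["command"].split()[0]  # "cc" or "c++"
    print(f"{compiler}: {entry['file']}")
# ----------------------------------------------------------------------------
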
103
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
generated
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
import os
|
||||
import gyp
|
||||
import gyp.common
|
||||
import gyp.msvs_emulation
|
||||
import json
|
||||
|
||||
generator_supports_multiple_toolsets = True
|
||||
|
||||
generator_wants_static_library_dependencies_adjusted = False
|
||||
|
||||
generator_filelist_paths = {}
|
||||
|
||||
generator_default_variables = {}
|
||||
for dirname in [
|
||||
"INTERMEDIATE_DIR",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"LIB_DIR",
|
||||
"SHARED_LIB_DIR",
|
||||
]:
|
||||
# Some gyp steps fail if these are empty(!).
|
||||
generator_default_variables[dirname] = "dir"
|
||||
for unused in [
|
||||
"RULE_INPUT_PATH",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"CONFIGURATION_NAME",
|
||||
]:
|
||||
generator_default_variables[unused] = ""
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for key, val in generator_flags.items():
|
||||
default_variables.setdefault(key, val)
|
||||
default_variables.setdefault("OS", gyp.common.GetFlavor(params))
|
||||
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "win":
|
||||
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
|
||||
|
||||
|
||||
def CalculateGeneratorInputInfo(params):
|
||||
"""Calculate the generator specific info that gets fed to input (called by
|
||||
gyp)."""
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
if generator_flags.get("adjust_static_libraries", False):
|
||||
global generator_wants_static_library_dependencies_adjusted
|
||||
generator_wants_static_library_dependencies_adjusted = True
|
||||
|
||||
toplevel = params["options"].toplevel_dir
|
||||
generator_dir = os.path.relpath(params["options"].generator_output or ".")
|
||||
# output_dir: relative path from generator_dir to the build directory.
|
||||
output_dir = generator_flags.get("output_dir", "out")
|
||||
qualified_out_dir = os.path.normpath(
|
||||
os.path.join(toplevel, generator_dir, output_dir, "gypfiles")
|
||||
)
|
||||
global generator_filelist_paths
|
||||
generator_filelist_paths = {
|
||||
"toplevel": toplevel,
|
||||
"qualified_out_dir": qualified_out_dir,
|
||||
}
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
# Map of target -> list of targets it depends on.
|
||||
edges = {}
|
||||
|
||||
# Queue of targets to visit.
|
||||
targets_to_visit = target_list[:]
|
||||
|
||||
while len(targets_to_visit) > 0:
|
||||
target = targets_to_visit.pop()
|
||||
if target in edges:
|
||||
continue
|
||||
edges[target] = []
|
||||
|
||||
for dep in target_dicts[target].get("dependencies", []):
|
||||
edges[target].append(dep)
|
||||
targets_to_visit.append(dep)
|
||||
|
||||
try:
|
||||
filepath = params["generator_flags"]["output_dir"]
|
||||
except KeyError:
|
||||
filepath = "."
|
||||
filename = os.path.join(filepath, "dump.json")
|
||||
f = open(filename, "w")
|
||||
json.dump(edges, f)
|
||||
f.close()
|
||||
print("Wrote json to %s." % filename)
|
||||
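
# --- Editorial sketch (not part of the vendored file) ----------------------
# Example of reading the dependency map written to dump.json above: keys are
# fully-qualified targets, values are the lists of targets they depend on.
import json

with open("dump.json") as fp:
    edges = json.load(fp)

roots = [target for target, deps in edges.items() if not deps]
print(f"{len(edges)} targets, {len(roots)} with no dependencies")
# ----------------------------------------------------------------------------
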
464
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
generated
vendored
Normal file
@@ -0,0 +1,464 @@
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""GYP backend that generates Eclipse CDT settings files.
|
||||
|
||||
This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
|
||||
files that can be imported into an Eclipse CDT project. The XML file contains a
|
||||
list of include paths and symbols (i.e. defines).
|
||||
|
||||
Because a full .cproject definition is not created by this generator, it's not
|
||||
possible to properly define the include dirs and symbols for each file
|
||||
individually. Instead, one set of includes/symbols is generated for the entire
|
||||
project. This works fairly well (and is a vast improvement in general), but may
|
||||
still result in a few indexer issues here and there.
|
||||
|
||||
This generator has no automated tests, so expect it to be broken.
|
||||
"""
|
||||
|
||||
from xml.sax.saxutils import escape
|
||||
import os.path
|
||||
import subprocess
|
||||
import gyp
|
||||
import gyp.common
|
||||
import gyp.msvs_emulation
|
||||
import shlex
|
||||
import xml.etree.cElementTree as ET
|
||||
|
||||
generator_wants_static_library_dependencies_adjusted = False
|
||||
|
||||
generator_default_variables = {}
|
||||
|
||||
for dirname in ["INTERMEDIATE_DIR", "PRODUCT_DIR", "LIB_DIR", "SHARED_LIB_DIR"]:
|
||||
# Some gyp steps fail if these are empty(!), so we convert them to variables
|
||||
generator_default_variables[dirname] = "$" + dirname
|
||||
|
||||
for unused in [
|
||||
"RULE_INPUT_PATH",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"CONFIGURATION_NAME",
|
||||
]:
|
||||
generator_default_variables[unused] = ""
|
||||
|
||||
# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
|
||||
# part of the path when dealing with generated headers. This value will be
|
||||
# replaced dynamically for each configuration.
|
||||
generator_default_variables["SHARED_INTERMEDIATE_DIR"] = "$SHARED_INTERMEDIATE_DIR"
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for key, val in generator_flags.items():
|
||||
default_variables.setdefault(key, val)
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
default_variables.setdefault("OS", flavor)
|
||||
if flavor == "win":
|
||||
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
|
||||
|
||||
|
||||
def CalculateGeneratorInputInfo(params):
|
||||
"""Calculate the generator specific info that gets fed to input (called by
|
||||
gyp)."""
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
if generator_flags.get("adjust_static_libraries", False):
|
||||
global generator_wants_static_library_dependencies_adjusted
|
||||
generator_wants_static_library_dependencies_adjusted = True
|
||||
|
||||
|
||||
def GetAllIncludeDirectories(
|
||||
target_list,
|
||||
target_dicts,
|
||||
shared_intermediate_dirs,
|
||||
config_name,
|
||||
params,
|
||||
compiler_path,
|
||||
):
|
||||
"""Calculate the set of include directories to be used.
|
||||
|
||||
Returns:
|
||||
A list including all the include_dir's specified for every target followed
|
||||
by any include directories that were added as cflag compiler options.
|
||||
"""
|
||||
|
||||
gyp_includes_set = set()
|
||||
compiler_includes_list = []
|
||||
|
||||
# Find compiler's default include dirs.
|
||||
if compiler_path:
|
||||
command = shlex.split(compiler_path)
|
||||
command.extend(["-E", "-xc++", "-v", "-"])
|
||||
proc = subprocess.Popen(
|
||||
args=command,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
output = proc.communicate()[1].decode("utf-8")
|
||||
# Extract the list of include dirs from the output, which has this format:
|
||||
# ...
|
||||
# #include "..." search starts here:
|
||||
# #include <...> search starts here:
|
||||
# /usr/include/c++/4.6
|
||||
# /usr/local/include
|
||||
# End of search list.
|
||||
# ...
|
||||
in_include_list = False
|
||||
for line in output.splitlines():
|
||||
if line.startswith("#include"):
|
||||
in_include_list = True
|
||||
continue
|
||||
if line.startswith("End of search list."):
|
||||
break
|
||||
if in_include_list:
|
||||
include_dir = line.strip()
|
||||
if include_dir not in compiler_includes_list:
|
||||
compiler_includes_list.append(include_dir)
|
||||
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "win":
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
if config_name in target["configurations"]:
|
||||
config = target["configurations"][config_name]
|
||||
|
||||
# Look for any include dirs that were explicitly added via cflags. This
|
||||
# may be done in gyp files to force certain includes to come at the end.
|
||||
# TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
|
||||
# remove this.
|
||||
if flavor == "win":
|
||||
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
|
||||
cflags = msvs_settings.GetCflags(config_name)
|
||||
else:
|
||||
cflags = config["cflags"]
|
||||
for cflag in cflags:
|
||||
if cflag.startswith("-I"):
|
||||
include_dir = cflag[2:]
|
||||
if include_dir not in compiler_includes_list:
|
||||
compiler_includes_list.append(include_dir)
|
||||
|
||||
# Find standard gyp include dirs.
|
||||
if "include_dirs" in config:
|
||||
include_dirs = config["include_dirs"]
|
||||
for shared_intermediate_dir in shared_intermediate_dirs:
|
||||
for include_dir in include_dirs:
|
||||
include_dir = include_dir.replace(
|
||||
"$SHARED_INTERMEDIATE_DIR", shared_intermediate_dir
|
||||
)
|
||||
if not os.path.isabs(include_dir):
|
||||
base_dir = os.path.dirname(target_name)
|
||||
|
||||
include_dir = base_dir + "/" + include_dir
|
||||
include_dir = os.path.abspath(include_dir)
|
||||
|
||||
gyp_includes_set.add(include_dir)
|
||||
|
||||
# Generate a list that has all the include dirs.
|
||||
all_includes_list = list(gyp_includes_set)
|
||||
all_includes_list.sort()
|
||||
for compiler_include in compiler_includes_list:
|
||||
if compiler_include not in gyp_includes_set:
|
||||
all_includes_list.append(compiler_include)
|
||||
|
||||
# All done.
|
||||
return all_includes_list
|
||||
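
# --- Editorial sketch (not part of the vendored file) ----------------------
# The "default include dirs" probe above, in isolation: run the preprocessor
# on empty stdin with -v and scrape the "#include <...> search starts here"
# block from stderr. "c++" is just an example compiler driver.
import subprocess

proc = subprocess.Popen(
    ["c++", "-E", "-xc++", "-v", "-"],
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
)
stderr_text = proc.communicate(input=b"")[1].decode("utf-8")

default_includes = []
in_search_list = False
for line in stderr_text.splitlines():
    if line.startswith("#include"):
        in_search_list = True
    elif line.startswith("End of search list."):
        break
    elif in_search_list:
        default_includes.append(line.strip())

print(default_includes)
# ----------------------------------------------------------------------------
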
|
||||
|
||||
def GetCompilerPath(target_list, data, options):
|
||||
"""Determine a command that can be used to invoke the compiler.
|
||||
|
||||
Returns:
|
||||
If this is a gyp project that has explicit make settings, try to determine
|
||||
the compiler from that. Otherwise, see if a compiler was specified via the
|
||||
CC_target environment variable.
|
||||
"""
|
||||
# First, see if the compiler is configured in make's settings.
|
||||
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
|
||||
make_global_settings_dict = data[build_file].get("make_global_settings", {})
|
||||
for key, value in make_global_settings_dict:
|
||||
if key in ["CC", "CXX"]:
|
||||
return os.path.join(options.toplevel_dir, value)
|
||||
|
||||
# Check to see if the compiler was specified as an environment variable.
|
||||
for key in ["CC_target", "CC", "CXX"]:
|
||||
compiler = os.environ.get(key)
|
||||
if compiler:
|
||||
return compiler
|
||||
|
||||
return "gcc"
|
||||
|
||||
|
||||
def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path):
|
||||
"""Calculate the defines for a project.
|
||||
|
||||
Returns:
|
||||
A dict that includes explicit defines declared in gyp files along with all
|
||||
of the default defines that the compiler uses.
|
||||
"""
|
||||
|
||||
# Get defines declared in the gyp files.
|
||||
all_defines = {}
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "win":
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
|
||||
if flavor == "win":
|
||||
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
|
||||
extra_defines = msvs_settings.GetComputedDefines(config_name)
|
||||
else:
|
||||
extra_defines = []
|
||||
if config_name in target["configurations"]:
|
||||
config = target["configurations"][config_name]
|
||||
target_defines = config["defines"]
|
||||
else:
|
||||
target_defines = []
|
||||
for define in target_defines + extra_defines:
|
||||
split_define = define.split("=", 1)
|
||||
if len(split_define) == 1:
|
||||
split_define.append("1")
|
||||
if split_define[0].strip() in all_defines:
|
||||
# Already defined
|
||||
continue
|
||||
all_defines[split_define[0].strip()] = split_define[1].strip()
|
||||
# Get default compiler defines (if possible).
|
||||
if flavor == "win":
|
||||
return all_defines # Default defines already processed in the loop above.
|
||||
if compiler_path:
|
||||
command = shlex.split(compiler_path)
|
||||
command.extend(["-E", "-dM", "-"])
|
||||
cpp_proc = subprocess.Popen(
|
||||
args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE
|
||||
)
|
||||
cpp_output = cpp_proc.communicate()[0].decode("utf-8")
|
||||
cpp_lines = cpp_output.split("\n")
|
||||
for cpp_line in cpp_lines:
|
||||
if not cpp_line.strip():
|
||||
continue
|
||||
cpp_line_parts = cpp_line.split(" ", 2)
|
||||
key = cpp_line_parts[1]
|
||||
if len(cpp_line_parts) >= 3:
|
||||
val = cpp_line_parts[2]
|
||||
else:
|
||||
val = "1"
|
||||
all_defines[key] = val
|
||||
|
||||
return all_defines
|
||||
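
# --- Editorial sketch (not part of the vendored file) ----------------------
# The default-defines probe from GetAllDefines above, in isolation: ask the
# preprocessor to dump its predefined macros as "#define NAME VALUE" lines.
# "cc" is only an example compiler driver.
import subprocess

proc = subprocess.Popen(
    ["cc", "-E", "-dM", "-"], stdin=subprocess.PIPE, stdout=subprocess.PIPE
)
output = proc.communicate(input=b"")[0].decode("utf-8")

defines = {}
for line in output.splitlines():
    if not line.strip():
        continue
    parts = line.split(" ", 2)  # "#define", name, optional value
    defines[parts[1]] = parts[2] if len(parts) >= 3 else "1"

print(f"{len(defines)} predefined macros, e.g. __STDC__={defines.get('__STDC__')}")
# ----------------------------------------------------------------------------
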
|
||||
|
||||
def WriteIncludePaths(out, eclipse_langs, include_dirs):
|
||||
"""Write the includes section of a CDT settings export file."""
|
||||
|
||||
out.write(
|
||||
' <section name="org.eclipse.cdt.internal.ui.wizards.'
|
||||
'settingswizards.IncludePaths">\n'
|
||||
)
|
||||
out.write(' <language name="holder for library settings"></language>\n')
|
||||
for lang in eclipse_langs:
|
||||
out.write(' <language name="%s">\n' % lang)
|
||||
for include_dir in include_dirs:
|
||||
out.write(
|
||||
' <includepath workspace_path="false">%s</includepath>\n'
|
||||
% include_dir
|
||||
)
|
||||
out.write(" </language>\n")
|
||||
out.write(" </section>\n")
|
||||
|
||||
|
||||
def WriteMacros(out, eclipse_langs, defines):
|
||||
"""Write the macros section of a CDT settings export file."""
|
||||
|
||||
out.write(
|
||||
' <section name="org.eclipse.cdt.internal.ui.wizards.'
|
||||
'settingswizards.Macros">\n'
|
||||
)
|
||||
out.write(' <language name="holder for library settings"></language>\n')
|
||||
for lang in eclipse_langs:
|
||||
out.write(' <language name="%s">\n' % lang)
|
||||
for key in sorted(defines):
|
||||
out.write(
|
||||
" <macro><name>%s</name><value>%s</value></macro>\n"
|
||||
% (escape(key), escape(defines[key]))
|
||||
)
|
||||
out.write(" </language>\n")
|
||||
out.write(" </section>\n")
|
||||
|
||||
|
||||
def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name):
|
||||
options = params["options"]
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
|
||||
# build_dir: relative path from source root to our output files.
|
||||
# e.g. "out/Debug"
|
||||
build_dir = os.path.join(generator_flags.get("output_dir", "out"), config_name)
|
||||
|
||||
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
|
||||
# Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
|
||||
# SHARED_INTERMEDIATE_DIR. Include both possible locations.
|
||||
shared_intermediate_dirs = [
|
||||
os.path.join(toplevel_build, "obj", "gen"),
|
||||
os.path.join(toplevel_build, "gen"),
|
||||
]
|
||||
|
||||
GenerateCdtSettingsFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
data,
|
||||
params,
|
||||
config_name,
|
||||
os.path.join(toplevel_build, "eclipse-cdt-settings.xml"),
|
||||
options,
|
||||
shared_intermediate_dirs,
|
||||
)
|
||||
GenerateClasspathFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
options.toplevel_dir,
|
||||
toplevel_build,
|
||||
os.path.join(toplevel_build, "eclipse-classpath.xml"),
|
||||
)
|
||||
|
||||
|
||||
def GenerateCdtSettingsFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
data,
|
||||
params,
|
||||
config_name,
|
||||
out_name,
|
||||
options,
|
||||
shared_intermediate_dirs,
|
||||
):
|
||||
gyp.common.EnsureDirExists(out_name)
|
||||
with open(out_name, "w") as out:
|
||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
out.write("<cdtprojectproperties>\n")
|
||||
|
||||
eclipse_langs = [
|
||||
"C++ Source File",
|
||||
"C Source File",
|
||||
"Assembly Source File",
|
||||
"GNU C++",
|
||||
"GNU C",
|
||||
"Assembly",
|
||||
]
|
||||
compiler_path = GetCompilerPath(target_list, data, options)
|
||||
include_dirs = GetAllIncludeDirectories(
|
||||
target_list,
|
||||
target_dicts,
|
||||
shared_intermediate_dirs,
|
||||
config_name,
|
||||
params,
|
||||
compiler_path,
|
||||
)
|
||||
WriteIncludePaths(out, eclipse_langs, include_dirs)
|
||||
defines = GetAllDefines(
|
||||
target_list, target_dicts, data, config_name, params, compiler_path
|
||||
)
|
||||
WriteMacros(out, eclipse_langs, defines)
|
||||
|
||||
out.write("</cdtprojectproperties>\n")
|
||||
|
||||
|
||||
def GenerateClasspathFile(
|
||||
target_list, target_dicts, toplevel_dir, toplevel_build, out_name
|
||||
):
|
||||
"""Generates a classpath file suitable for symbol navigation and code
|
||||
completion of Java code (such as in Android projects) by finding all
|
||||
.java and .jar files used as action inputs."""
|
||||
gyp.common.EnsureDirExists(out_name)
|
||||
result = ET.Element("classpath")
|
||||
|
||||
def AddElements(kind, paths):
|
||||
# First, we need to normalize the paths so they are all relative to the
|
||||
# toplevel dir.
|
||||
rel_paths = set()
|
||||
for path in paths:
|
||||
if os.path.isabs(path):
|
||||
rel_paths.add(os.path.relpath(path, toplevel_dir))
|
||||
else:
|
||||
rel_paths.add(path)
|
||||
|
||||
for path in sorted(rel_paths):
|
||||
entry_element = ET.SubElement(result, "classpathentry")
|
||||
entry_element.set("kind", kind)
|
||||
entry_element.set("path", path)
|
||||
|
||||
AddElements("lib", GetJavaJars(target_list, target_dicts, toplevel_dir))
|
||||
AddElements("src", GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
|
||||
# Include the standard JRE container and a dummy out folder
|
||||
AddElements("con", ["org.eclipse.jdt.launching.JRE_CONTAINER"])
|
||||
# Include a dummy out folder so that Eclipse doesn't use the default /bin
|
||||
# folder in the root of the project.
|
||||
AddElements("output", [os.path.join(toplevel_build, ".eclipse-java-build")])
|
||||
|
||||
ET.ElementTree(result).write(out_name)
|
||||
|
||||
|
||||
def GetJavaJars(target_list, target_dicts, toplevel_dir):
|
||||
"""Generates a sequence of all .jars used as inputs."""
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
for action in target.get("actions", []):
|
||||
for input_ in action["inputs"]:
|
||||
if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"):
|
||||
if os.path.isabs(input_):
|
||||
yield input_
|
||||
else:
|
||||
yield os.path.join(os.path.dirname(target_name), input_)
|
||||
|
||||
|
||||
def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
|
||||
"""Generates a sequence of all likely java package root directories."""
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
for action in target.get("actions", []):
|
||||
for input_ in action["inputs"]:
|
||||
if os.path.splitext(input_)[1] == ".java" and not input_.startswith(
|
||||
"$"
|
||||
):
|
||||
dir_ = os.path.dirname(
|
||||
os.path.join(os.path.dirname(target_name), input_)
|
||||
)
|
||||
# If there is a parent 'src' or 'java' folder, navigate up to it -
|
||||
# these are canonical package root names in Chromium. This will
|
||||
# break if 'src' or 'java' exists in the package structure. This
|
||||
# could be further improved by inspecting the java file for the
|
||||
# package name if this proves to be too fragile in practice.
|
||||
parent_search = dir_
|
||||
while os.path.basename(parent_search) not in ["src", "java"]:
|
||||
parent_search, _ = os.path.split(parent_search)
|
||||
if not parent_search or parent_search == toplevel_dir:
|
||||
# Didn't find a known root, just return the original path
|
||||
yield dir_
|
||||
break
|
||||
else:
|
||||
yield parent_search
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
"""Generate an XML settings file that can be imported into a CDT project."""
|
||||
|
||||
if params["options"].generator_output:
|
||||
raise NotImplementedError("--generator_output not implemented for eclipse")
|
||||
|
||||
user_config = params.get("generator_flags", {}).get("config", None)
|
||||
if user_config:
|
||||
GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
|
||||
else:
|
||||
config_names = target_dicts[target_list[0]]["configurations"]
|
||||
for config_name in config_names:
|
||||
GenerateOutputForConfig(
|
||||
target_list, target_dicts, data, params, config_name
|
||||
)
|
||||
89
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
generated
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""gypd output module
|
||||
|
||||
This module produces gyp input as its output. Output files are given the
|
||||
.gypd extension to avoid overwriting the .gyp files that they are generated
|
||||
from. Internal references to .gyp files (such as those found in
|
||||
"dependencies" sections) are not adjusted to point to .gypd files instead;
|
||||
unlike other paths, which are relative to the .gyp or .gypd file, such paths
|
||||
are relative to the directory from which gyp was run to create the .gypd file.
|
||||
|
||||
This generator module is intended to be a sample and a debugging aid, hence
|
||||
the "d" for "debug" in .gypd. It is useful to inspect the results of the
|
||||
various merges, expansions, and conditional evaluations performed by gyp
|
||||
and to see a representation of what would be fed to a generator module.
|
||||
|
||||
It's not advisable to rename .gypd files produced by this module to .gyp,
|
||||
because they will have all merges, expansions, and evaluations already
|
||||
performed and the relevant constructs not present in the output; paths to
|
||||
dependencies may be wrong; and various sections that do not belong in .gyp
|
||||
files, such as "included_files" and "*_excluded", will be present.
|
||||
Output will also be stripped of comments. This is not intended to be a
|
||||
general-purpose gyp pretty-printer; for that, you probably just want to
|
||||
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
|
||||
comments but won't do all of the other things done to this module's output.
|
||||
|
||||
The specific formatting of the output generated by this module is subject
|
||||
to change.
|
||||
"""
|
||||
|
||||
|
||||
import gyp.common
|
||||
import pprint
|
||||
|
||||
|
||||
# These variables should just be spit back out as variable references.
|
||||
_generator_identity_variables = [
|
||||
"CONFIGURATION_NAME",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"INTERMEDIATE_DIR",
|
||||
"LIB_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_PATH",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
"SHARED_LIB_DIR",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
]
|
||||
|
||||
# gypd doesn't define a default value for OS like many other generator
|
||||
# modules. Specify "-D OS=whatever" on the command line to provide a value.
|
||||
generator_default_variables = {}
|
||||
|
||||
# gypd supports multiple toolsets
|
||||
generator_supports_multiple_toolsets = True
|
||||
|
||||
# TODO(mark): This always uses <, which isn't right. The input module should
|
||||
# notify the generator to tell it which phase it is operating in, and this
|
||||
# module should use < for the early phase and then switch to > for the late
|
||||
# phase. Bonus points for carrying @ back into the output too.
|
||||
for v in _generator_identity_variables:
|
||||
generator_default_variables[v] = "<(%s)" % v
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
output_files = {}
|
||||
for qualified_target in target_list:
|
||||
[input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
|
||||
|
||||
if input_file[-4:] != ".gyp":
|
||||
continue
|
||||
input_file_stem = input_file[:-4]
|
||||
output_file = input_file_stem + params["options"].suffix + ".gypd"
|
||||
|
||||
output_files[output_file] = output_files.get(output_file, input_file)
|
||||
|
||||
for output_file, input_file in output_files.items():
|
||||
output = open(output_file, "w")
|
||||
pprint.pprint(data[input_file], output)
|
||||
output.close()
|
||||
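
# --- Editorial sketch (not part of the vendored file) ----------------------
# A .gypd file written above is just pprint output, i.e. a Python literal, so
# it can usually be read back with ast.literal_eval. "example.gypd" is a
# placeholder file name.
import ast

with open("example.gypd") as fp:
    gyp_data = ast.literal_eval(fp.read())

print(sorted(gyp_data))  # top-level keys, e.g. "targets"
# ----------------------------------------------------------------------------
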
58
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""gypsh output module
|
||||
|
||||
gypsh is a GYP shell. It's not really a generator per se. All it does is
|
||||
fire up an interactive Python session with a few local variables set to the
|
||||
variables passed to the generator. Like gypd, it's intended as a debugging
|
||||
aid, to facilitate the exploration of .gyp structures after being processed
|
||||
by the input module.
|
||||
|
||||
The expected usage is "gyp -f gypsh -D OS=desired_os".
|
||||
"""
|
||||
|
||||
|
||||
import code
|
||||
import sys
|
||||
|
||||
|
||||
# All of this stuff about generator variables was lovingly ripped from gypd.py.
|
||||
# That module has a much better description of what's going on and why.
|
||||
_generator_identity_variables = [
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"INTERMEDIATE_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_PATH",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
]
|
||||
|
||||
generator_default_variables = {}
|
||||
|
||||
for v in _generator_identity_variables:
|
||||
generator_default_variables[v] = "<(%s)" % v
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
locals = {
|
||||
"target_list": target_list,
|
||||
"target_dicts": target_dicts,
|
||||
"data": data,
|
||||
}
|
||||
|
||||
# Use a banner that looks like the stock Python one and like what
|
||||
# code.interact uses by default, but tack on something to indicate what
|
||||
# locals are available, and identify gypsh.
|
||||
banner = "Python {} on {}\nlocals.keys() = {}\ngypsh".format(
|
||||
sys.version,
|
||||
sys.platform,
|
||||
repr(sorted(locals.keys())),
|
||||
)
|
||||
|
||||
code.interact(banner, local=locals)
|
||||
2717
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
generated
vendored
Normal file
File diff suppressed because it is too large
3981
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
generated
vendored
Normal file
File diff suppressed because it is too large
44
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
generated
vendored
Executable file
@@ -0,0 +1,44 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the msvs.py file. """
|
||||
|
||||
import gyp.generator.msvs as msvs
|
||||
import unittest
|
||||
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class TestSequenceFunctions(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.stderr = StringIO()
|
||||
|
||||
def test_GetLibraries(self):
|
||||
self.assertEqual(msvs._GetLibraries({}), [])
|
||||
self.assertEqual(msvs._GetLibraries({"libraries": []}), [])
|
||||
self.assertEqual(
|
||||
msvs._GetLibraries({"other": "foo", "libraries": ["a.lib"]}), ["a.lib"]
|
||||
)
|
||||
self.assertEqual(msvs._GetLibraries({"libraries": ["-la"]}), ["a.lib"])
|
||||
self.assertEqual(
|
||||
msvs._GetLibraries(
|
||||
{
|
||||
"libraries": [
|
||||
"a.lib",
|
||||
"b.lib",
|
||||
"c.lib",
|
||||
"-lb.lib",
|
||||
"-lb.lib",
|
||||
"d.lib",
|
||||
"a.lib",
|
||||
]
|
||||
}
|
||||
),
|
||||
["c.lib", "b.lib", "d.lib", "a.lib"],
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
2936
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
generated
vendored
Normal file
File diff suppressed because it is too large
55
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the ninja.py file. """
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
import gyp.generator.ninja as ninja
|
||||
|
||||
|
||||
class TestPrefixesAndSuffixes(unittest.TestCase):
|
||||
def test_BinaryNamesWindows(self):
|
||||
# These cannot run on non-Windows as they require a VS installation to
|
||||
# correctly handle variable expansion.
|
||||
if sys.platform.startswith("win"):
|
||||
writer = ninja.NinjaWriter(
|
||||
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win"
|
||||
)
|
||||
spec = {"target_name": "wee"}
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "executable").endswith(".exe")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").endswith(".lib")
|
||||
)
|
||||
|
||||
def test_BinaryNamesLinux(self):
|
||||
writer = ninja.NinjaWriter(
|
||||
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux"
|
||||
)
|
||||
spec = {"target_name": "wee"}
|
||||
self.assertTrue("." not in writer.ComputeOutputFileName(spec, "executable"))
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").startswith("lib")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").startswith("lib")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").endswith(".so")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").endswith(".a")
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
1394
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
generated
vendored
Normal file
File diff suppressed because it is too large
25
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the xcode.py file. """
|
||||
|
||||
import gyp.generator.xcode as xcode
|
||||
import unittest
|
||||
import sys
|
||||
|
||||
|
||||
class TestEscapeXcodeDefine(unittest.TestCase):
|
||||
if sys.platform == "darwin":
|
||||
|
||||
def test_InheritedRemainsUnescaped(self):
|
||||
self.assertEqual(xcode.EscapeXcodeDefine("$(inherited)"), "$(inherited)")
|
||||
|
||||
def test_Escaping(self):
|
||||
self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
3130
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/input.py
generated
vendored
Normal file
File diff suppressed because it is too large
98
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
generated
vendored
Executable file
@@ -0,0 +1,98 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Unit tests for the input.py file."""
|
||||
|
||||
import gyp.input
|
||||
import unittest
|
||||
|
||||
|
||||
class TestFindCycles(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.nodes = {}
|
||||
for x in ("a", "b", "c", "d", "e"):
|
||||
self.nodes[x] = gyp.input.DependencyGraphNode(x)
|
||||
|
||||
def _create_dependency(self, dependent, dependency):
|
||||
dependent.dependencies.append(dependency)
|
||||
dependency.dependents.append(dependent)
|
||||
|
||||
def test_no_cycle_empty_graph(self):
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_no_cycle_line(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["d"])
|
||||
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_no_cycle_dag(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["a"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_cycle_self_reference(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[[self.nodes["a"], self.nodes["a"]]], self.nodes["a"].FindCycles()
|
||||
)
|
||||
|
||||
def test_cycle_two_nodes(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[[self.nodes["a"], self.nodes["b"], self.nodes["a"]]],
|
||||
self.nodes["a"].FindCycles(),
|
||||
)
|
||||
self.assertEqual(
|
||||
[[self.nodes["b"], self.nodes["a"], self.nodes["b"]]],
|
||||
self.nodes["b"].FindCycles(),
|
||||
)
|
||||
|
||||
def test_two_cycles(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["a"])
|
||||
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["b"])
|
||||
|
||||
cycles = self.nodes["a"].FindCycles()
|
||||
self.assertTrue([self.nodes["a"], self.nodes["b"], self.nodes["a"]] in cycles)
|
||||
self.assertTrue([self.nodes["b"], self.nodes["c"], self.nodes["b"]] in cycles)
|
||||
self.assertEqual(2, len(cycles))
|
||||
|
||||
def test_big_cycle(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["d"])
|
||||
self._create_dependency(self.nodes["d"], self.nodes["e"])
|
||||
self._create_dependency(self.nodes["e"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
self.nodes["a"],
|
||||
self.nodes["b"],
|
||||
self.nodes["c"],
|
||||
self.nodes["d"],
|
||||
self.nodes["e"],
|
||||
self.nodes["a"],
|
||||
]
|
||||
],
|
||||
self.nodes["a"].FindCycles(),
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
771
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
generated
vendored
Executable file
@@ -0,0 +1,771 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Utility functions to perform Xcode-style build steps.
|
||||
|
||||
These functions are executed via gyp-mac-tool when using the Makefile generator.
|
||||
"""
|
||||
|
||||
|
||||
import fcntl
|
||||
import fnmatch
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import plistlib
|
||||
import re
|
||||
import shutil
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
|
||||
def main(args):
|
||||
executor = MacTool()
|
||||
exit_code = executor.Dispatch(args)
|
||||
if exit_code is not None:
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
class MacTool:
|
||||
"""This class performs all the Mac tooling steps. The methods can either be
|
||||
executed directly, or dispatched from an argument list."""
|
||||
|
||||
def Dispatch(self, args):
|
||||
"""Dispatches a string command to a method."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
method = "Exec%s" % self._CommandifyName(args[0])
|
||||
return getattr(self, method)(*args[1:])
|
||||
|
||||
def _CommandifyName(self, name_string):
|
||||
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
|
||||
return name_string.title().replace("-", "")
|
||||
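
# --- Editorial sketch (not part of the vendored file) ----------------------
# Illustration of the dispatch naming convention used by Dispatch and
# _CommandifyName above: a hyphenated tool name maps to an Exec<CamelCase>
# method, so "gyp-mac-tool copy-info-plist ..." ends up calling
# ExecCopyInfoPlist.
name = "copy-info-plist"
method = "Exec" + name.title().replace("-", "")
assert method == "ExecCopyInfoPlist"
# ----------------------------------------------------------------------------
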
|
||||
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
|
||||
"""Copies a resource file to the bundle/Resources directory, performing any
|
||||
necessary compilation on each resource."""
|
||||
convert_to_binary = convert_to_binary == "True"
|
||||
extension = os.path.splitext(source)[1].lower()
|
||||
if os.path.isdir(source):
|
||||
# Copy tree.
|
||||
# TODO(thakis): This copies file attributes like mtime, while the
|
||||
# single-file branch below doesn't. This should probably be changed to
|
||||
# be consistent with the single-file branch.
|
||||
if os.path.exists(dest):
|
||||
shutil.rmtree(dest)
|
||||
shutil.copytree(source, dest)
|
||||
elif extension == ".xib":
|
||||
return self._CopyXIBFile(source, dest)
|
||||
elif extension == ".storyboard":
|
||||
return self._CopyXIBFile(source, dest)
|
||||
elif extension == ".strings" and not convert_to_binary:
|
||||
self._CopyStringsFile(source, dest)
|
||||
else:
|
||||
if os.path.exists(dest):
|
||||
os.unlink(dest)
|
||||
shutil.copy(source, dest)
|
||||
|
||||
if convert_to_binary and extension in (".plist", ".strings"):
|
||||
self._ConvertToBinary(dest)
|
||||
|
||||
def _CopyXIBFile(self, source, dest):
|
||||
"""Compiles a XIB file with ibtool into a binary plist in the bundle."""
|
||||
|
||||
# ibtool sometimes crashes with relative paths. See crbug.com/314728.
|
||||
base = os.path.dirname(os.path.realpath(__file__))
|
||||
if os.path.relpath(source):
|
||||
source = os.path.join(base, source)
|
||||
if os.path.relpath(dest):
|
||||
dest = os.path.join(base, dest)
|
||||
|
||||
args = ["xcrun", "ibtool", "--errors", "--warnings", "--notices"]
|
||||
|
||||
if os.environ["XCODE_VERSION_ACTUAL"] > "0700":
|
||||
args.extend(["--auto-activate-custom-fonts"])
|
||||
if "IPHONEOS_DEPLOYMENT_TARGET" in os.environ:
|
||||
args.extend(
|
||||
[
|
||||
"--target-device",
|
||||
"iphone",
|
||||
"--target-device",
|
||||
"ipad",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
|
||||
]
|
||||
)
|
||||
else:
|
||||
args.extend(
|
||||
[
|
||||
"--target-device",
|
||||
"mac",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["MACOSX_DEPLOYMENT_TARGET"],
|
||||
]
|
||||
)
|
||||
|
||||
args.extend(
|
||||
["--output-format", "human-readable-text", "--compile", dest, source]
|
||||
)
|
||||
|
||||
ibtool_section_re = re.compile(r"/\*.*\*/")
|
||||
ibtool_re = re.compile(r".*note:.*is clipping its content")
|
||||
try:
|
||||
stdout = subprocess.check_output(args)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(e.output)
|
||||
raise
|
||||
current_section_header = None
|
||||
for line in stdout.splitlines():
|
||||
if ibtool_section_re.match(line):
|
||||
current_section_header = line
|
||||
elif not ibtool_re.match(line):
|
||||
if current_section_header:
|
||||
print(current_section_header)
|
||||
current_section_header = None
|
||||
print(line)
|
||||
return 0
|
||||
|
||||
def _ConvertToBinary(self, dest):
|
||||
subprocess.check_call(
|
||||
["xcrun", "plutil", "-convert", "binary1", "-o", dest, dest]
|
||||
)
|
||||
|
||||
def _CopyStringsFile(self, source, dest):
|
||||
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
|
||||
input_code = self._DetectInputEncoding(source) or "UTF-8"
|
||||
|
||||
# Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
|
||||
# CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
|
||||
# CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
|
||||
# semicolon in dictionary.
|
||||
# on invalid files. Do the same kind of validation.
|
||||
import CoreFoundation
|
||||
|
||||
with open(source, "rb") as in_file:
|
||||
s = in_file.read()
|
||||
d = CoreFoundation.CFDataCreate(None, s, len(s))
|
||||
_, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
|
||||
if error:
|
||||
return
|
||||
|
||||
with open(dest, "wb") as fp:
|
||||
fp.write(s.decode(input_code).encode("UTF-16"))
|
||||
|
||||
def _DetectInputEncoding(self, file_name):
|
||||
"""Reads the first few bytes from file_name and tries to guess the text
|
||||
encoding. Returns None as a guess if it can't detect it."""
|
||||
with open(file_name, "rb") as fp:
|
||||
try:
|
||||
header = fp.read(3)
|
||||
except Exception:
|
||||
return None
|
||||
if header.startswith(b"\xFE\xFF"):
|
||||
return "UTF-16"
|
||||
elif header.startswith(b"\xFF\xFE"):
|
||||
return "UTF-16"
|
||||
elif header.startswith(b"\xEF\xBB\xBF"):
|
||||
return "UTF-8"
|
||||
else:
|
||||
return None
|
||||
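
# --- Editorial sketch (not part of the vendored file) ----------------------
# The BOM sniffing above in miniature: the first few bytes decide the guess,
# and anything without a recognised byte-order mark returns None.
def _guess_encoding(first_bytes):
    if first_bytes.startswith((b"\xFE\xFF", b"\xFF\xFE")):
        return "UTF-16"
    if first_bytes.startswith(b"\xEF\xBB\xBF"):
        return "UTF-8"
    return None

assert _guess_encoding(b"\xEF\xBB\xBFhello") == "UTF-8"
assert _guess_encoding(b"plain ascii") is None
# ----------------------------------------------------------------------------
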
|
||||
def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
|
||||
"""Copies the |source| Info.plist to the destination directory |dest|."""
|
||||
# Read the source Info.plist into memory.
|
||||
with open(source) as fd:
|
||||
lines = fd.read()
|
||||
|
||||
# Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
|
||||
plist = plistlib.readPlistFromString(lines)
|
||||
if keys:
|
||||
plist.update(json.loads(keys[0]))
|
||||
lines = plistlib.writePlistToString(plist)
|
||||
|
||||
# Go through all the environment variables and replace them as variables in
|
||||
# the file.
|
||||
IDENT_RE = re.compile(r"[_/\s]")
|
||||
for key in os.environ:
|
||||
if key.startswith("_"):
|
||||
continue
|
||||
evar = "${%s}" % key
|
||||
evalue = os.environ[key]
|
||||
lines = lines.replace(evar, evalue)
|
||||
|
||||
# Xcode supports various suffixes on environment variables, which are
|
||||
# all undocumented. :rfc1034identifier is used in the standard project
|
||||
# template these days, and :identifier was used earlier. They are used to
|
||||
# convert non-url characters into things that look like valid urls --
|
||||
# except that the replacement character for :identifier, '_' isn't valid
|
||||
# in a URL either -- oops, hence :rfc1034identifier was born.
|
||||
evar = "${%s:identifier}" % key
|
||||
evalue = IDENT_RE.sub("_", os.environ[key])
|
||||
lines = lines.replace(evar, evalue)
|
||||
|
||||
evar = "${%s:rfc1034identifier}" % key
|
||||
evalue = IDENT_RE.sub("-", os.environ[key])
|
||||
lines = lines.replace(lines, evar, evalue)
|
||||
|
||||
# Remove any keys with values that haven't been replaced.
|
||||
lines = lines.splitlines()
|
||||
for i in range(len(lines)):
|
||||
if lines[i].strip().startswith("<string>${"):
|
||||
lines[i] = None
|
||||
lines[i - 1] = None
|
||||
lines = "\n".join(line for line in lines if line is not None)
|
||||
|
||||
# Write out the file with variables replaced.
|
||||
with open(dest, "w") as fd:
|
||||
fd.write(lines)
|
||||
|
||||
# Now write out PkgInfo file now that the Info.plist file has been
|
||||
# "compiled".
|
||||
self._WritePkgInfo(dest)
|
||||
|
||||
if convert_to_binary == "True":
|
||||
self._ConvertToBinary(dest)
|
||||
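
# --- Editorial sketch (not part of the vendored file) ----------------------
# The three substitution forms handled above, shown for a made-up variable
# PRODUCT_NAME with value "My App_1".
import re

IDENT_RE = re.compile(r"[_/\s]")
value = "My App_1"
print(value)                     # ${PRODUCT_NAME}                   -> "My App_1"
print(IDENT_RE.sub("_", value))  # ${PRODUCT_NAME:identifier}        -> "My_App_1"
print(IDENT_RE.sub("-", value))  # ${PRODUCT_NAME:rfc1034identifier} -> "My-App-1"
# ----------------------------------------------------------------------------
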
|
||||
def _WritePkgInfo(self, info_plist):
|
||||
"""This writes the PkgInfo file from the data stored in Info.plist."""
|
||||
plist = plistlib.readPlist(info_plist)
|
||||
if not plist:
|
||||
return
|
||||
|
||||
# Only create PkgInfo for executable types.
|
||||
package_type = plist["CFBundlePackageType"]
|
||||
if package_type != "APPL":
|
||||
return
|
||||
|
||||
# The format of PkgInfo is eight characters, representing the bundle type
|
||||
# and bundle signature, each four characters. If that is missing, four
|
||||
# '?' characters are used instead.
|
||||
signature_code = plist.get("CFBundleSignature", "????")
|
||||
if len(signature_code) != 4: # Wrong length resets everything, too.
|
||||
signature_code = "?" * 4
|
||||
|
||||
dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
|
||||
with open(dest, "w") as fp:
|
||||
fp.write(f"{package_type}{signature_code}")
|
||||
|
||||
def ExecFlock(self, lockfile, *cmd_list):
|
||||
"""Emulates the most basic behavior of Linux's flock(1)."""
|
||||
# Rely on exception handling to report errors.
|
||||
fd = os.open(lockfile, os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
|
||||
fcntl.flock(fd, fcntl.LOCK_EX)
|
||||
return subprocess.call(cmd_list)
|
||||
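
# --- Editorial sketch (not part of the vendored file) ----------------------
# ExecFlock's behaviour in isolation: take an exclusive lock on a lock file,
# run a command while holding it, then release. POSIX-only (fcntl); the
# lock-file path and the command are placeholders.
import fcntl
import os
import subprocess

fd = os.open("/tmp/example.lock", os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
fcntl.flock(fd, fcntl.LOCK_EX)
try:
    subprocess.call(["echo", "holding the lock"])
finally:
    fcntl.flock(fd, fcntl.LOCK_UN)
    os.close(fd)
# ----------------------------------------------------------------------------
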
|
||||
def ExecFilterLibtool(self, *cmd_list):
|
||||
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
|
||||
symbols'."""
|
||||
libtool_re = re.compile(
|
||||
r"^.*libtool: (?:for architecture: \S* )?" r"file: .* has no symbols$"
|
||||
)
|
||||
libtool_re5 = re.compile(
|
||||
r"^.*libtool: warning for library: "
|
||||
+ r".* the table of contents is empty "
|
||||
+ r"\(no object file members in the library define global symbols\)$"
|
||||
)
|
||||
env = os.environ.copy()
|
||||
# Ref:
|
||||
# http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
|
||||
# The problem with this flag is that it resets the file mtime on the file to
|
||||
# epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
|
||||
env["ZERO_AR_DATE"] = "1"
|
||||
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
|
||||
err = libtoolout.communicate()[1].decode("utf-8")
|
||||
for line in err.splitlines():
|
||||
if not libtool_re.match(line) and not libtool_re5.match(line):
|
||||
print(line, file=sys.stderr)
|
||||
# Unconditionally touch the output .a file on the command line if present
|
||||
# and the command succeeded. A bit hacky.
|
||||
if not libtoolout.returncode:
|
||||
for i in range(len(cmd_list) - 1):
|
||||
if cmd_list[i] == "-o" and cmd_list[i + 1].endswith(".a"):
|
||||
os.utime(cmd_list[i + 1], None)
|
||||
break
|
||||
return libtoolout.returncode
|
||||
|
||||
def ExecPackageIosFramework(self, framework):
|
||||
# Find the name of the binary based on the part before the ".framework".
|
||||
binary = os.path.basename(framework).split(".")[0]
|
||||
module_path = os.path.join(framework, "Modules")
|
||||
if not os.path.exists(module_path):
|
||||
os.mkdir(module_path)
|
||||
module_template = (
|
||||
"framework module %s {\n"
|
||||
' umbrella header "%s.h"\n'
|
||||
"\n"
|
||||
" export *\n"
|
||||
" module * { export * }\n"
|
||||
"}\n" % (binary, binary)
|
||||
)
|
||||
|
||||
with open(os.path.join(module_path, "module.modulemap"), "w") as module_file:
|
||||
module_file.write(module_template)
|
||||
|
||||
def ExecPackageFramework(self, framework, version):
|
||||
"""Takes a path to Something.framework and the Current version of that and
|
||||
sets up all the symlinks."""
|
||||
# Find the name of the binary based on the part before the ".framework".
|
||||
binary = os.path.basename(framework).split(".")[0]
|
||||
|
||||
CURRENT = "Current"
|
||||
RESOURCES = "Resources"
|
||||
VERSIONS = "Versions"
|
||||
|
||||
if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
|
||||
# Binary-less frameworks don't seem to contain symlinks (see e.g.
|
||||
# chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
|
||||
return
|
||||
|
||||
# Move into the framework directory to set the symlinks correctly.
|
||||
pwd = os.getcwd()
|
||||
os.chdir(framework)
|
||||
|
||||
# Set up the Current version.
|
||||
self._Relink(version, os.path.join(VERSIONS, CURRENT))
|
||||
|
||||
# Set up the root symlinks.
|
||||
self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
|
||||
self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
|
||||
|
||||
# Back to where we were before!
|
||||
os.chdir(pwd)
|
||||
|
||||
def _Relink(self, dest, link):
|
||||
"""Creates a symlink to |dest| named |link|. If |link| already exists,
|
||||
it is overwritten."""
|
||||
if os.path.lexists(link):
|
||||
os.remove(link)
|
||||
os.symlink(dest, link)
|
||||
|
||||
def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
|
||||
framework_name = os.path.basename(framework).split(".")[0]
|
||||
all_headers = [os.path.abspath(header) for header in all_headers]
|
||||
filelist = {}
|
||||
for header in all_headers:
|
||||
filename = os.path.basename(header)
|
||||
filelist[filename] = header
|
||||
filelist[os.path.join(framework_name, filename)] = header
|
||||
WriteHmap(out, filelist)
|
||||
|
||||
def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
|
||||
header_path = os.path.join(framework, "Headers")
|
||||
if not os.path.exists(header_path):
|
||||
os.makedirs(header_path)
|
||||
for header in copy_headers:
|
||||
shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
|
||||
|
||||
def ExecCompileXcassets(self, keys, *inputs):
|
||||
"""Compiles multiple .xcassets files into a single .car file.
|
||||
|
||||
This invokes 'actool' to compile all the input .xcassets files. The
|
||||
|keys| argument is a JSON-encoded dictionary of extra arguments to
|
||||
pass to 'actool' when the asset catalog contains an application icon
|
||||
or a launch image.
|
||||
|
||||
Note that 'actool' does not create the Assets.car file if the asset
|
||||
catalog does not contain an imageset.
|
||||
"""
|
||||
command_line = [
|
||||
"xcrun",
|
||||
"actool",
|
||||
"--output-format",
|
||||
"human-readable-text",
|
||||
"--compress-pngs",
|
||||
"--notices",
|
||||
"--warnings",
|
||||
"--errors",
|
||||
]
|
||||
is_iphone_target = "IPHONEOS_DEPLOYMENT_TARGET" in os.environ
|
||||
if is_iphone_target:
|
||||
platform = os.environ["CONFIGURATION"].split("-")[-1]
|
||||
if platform not in ("iphoneos", "iphonesimulator"):
|
||||
platform = "iphonesimulator"
|
||||
command_line.extend(
|
||||
[
|
||||
"--platform",
|
||||
platform,
|
||||
"--target-device",
|
||||
"iphone",
|
||||
"--target-device",
|
||||
"ipad",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
|
||||
"--compile",
|
||||
os.path.abspath(os.environ["CONTENTS_FOLDER_PATH"]),
|
||||
]
|
||||
)
|
||||
else:
|
||||
command_line.extend(
|
||||
[
|
||||
"--platform",
|
||||
"macosx",
|
||||
"--target-device",
|
||||
"mac",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["MACOSX_DEPLOYMENT_TARGET"],
|
||||
"--compile",
|
||||
os.path.abspath(os.environ["UNLOCALIZED_RESOURCES_FOLDER_PATH"]),
|
||||
]
|
||||
)
|
||||
if keys:
|
||||
keys = json.loads(keys)
|
||||
for key, value in keys.items():
|
||||
arg_name = "--" + key
|
||||
if isinstance(value, bool):
|
||||
if value:
|
||||
command_line.append(arg_name)
|
||||
elif isinstance(value, list):
|
||||
for v in value:
|
||||
command_line.append(arg_name)
|
||||
command_line.append(str(v))
|
||||
else:
|
||||
command_line.append(arg_name)
|
||||
command_line.append(str(value))
|
||||
# Note: actool crashes if input paths are relative, so use os.path.abspath
|
||||
# to get absolute path names for inputs.
|
||||
command_line.extend(map(os.path.abspath, inputs))
|
||||
subprocess.check_call(command_line)
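# Editor's sketch (not part of the original tool): a hypothetical |keys| value
# and the extra 'actool' arguments the loop above would derive from it:
#
#   keys = '{"app-icon": "AppIcon", "launch-image": "LaunchImage"}'
#   # -> --app-icon AppIcon --launch-image LaunchImage
#
# Boolean values append only the flag itself, and list values repeat the flag
# once per element.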
|
||||
|
||||
def ExecMergeInfoPlist(self, output, *inputs):
|
||||
"""Merge multiple .plist files into a single .plist file."""
|
||||
merged_plist = {}
|
||||
for path in inputs:
|
||||
plist = self._LoadPlistMaybeBinary(path)
|
||||
self._MergePlist(merged_plist, plist)
|
||||
plistlib.writePlist(merged_plist, output)
|
||||
|
||||
def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
|
||||
"""Code sign a bundle.
|
||||
|
||||
This function tries to code sign an iOS bundle, following the same
|
||||
algorithm as Xcode:
|
||||
1. pick the provisioning profile that best matches the bundle identifier,
|
||||
and copy it into the bundle as embedded.mobileprovision,
|
||||
2. copy Entitlements.plist from user or SDK next to the bundle,
|
||||
3. code sign the bundle.
|
||||
"""
|
||||
substitutions, overrides = self._InstallProvisioningProfile(
|
||||
provisioning, self._GetCFBundleIdentifier()
|
||||
)
|
||||
entitlements_path = self._InstallEntitlements(
|
||||
entitlements, substitutions, overrides
|
||||
)
|
||||
|
||||
args = ["codesign", "--force", "--sign", key]
|
||||
if preserve == "True":
|
||||
args.extend(["--deep", "--preserve-metadata=identifier,entitlements"])
|
||||
else:
|
||||
args.extend(["--entitlements", entitlements_path])
|
||||
args.extend(["--timestamp=none", path])
|
||||
subprocess.check_call(args)
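# Editor's sketch (not part of the original tool): with preserve == "False" the
# command assembled above has the shape
#
#   codesign --force --sign <key> --entitlements <Product>.xcent \
#            --timestamp=none <bundle path>
#
# whereas preserve == "True" swaps the --entitlements argument for
# --deep --preserve-metadata=identifier,entitlements.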
|
||||
|
||||
def _InstallProvisioningProfile(self, profile, bundle_identifier):
|
||||
"""Installs embedded.mobileprovision into the bundle.
|
||||
|
||||
Args:
|
||||
profile: string, optional, short name of the .mobileprovision file
|
||||
to use; if empty or the file is missing, the best installed file
|
||||
will be used
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
|
||||
Returns:
|
||||
A tuple containing two dictionaries: variable substitutions and values
|
||||
to override when generating the entitlements file.
|
||||
"""
|
||||
source_path, provisioning_data, team_id = self._FindProvisioningProfile(
|
||||
profile, bundle_identifier
|
||||
)
|
||||
target_path = os.path.join(
|
||||
os.environ["BUILT_PRODUCTS_DIR"],
|
||||
os.environ["CONTENTS_FOLDER_PATH"],
|
||||
"embedded.mobileprovision",
|
||||
)
|
||||
shutil.copy2(source_path, target_path)
|
||||
substitutions = self._GetSubstitutions(bundle_identifier, team_id + ".")
|
||||
return substitutions, provisioning_data["Entitlements"]
|
||||
|
||||
def _FindProvisioningProfile(self, profile, bundle_identifier):
|
||||
"""Finds the .mobileprovision file to use for signing the bundle.
|
||||
|
||||
Checks all the installed provisioning profiles (or if the user specified
|
||||
the PROVISIONING_PROFILE variable, only consults that one) and selects the most
|
||||
specific one that corresponds to the bundle identifier.
|
||||
|
||||
Args:
|
||||
profile: string, optional, short name of the .mobileprovision file
|
||||
to use; if empty or the file is missing, the best installed file
|
||||
will be used
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
|
||||
Returns:
|
||||
A tuple of the path to the selected provisioning profile, the data of
|
||||
the embedded plist in the provisioning profile and the team identifier
|
||||
to use for code signing.
|
||||
|
||||
Raises:
|
||||
SystemExit: if no .mobileprovision can be used to sign the bundle.
|
||||
"""
|
||||
profiles_dir = os.path.join(
|
||||
os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles"
|
||||
)
|
||||
if not os.path.isdir(profiles_dir):
|
||||
print(
|
||||
"cannot find mobile provisioning for %s" % (bundle_identifier),
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
provisioning_profiles = None
|
||||
if profile:
|
||||
profile_path = os.path.join(profiles_dir, profile + ".mobileprovision")
|
||||
if os.path.exists(profile_path):
|
||||
provisioning_profiles = [profile_path]
|
||||
if not provisioning_profiles:
|
||||
provisioning_profiles = glob.glob(
|
||||
os.path.join(profiles_dir, "*.mobileprovision")
|
||||
)
|
||||
valid_provisioning_profiles = {}
|
||||
for profile_path in provisioning_profiles:
|
||||
profile_data = self._LoadProvisioningProfile(profile_path)
|
||||
app_id_pattern = profile_data.get("Entitlements", {}).get(
|
||||
"application-identifier", ""
|
||||
)
|
||||
for team_identifier in profile_data.get("TeamIdentifier", []):
|
||||
app_id = f"{team_identifier}.{bundle_identifier}"
|
||||
if fnmatch.fnmatch(app_id, app_id_pattern):
|
||||
valid_provisioning_profiles[app_id_pattern] = (
|
||||
profile_path,
|
||||
profile_data,
|
||||
team_identifier,
|
||||
)
|
||||
if not valid_provisioning_profiles:
|
||||
print(
|
||||
"cannot find mobile provisioning for %s" % (bundle_identifier),
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
# If the user has multiple provisioning profiles installed that can be
|
||||
# used for ${bundle_identifier}, pick the most specific one (i.e. the
|
||||
# provisioning profile whose pattern is the longest).
|
||||
selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
|
||||
return valid_provisioning_profiles[selected_key]
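# Editor's sketch (not part of the original tool), with hypothetical values:
# for bundle_identifier "com.example.app" and team "ABCDE12345", profiles whose
# application-identifier entitlement is "ABCDE12345.*" or
# "ABCDE12345.com.example.*" both match via fnmatch; max() above then keeps the
# longer, more specific pattern, "ABCDE12345.com.example.*".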
|
||||
|
||||
def _LoadProvisioningProfile(self, profile_path):
|
||||
"""Extracts the plist embedded in a provisioning profile.
|
||||
|
||||
Args:
|
||||
profile_path: string, path to the .mobileprovision file
|
||||
|
||||
Returns:
|
||||
Content of the plist embedded in the provisioning profile as a dictionary.
|
||||
"""
|
||||
with tempfile.NamedTemporaryFile() as temp:
|
||||
subprocess.check_call(
|
||||
["security", "cms", "-D", "-i", profile_path, "-o", temp.name]
|
||||
)
|
||||
return self._LoadPlistMaybeBinary(temp.name)
|
||||
|
||||
def _MergePlist(self, merged_plist, plist):
|
||||
"""Merge |plist| into |merged_plist|."""
|
||||
for key, value in plist.items():
|
||||
if isinstance(value, dict):
|
||||
merged_value = merged_plist.get(key, {})
|
||||
if isinstance(merged_value, dict):
|
||||
self._MergePlist(merged_value, value)
|
||||
merged_plist[key] = merged_value
|
||||
else:
|
||||
merged_plist[key] = value
|
||||
else:
|
||||
merged_plist[key] = value
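# Editor's sketch (not part of the original tool): dictionaries are merged
# recursively, any other value is simply overwritten by the later plist:
#
#   a = {"Nested": {"x": 1}, "Flag": 1}
#   self._MergePlist(a, {"Nested": {"y": 2}, "Flag": 2})
#   # a == {"Nested": {"x": 1, "y": 2}, "Flag": 2}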
|
||||
|
||||
def _LoadPlistMaybeBinary(self, plist_path):
|
||||
"""Loads into a memory a plist possibly encoded in binary format.
|
||||
|
||||
This is a wrapper around plistlib.readPlist that tries to convert the
|
||||
plist to the XML format if it can't be parsed (assuming that it is in
|
||||
the binary format).
|
||||
|
||||
Args:
|
||||
plist_path: string, path to a plist file, in XML or binary format
|
||||
|
||||
Returns:
|
||||
Content of the plist as a dictionary.
|
||||
"""
|
||||
try:
|
||||
# First, try to read the file using plistlib that only supports XML,
|
||||
# and if an exception is raised, convert a temporary copy to XML and
|
||||
# load that copy.
|
||||
return plistlib.readPlist(plist_path)
|
||||
except Exception:
|
||||
pass
|
||||
with tempfile.NamedTemporaryFile() as temp:
|
||||
shutil.copy2(plist_path, temp.name)
|
||||
subprocess.check_call(["plutil", "-convert", "xml1", temp.name])
|
||||
return plistlib.readPlist(temp.name)
|
||||
|
||||
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
|
||||
"""Constructs a dictionary of variable substitutions for Entitlements.plist.
|
||||
|
||||
Args:
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
app_identifier_prefix: string, value for AppIdentifierPrefix
|
||||
|
||||
Returns:
|
||||
Dictionary of substitutions to apply when generating Entitlements.plist.
|
||||
"""
|
||||
return {
|
||||
"CFBundleIdentifier": bundle_identifier,
|
||||
"AppIdentifierPrefix": app_identifier_prefix,
|
||||
}
|
||||
|
||||
def _GetCFBundleIdentifier(self):
|
||||
"""Extracts CFBundleIdentifier value from Info.plist in the bundle.
|
||||
|
||||
Returns:
|
||||
Value of CFBundleIdentifier in the Info.plist located in the bundle.
|
||||
"""
|
||||
info_plist_path = os.path.join(
|
||||
os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"]
|
||||
)
|
||||
info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
|
||||
return info_plist_data["CFBundleIdentifier"]
|
||||
|
||||
def _InstallEntitlements(self, entitlements, substitutions, overrides):
|
||||
"""Generates and install the ${BundleName}.xcent entitlements file.
|
||||
|
||||
Expands variables "$(variable)" pattern in the source entitlements file,
|
||||
adds extra entitlements defined in the .mobileprovision file and copies
|
||||
the generated plist to "${BundlePath}.xcent".
|
||||
|
||||
Args:
|
||||
entitlements: string, optional, path to the Entitlements.plist template
|
||||
to use, defaults to "${SDKROOT}/Entitlements.plist"
|
||||
substitutions: dictionary, variable substitutions
|
||||
overrides: dictionary, values to add to the entitlements
|
||||
|
||||
Returns:
|
||||
Path to the generated entitlements file.
|
||||
"""
|
||||
source_path = entitlements
|
||||
target_path = os.path.join(
|
||||
os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent"
|
||||
)
|
||||
if not source_path:
|
||||
source_path = os.path.join(os.environ["SDKROOT"], "Entitlements.plist")
|
||||
shutil.copy2(source_path, target_path)
|
||||
data = self._LoadPlistMaybeBinary(target_path)
|
||||
data = self._ExpandVariables(data, substitutions)
|
||||
if overrides:
|
||||
for key in overrides:
|
||||
if key not in data:
|
||||
data[key] = overrides[key]
|
||||
plistlib.writePlist(data, target_path)
|
||||
return target_path
|
||||
|
||||
def _ExpandVariables(self, data, substitutions):
|
||||
"""Expands variables "$(variable)" in data.
|
||||
|
||||
Args:
|
||||
data: object, can be either string, list or dictionary
|
||||
substitutions: dictionary, variable substitutions to perform
|
||||
|
||||
Returns:
|
||||
Copy of data where each reference to "$(variable)" has been replaced
|
||||
by the corresponding value found in substitutions, or left intact if
|
||||
the key was not found.
|
||||
"""
|
||||
if isinstance(data, str):
|
||||
for key, value in substitutions.items():
|
||||
data = data.replace("$(%s)" % key, value)
|
||||
return data
|
||||
if isinstance(data, list):
|
||||
return [self._ExpandVariables(v, substitutions) for v in data]
|
||||
if isinstance(data, dict):
|
||||
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
|
||||
return data
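# Editor's sketch (not part of the original tool), using hypothetical values:
#
#   subs = {"CFBundleIdentifier": "com.example.app",
#           "AppIdentifierPrefix": "ABCDE12345."}
#   self._ExpandVariables(
#       {"application-identifier": "$(AppIdentifierPrefix)$(CFBundleIdentifier)"},
#       subs)
#   # -> {"application-identifier": "ABCDE12345.com.example.app"}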
|
||||
|
||||
|
||||
def NextGreaterPowerOf2(x):
|
||||
return 2 ** (x).bit_length()
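# Editor's note (not part of the original tool): this returns the power of two
# strictly greater than x, e.g. NextGreaterPowerOf2(3) == 4 while
# NextGreaterPowerOf2(4) == 8; WriteHmap below uses it to size the bucket array
# strictly larger than the number of entries.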
|
||||
|
||||
|
||||
def WriteHmap(output_name, filelist):
|
||||
"""Generates a header map based on |filelist|.
|
||||
|
||||
Per Mark Mentovai:
|
||||
A header map is structured essentially as a hash table, keyed by names used
|
||||
in #includes, and providing pathnames to the actual files.
|
||||
|
||||
The implementation below and the comment above come from inspecting:
|
||||
http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
|
||||
while also looking at the implementation in clang in:
|
||||
https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
|
||||
"""
|
||||
magic = 1751998832
|
||||
version = 1
|
||||
_reserved = 0
|
||||
count = len(filelist)
|
||||
capacity = NextGreaterPowerOf2(count)
|
||||
strings_offset = 24 + (12 * capacity)
|
||||
max_value_length = max(len(value) for value in filelist.values())
|
||||
|
||||
out = open(output_name, "wb")
|
||||
out.write(
|
||||
struct.pack(
|
||||
"<LHHLLLL",
|
||||
magic,
|
||||
version,
|
||||
_reserved,
|
||||
strings_offset,
|
||||
count,
|
||||
capacity,
|
||||
max_value_length,
|
||||
)
|
||||
)
|
||||
|
||||
# Create empty hashmap buckets.
|
||||
buckets = [None] * capacity
|
||||
for file, path in filelist.items():
|
||||
key = 0
|
||||
for c in file:
|
||||
key += ord(c.lower()) * 13
|
||||
|
||||
# Fill next empty bucket.
|
||||
while buckets[key & capacity - 1] is not None:
|
||||
key = key + 1
|
||||
buckets[key & capacity - 1] = (file, path)
|
||||
|
||||
next_offset = 1
|
||||
for bucket in buckets:
|
||||
if bucket is None:
|
||||
out.write(struct.pack("<LLL", 0, 0, 0))
|
||||
else:
|
||||
(file, path) = bucket
|
||||
key_offset = next_offset
|
||||
prefix_offset = key_offset + len(file) + 1
|
||||
suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
|
||||
next_offset = suffix_offset + len(os.path.basename(path)) + 1
|
||||
out.write(struct.pack("<LLL", key_offset, prefix_offset, suffix_offset))
|
||||
|
||||
# Pad byte since next offset starts at 1.
|
||||
out.write(struct.pack("<x"))
|
||||
|
||||
for bucket in buckets:
|
||||
if bucket is not None:
|
||||
(file, path) = bucket
|
||||
out.write(struct.pack("<%ds" % len(file), file))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
base = os.path.dirname(path) + os.sep
|
||||
out.write(struct.pack("<%ds" % len(base), base))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
path = os.path.basename(path)
|
||||
out.write(struct.pack("<%ds" % len(path), path))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
1271
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
generated
vendored
Normal file
File diff suppressed because it is too large
174
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
generated
vendored
Normal file
@@ -0,0 +1,174 @@
|
||||
# This file comes from
|
||||
# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
|
||||
# Do not edit! Edit the upstream one instead.
|
||||
|
||||
"""Python module for generating .ninja files.
|
||||
|
||||
Note that this is emphatically not a required piece of Ninja; it's
|
||||
just a helpful utility for build-file-generation systems that already
|
||||
use Python.
|
||||
"""
|
||||
|
||||
import textwrap
|
||||
|
||||
|
||||
def escape_path(word):
|
||||
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
|
||||
|
||||
|
||||
class Writer:
|
||||
def __init__(self, output, width=78):
|
||||
self.output = output
|
||||
self.width = width
|
||||
|
||||
def newline(self):
|
||||
self.output.write("\n")
|
||||
|
||||
def comment(self, text):
|
||||
for line in textwrap.wrap(text, self.width - 2):
|
||||
self.output.write("# " + line + "\n")
|
||||
|
||||
def variable(self, key, value, indent=0):
|
||||
if value is None:
|
||||
return
|
||||
if isinstance(value, list):
|
||||
value = " ".join(filter(None, value)) # Filter out empty strings.
|
||||
self._line(f"{key} = {value}", indent)
|
||||
|
||||
def pool(self, name, depth):
|
||||
self._line("pool %s" % name)
|
||||
self.variable("depth", depth, indent=1)
|
||||
|
||||
def rule(
|
||||
self,
|
||||
name,
|
||||
command,
|
||||
description=None,
|
||||
depfile=None,
|
||||
generator=False,
|
||||
pool=None,
|
||||
restat=False,
|
||||
rspfile=None,
|
||||
rspfile_content=None,
|
||||
deps=None,
|
||||
):
|
||||
self._line("rule %s" % name)
|
||||
self.variable("command", command, indent=1)
|
||||
if description:
|
||||
self.variable("description", description, indent=1)
|
||||
if depfile:
|
||||
self.variable("depfile", depfile, indent=1)
|
||||
if generator:
|
||||
self.variable("generator", "1", indent=1)
|
||||
if pool:
|
||||
self.variable("pool", pool, indent=1)
|
||||
if restat:
|
||||
self.variable("restat", "1", indent=1)
|
||||
if rspfile:
|
||||
self.variable("rspfile", rspfile, indent=1)
|
||||
if rspfile_content:
|
||||
self.variable("rspfile_content", rspfile_content, indent=1)
|
||||
if deps:
|
||||
self.variable("deps", deps, indent=1)
|
||||
|
||||
def build(
|
||||
self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None
|
||||
):
|
||||
outputs = self._as_list(outputs)
|
||||
all_inputs = self._as_list(inputs)[:]
|
||||
out_outputs = list(map(escape_path, outputs))
|
||||
all_inputs = list(map(escape_path, all_inputs))
|
||||
|
||||
if implicit:
|
||||
implicit = map(escape_path, self._as_list(implicit))
|
||||
all_inputs.append("|")
|
||||
all_inputs.extend(implicit)
|
||||
if order_only:
|
||||
order_only = map(escape_path, self._as_list(order_only))
|
||||
all_inputs.append("||")
|
||||
all_inputs.extend(order_only)
|
||||
|
||||
self._line(
|
||||
"build {}: {}".format(" ".join(out_outputs), " ".join([rule] + all_inputs))
|
||||
)
|
||||
|
||||
if variables:
|
||||
if isinstance(variables, dict):
|
||||
iterator = iter(variables.items())
|
||||
else:
|
||||
iterator = iter(variables)
|
||||
|
||||
for key, val in iterator:
|
||||
self.variable(key, val, indent=1)
|
||||
|
||||
return outputs
|
||||
|
||||
def include(self, path):
|
||||
self._line("include %s" % path)
|
||||
|
||||
def subninja(self, path):
|
||||
self._line("subninja %s" % path)
|
||||
|
||||
def default(self, paths):
|
||||
self._line("default %s" % " ".join(self._as_list(paths)))
|
||||
|
||||
def _count_dollars_before_index(self, s, i):
|
||||
"""Returns the number of '$' characters right in front of s[i]."""
|
||||
dollar_count = 0
|
||||
dollar_index = i - 1
|
||||
while dollar_index > 0 and s[dollar_index] == "$":
|
||||
dollar_count += 1
|
||||
dollar_index -= 1
|
||||
return dollar_count
|
||||
|
||||
def _line(self, text, indent=0):
|
||||
"""Write 'text' word-wrapped at self.width characters."""
|
||||
leading_space = " " * indent
|
||||
while len(leading_space) + len(text) > self.width:
|
||||
# The text is too wide; wrap if possible.
|
||||
|
||||
# Find the rightmost space that would obey our width constraint and
|
||||
# that's not an escaped space.
|
||||
available_space = self.width - len(leading_space) - len(" $")
|
||||
space = available_space
|
||||
while True:
|
||||
space = text.rfind(" ", 0, space)
|
||||
if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0:
|
||||
break
|
||||
|
||||
if space < 0:
|
||||
# No such space; just use the first unescaped space we can find.
|
||||
space = available_space - 1
|
||||
while True:
|
||||
space = text.find(" ", space + 1)
|
||||
if (
|
||||
space < 0
|
||||
or self._count_dollars_before_index(text, space) % 2 == 0
|
||||
):
|
||||
break
|
||||
if space < 0:
|
||||
# Give up on breaking.
|
||||
break
|
||||
|
||||
self.output.write(leading_space + text[0:space] + " $\n")
|
||||
text = text[space + 1 :]
|
||||
|
||||
# Subsequent lines are continuations, so indent them.
|
||||
leading_space = " " * (indent + 2)
|
||||
|
||||
self.output.write(leading_space + text + "\n")
|
||||
|
||||
def _as_list(self, input):
|
||||
if input is None:
|
||||
return []
|
||||
if isinstance(input, list):
|
||||
return input
|
||||
return [input]
|
||||
|
||||
|
||||
def escape(string):
|
||||
"""Escape a string such that it can be embedded into a Ninja file without
|
||||
further interpretation."""
|
||||
assert "\n" not in string, "Ninja syntax does not allow newlines"
|
||||
# We only have one special metacharacter: '$'.
|
||||
return string.replace("$", "$$")
|
||||
61
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""A clone of the default copy.deepcopy that doesn't handle cyclic
|
||||
structures or complex types except for dicts and lists. This is
|
||||
because gyp copies such large structures that the small copy overhead ends up
|
||||
taking seconds in a project the size of Chromium."""
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
pass
|
||||
|
||||
|
||||
__all__ = ["Error", "deepcopy"]
|
||||
|
||||
|
||||
def deepcopy(x):
|
||||
"""Deep copy operation on gyp objects such as strings, ints, dicts
|
||||
and lists. More than twice as fast as copy.deepcopy but much less
|
||||
generic."""
|
||||
|
||||
try:
|
||||
return _deepcopy_dispatch[type(x)](x)
|
||||
except KeyError:
|
||||
raise Error(
|
||||
"Unsupported type %s for deepcopy. Use copy.deepcopy "
|
||||
+ "or expand simple_copy support." % type(x)
|
||||
)
|
||||
|
||||
|
||||
_deepcopy_dispatch = d = {}
|
||||
|
||||
|
||||
def _deepcopy_atomic(x):
|
||||
return x
|
||||
|
||||
|
||||
types = bool, float, int, str, type, type(None)
|
||||
|
||||
for x in types:
|
||||
d[x] = _deepcopy_atomic
|
||||
|
||||
|
||||
def _deepcopy_list(x):
|
||||
return [deepcopy(a) for a in x]
|
||||
|
||||
|
||||
d[list] = _deepcopy_list
|
||||
|
||||
|
||||
def _deepcopy_dict(x):
|
||||
y = {}
|
||||
for key, value in x.items():
|
||||
y[deepcopy(key)] = deepcopy(value)
|
||||
return y
|
||||
|
||||
|
||||
d[dict] = _deepcopy_dict
|
||||
|
||||
del d
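# Editor's sketch (not part of the upstream module): only plain dicts, lists
# and atomic types are supported.
#
#   deepcopy({"a": [1, 2, {"b": None}]})   # independent copy
#   deepcopy(set())                        # raises Error (unsupported type)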
|
||||
374
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
generated
vendored
Executable file
@@ -0,0 +1,374 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Utility functions for Windows builds.
|
||||
|
||||
These functions are executed via gyp-win-tool when using the ninja generator.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import stat
|
||||
import string
|
||||
import sys
|
||||
|
||||
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
# A regex matching an argument corresponding to the output filename passed to
|
||||
# link.exe.
|
||||
_LINK_EXE_OUT_ARG = re.compile("/OUT:(?P<out>.+)$", re.IGNORECASE)
|
||||
|
||||
|
||||
def main(args):
|
||||
executor = WinTool()
|
||||
exit_code = executor.Dispatch(args)
|
||||
if exit_code is not None:
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
class WinTool:
|
||||
"""This class performs all the Windows tooling steps. The methods can either
|
||||
be executed directly, or dispatched from an argument list."""
|
||||
|
||||
def _UseSeparateMspdbsrv(self, env, args):
|
||||
"""Allows to use a unique instance of mspdbsrv.exe per linker instead of a
|
||||
shared one."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
if args[0] != "link.exe":
|
||||
return
|
||||
|
||||
# Use the output filename passed to the linker to generate an endpoint name
|
||||
# for mspdbsrv.exe.
|
||||
endpoint_name = None
|
||||
for arg in args:
|
||||
m = _LINK_EXE_OUT_ARG.match(arg)
|
||||
if m:
|
||||
endpoint_name = re.sub(
|
||||
r"\W+", "", "%s_%d" % (m.group("out"), os.getpid())
|
||||
)
|
||||
break
|
||||
|
||||
if endpoint_name is None:
|
||||
return
|
||||
|
||||
# Adds the appropriate environment variable. This will be read by link.exe
|
||||
# to know which instance of mspdbsrv.exe it should connect to (if it's
|
||||
# not set then the default endpoint is used).
|
||||
env["_MSPDBSRV_ENDPOINT_"] = endpoint_name
|
||||
|
||||
def Dispatch(self, args):
|
||||
"""Dispatches a string command to a method."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
method = "Exec%s" % self._CommandifyName(args[0])
|
||||
return getattr(self, method)(*args[1:])
|
||||
|
||||
def _CommandifyName(self, name_string):
|
||||
"""Transforms a tool name like recursive-mirror to RecursiveMirror."""
|
||||
return name_string.title().replace("-", "")
|
||||
|
||||
def _GetEnv(self, arch):
|
||||
"""Gets the saved environment from a file for a given architecture."""
|
||||
# The environment is saved as an "environment block" (see CreateProcess
|
||||
# and msvs_emulation for details). We convert to a dict here.
|
||||
# Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
|
||||
pairs = open(arch).read()[:-2].split("\0")
|
||||
kvs = [item.split("=", 1) for item in pairs]
|
||||
return dict(kvs)
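# Editor's sketch (not part of the original tool): the saved environment file
# is a CreateProcess-style block of NUL-separated "KEY=value" pairs ending with
# a double NUL, e.g. (hypothetical contents)
#
#   "PATH=C:\\tools\0TMP=C:\\temp\0\0"
#
# which the parsing above turns into {"PATH": "C:\\tools", "TMP": "C:\\temp"}.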
|
||||
|
||||
def ExecStamp(self, path):
|
||||
"""Simple stamp command."""
|
||||
open(path, "w").close()
|
||||
|
||||
def ExecRecursiveMirror(self, source, dest):
|
||||
"""Emulation of rm -rf out && cp -af in out."""
|
||||
if os.path.exists(dest):
|
||||
if os.path.isdir(dest):
|
||||
|
||||
def _on_error(fn, path, excinfo):
|
||||
# The operation failed, possibly because the file is set to
|
||||
# read-only. If that's why, make it writable and try the op again.
|
||||
if not os.access(path, os.W_OK):
|
||||
os.chmod(path, stat.S_IWRITE)
|
||||
fn(path)
|
||||
|
||||
shutil.rmtree(dest, onerror=_on_error)
|
||||
else:
|
||||
if not os.access(dest, os.W_OK):
|
||||
# Attempt to make the file writable before deleting it.
|
||||
os.chmod(dest, stat.S_IWRITE)
|
||||
os.unlink(dest)
|
||||
|
||||
if os.path.isdir(source):
|
||||
shutil.copytree(source, dest)
|
||||
else:
|
||||
shutil.copy2(source, dest)
|
||||
|
||||
def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
|
||||
"""Filter diagnostic output from link that looks like:
|
||||
' Creating library ui.dll.lib and object ui.dll.exp'
|
||||
This happens when there are exports from the dll or exe.
|
||||
"""
|
||||
env = self._GetEnv(arch)
|
||||
if use_separate_mspdbsrv == "True":
|
||||
self._UseSeparateMspdbsrv(env, args)
|
||||
if sys.platform == "win32":
|
||||
args = list(args) # *args is a tuple by default, which is read-only.
|
||||
args[0] = args[0].replace("/", "\\")
|
||||
# https://docs.python.org/2/library/subprocess.html:
|
||||
# "On Unix with shell=True [...] if args is a sequence, the first item
|
||||
# specifies the command string, and any additional items will be treated as
|
||||
# additional arguments to the shell itself. That is to say, Popen does the
|
||||
# equivalent of:
|
||||
# Popen(['/bin/sh', '-c', args[0], args[1], ...])"
|
||||
# For that reason, since going through the shell doesn't seem necessary on
|
||||
# non-Windows don't do that there.
|
||||
link = subprocess.Popen(
|
||||
args,
|
||||
shell=sys.platform == "win32",
|
||||
env=env,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
out = link.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if (
|
||||
not line.startswith(" Creating library ")
|
||||
and not line.startswith("Generating code")
|
||||
and not line.startswith("Finished generating code")
|
||||
):
|
||||
print(line)
|
||||
return link.returncode
|
||||
|
||||
def ExecLinkWithManifests(
|
||||
self,
|
||||
arch,
|
||||
embed_manifest,
|
||||
out,
|
||||
ldcmd,
|
||||
resname,
|
||||
mt,
|
||||
rc,
|
||||
intermediate_manifest,
|
||||
*manifests
|
||||
):
|
||||
"""A wrapper for handling creating a manifest resource and then executing
|
||||
a link command."""
|
||||
# The 'normal' way to do manifests is to have link generate a manifest
|
||||
# based on gathering dependencies from the object files, then merge that
|
||||
# manifest with other manifests supplied as sources, convert the merged
|
||||
# manifest to a resource, and then *relink*, including the compiled
|
||||
# version of the manifest resource. This breaks incremental linking, and
|
||||
# is generally overly complicated. Instead, we merge all the manifests
|
||||
# provided (along with one that includes what would normally be in the
|
||||
# linker-generated one, see msvs_emulation.py), and include that into the
|
||||
# first and only link. We still tell link to generate a manifest, but we
|
||||
# only use that to assert that our simpler process did not miss anything.
|
||||
variables = {
|
||||
"python": sys.executable,
|
||||
"arch": arch,
|
||||
"out": out,
|
||||
"ldcmd": ldcmd,
|
||||
"resname": resname,
|
||||
"mt": mt,
|
||||
"rc": rc,
|
||||
"intermediate_manifest": intermediate_manifest,
|
||||
"manifests": " ".join(manifests),
|
||||
}
|
||||
add_to_ld = ""
|
||||
if manifests:
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
|
||||
"-manifest %(manifests)s -out:%(out)s.manifest" % variables
|
||||
)
|
||||
if embed_manifest == "True":
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest"
|
||||
" %(out)s.manifest.rc %(resname)s" % variables
|
||||
)
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s "
|
||||
"%(out)s.manifest.rc" % variables
|
||||
)
|
||||
add_to_ld = " %(out)s.manifest.res" % variables
|
||||
subprocess.check_call(ldcmd + add_to_ld)
|
||||
|
||||
# Run mt.exe on the theoretically complete manifest we generated, merging
|
||||
# it with the one the linker generated to confirm that the linker
|
||||
# generated one does not add anything. This is strictly unnecessary for
|
||||
# correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
|
||||
# used in a #pragma comment.
|
||||
if manifests:
|
||||
# Merge the intermediate one with ours to .assert.manifest, then check
|
||||
# that .assert.manifest is identical to ours.
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
|
||||
"-manifest %(out)s.manifest %(intermediate_manifest)s "
|
||||
"-out:%(out)s.assert.manifest" % variables
|
||||
)
|
||||
assert_manifest = "%(out)s.assert.manifest" % variables
|
||||
our_manifest = "%(out)s.manifest" % variables
|
||||
# Load and normalize the manifests. mt.exe sometimes removes whitespace,
|
||||
# and sometimes doesn't unfortunately.
|
||||
with open(our_manifest) as our_f:
|
||||
with open(assert_manifest) as assert_f:
|
||||
translator = str.maketrans('', '', string.whitespace)
|
||||
our_data = our_f.read().translate(translator)
|
||||
assert_data = assert_f.read().translate(translator)
|
||||
if our_data != assert_data:
|
||||
os.unlink(out)
|
||||
|
||||
def dump(filename):
|
||||
print(filename, file=sys.stderr)
|
||||
print("-----", file=sys.stderr)
|
||||
with open(filename) as f:
|
||||
print(f.read(), file=sys.stderr)
|
||||
print("-----", file=sys.stderr)
|
||||
|
||||
dump(intermediate_manifest)
|
||||
dump(our_manifest)
|
||||
dump(assert_manifest)
|
||||
sys.stderr.write(
|
||||
'Linker generated manifest "%s" added to final manifest "%s" '
|
||||
'(result in "%s"). '
|
||||
"Were /MANIFEST switches used in #pragma statements? "
|
||||
% (intermediate_manifest, our_manifest, assert_manifest)
|
||||
)
|
||||
return 1
|
||||
|
||||
def ExecManifestWrapper(self, arch, *args):
|
||||
"""Run manifest tool with environment set. Strip out undesirable warning
|
||||
(some XML blocks are recognized by the OS loader, but not the manifest
|
||||
tool)."""
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if line and "manifest authoring warning 81010002" not in line:
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecManifestToRc(self, arch, *args):
|
||||
"""Creates a resource file pointing a SxS assembly manifest.
|
||||
|args| is a tuple containing the path to the manifest file, the path to the resource file
|
||||
and the resource name, which can be "1" (for executables) or "2" (for DLLs)."""
|
||||
manifest_path, resource_path, resource_name = args
|
||||
with open(resource_path, "w") as output:
|
||||
output.write(
|
||||
'#include <windows.h>\n%s RT_MANIFEST "%s"'
|
||||
% (resource_name, os.path.abspath(manifest_path).replace("\\", "/"))
|
||||
)
|
||||
|
||||
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags):
|
||||
"""Filter noisy filenames output from MIDL compile step that isn't
|
||||
quietable via command line flags.
|
||||
"""
|
||||
args = (
|
||||
["midl", "/nologo"]
|
||||
+ list(flags)
|
||||
+ [
|
||||
"/out",
|
||||
outdir,
|
||||
"/tlb",
|
||||
tlb,
|
||||
"/h",
|
||||
h,
|
||||
"/dlldata",
|
||||
dlldata,
|
||||
"/iid",
|
||||
iid,
|
||||
"/proxy",
|
||||
proxy,
|
||||
idl,
|
||||
]
|
||||
)
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
# Filter junk out of stdout, and write filtered versions. Output we want
|
||||
# to filter is pairs of lines that look like this:
|
||||
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
|
||||
# objidl.idl
|
||||
lines = out.splitlines()
|
||||
prefixes = ("Processing ", "64 bit Processing ")
|
||||
processing = {os.path.basename(x) for x in lines if x.startswith(prefixes)}
|
||||
for line in lines:
|
||||
if not line.startswith(prefixes) and line not in processing:
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecAsmWrapper(self, arch, *args):
|
||||
"""Filter logo banner from invocations of asm.exe."""
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if (
|
||||
not line.startswith("Copyright (C) Microsoft Corporation")
|
||||
and not line.startswith("Microsoft (R) Macro Assembler")
|
||||
and not line.startswith(" Assembling: ")
|
||||
and line
|
||||
):
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecRcWrapper(self, arch, *args):
|
||||
"""Filter logo banner from invocations of rc.exe. Older versions of RC
|
||||
don't support the /nologo flag."""
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if (
|
||||
not line.startswith("Microsoft (R) Windows (R) Resource Compiler")
|
||||
and not line.startswith("Copyright (C) Microsoft Corporation")
|
||||
and line
|
||||
):
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecActionWrapper(self, arch, rspfile, *dir):
|
||||
"""Runs an action command line from a response file using the environment
|
||||
for |arch|. If |dir| is supplied, use that as the working directory."""
|
||||
env = self._GetEnv(arch)
|
||||
# TODO(scottmg): This is a temporary hack to get some specific variables
|
||||
# through to actions that are set after gyp-time. http://crbug.com/333738.
|
||||
for k, v in os.environ.items():
|
||||
if k not in env:
|
||||
env[k] = v
|
||||
args = open(rspfile).read()
|
||||
dir = dir[0] if dir else None
|
||||
return subprocess.call(args, shell=True, env=env, cwd=dir)
|
||||
|
||||
def ExecClCompile(self, project_dir, selected_files):
|
||||
"""Executed by msvs-ninja projects when the 'ClCompile' target is used to
|
||||
build selected C/C++ files."""
|
||||
project_dir = os.path.relpath(project_dir, BASE_DIR)
|
||||
selected_files = selected_files.split(";")
|
||||
ninja_targets = [
|
||||
os.path.join(project_dir, filename) + "^^" for filename in selected_files
|
||||
]
|
||||
cmd = ["ninja.exe"]
|
||||
cmd.extend(ninja_targets)
|
||||
return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
1939
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
generated
vendored
Normal file
File diff suppressed because it is too large
302
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
generated
vendored
Normal file
@@ -0,0 +1,302 @@
|
||||
# Copyright (c) 2014 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Xcode-ninja wrapper project file generator.
|
||||
|
||||
This updates the data structures passed to the Xcode gyp generator to build
|
||||
with ninja instead. The Xcode project itself is transformed into a list of
|
||||
executable targets, each with a build step to build with ninja, and a target
|
||||
with every source and resource file. This appears to sidestep some of the
|
||||
major performance headaches experienced using complex projects and large numbers
|
||||
of targets within Xcode.
|
||||
"""
|
||||
|
||||
import errno
|
||||
import gyp.generator.ninja
|
||||
import os
|
||||
import re
|
||||
import xml.sax.saxutils
|
||||
|
||||
|
||||
def _WriteWorkspace(main_gyp, sources_gyp, params):
|
||||
""" Create a workspace to wrap main and sources gyp paths. """
|
||||
(build_file_root, build_file_ext) = os.path.splitext(main_gyp)
|
||||
workspace_path = build_file_root + ".xcworkspace"
|
||||
options = params["options"]
|
||||
if options.generator_output:
|
||||
workspace_path = os.path.join(options.generator_output, workspace_path)
|
||||
try:
|
||||
os.makedirs(workspace_path)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
raise
|
||||
output_string = (
|
||||
'<?xml version="1.0" encoding="UTF-8"?>\n' + '<Workspace version = "1.0">\n'
|
||||
)
|
||||
for gyp_name in [main_gyp, sources_gyp]:
|
||||
name = os.path.splitext(os.path.basename(gyp_name))[0] + ".xcodeproj"
|
||||
name = xml.sax.saxutils.quoteattr("group:" + name)
|
||||
output_string += " <FileRef location = %s></FileRef>\n" % name
|
||||
output_string += "</Workspace>\n"
|
||||
|
||||
workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
|
||||
|
||||
try:
|
||||
with open(workspace_file) as input_file:
|
||||
input_string = input_file.read()
|
||||
if input_string == output_string:
|
||||
return
|
||||
except OSError:
|
||||
# Ignore errors if the file doesn't exist.
|
||||
pass
|
||||
|
||||
with open(workspace_file, "w") as output_file:
|
||||
output_file.write(output_string)
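# Editor's sketch (not part of the original generator): for hypothetical inputs
# main_gyp="all.ninja.gyp" and sources_gyp="sources_for_indexing.gyp", the
# contents.xcworkspacedata written above looks like
#
#   <?xml version="1.0" encoding="UTF-8"?>
#   <Workspace version = "1.0">
#    <FileRef location = "group:all.ninja.xcodeproj"></FileRef>
#    <FileRef location = "group:sources_for_indexing.xcodeproj"></FileRef>
#   </Workspace>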
|
||||
|
||||
|
||||
def _TargetFromSpec(old_spec, params):
|
||||
""" Create fake target for xcode-ninja wrapper. """
|
||||
# Determine ninja top level build dir (e.g. /path/to/out).
|
||||
ninja_toplevel = None
|
||||
jobs = 0
|
||||
if params:
|
||||
options = params["options"]
|
||||
ninja_toplevel = os.path.join(
|
||||
options.toplevel_dir, gyp.generator.ninja.ComputeOutputDir(params)
|
||||
)
|
||||
jobs = params.get("generator_flags", {}).get("xcode_ninja_jobs", 0)
|
||||
|
||||
target_name = old_spec.get("target_name")
|
||||
product_name = old_spec.get("product_name", target_name)
|
||||
product_extension = old_spec.get("product_extension")
|
||||
|
||||
ninja_target = {}
|
||||
ninja_target["target_name"] = target_name
|
||||
ninja_target["product_name"] = product_name
|
||||
if product_extension:
|
||||
ninja_target["product_extension"] = product_extension
|
||||
ninja_target["toolset"] = old_spec.get("toolset")
|
||||
ninja_target["default_configuration"] = old_spec.get("default_configuration")
|
||||
ninja_target["configurations"] = {}
|
||||
|
||||
# Tell Xcode to look in |ninja_toplevel| for build products.
|
||||
new_xcode_settings = {}
|
||||
if ninja_toplevel:
|
||||
new_xcode_settings["CONFIGURATION_BUILD_DIR"] = (
|
||||
"%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
|
||||
)
|
||||
|
||||
if "configurations" in old_spec:
|
||||
for config in old_spec["configurations"]:
|
||||
old_xcode_settings = old_spec["configurations"][config].get(
|
||||
"xcode_settings", {}
|
||||
)
|
||||
if "IPHONEOS_DEPLOYMENT_TARGET" in old_xcode_settings:
|
||||
new_xcode_settings["CODE_SIGNING_REQUIRED"] = "NO"
|
||||
new_xcode_settings["IPHONEOS_DEPLOYMENT_TARGET"] = old_xcode_settings[
|
||||
"IPHONEOS_DEPLOYMENT_TARGET"
|
||||
]
|
||||
for key in ["BUNDLE_LOADER", "TEST_HOST"]:
|
||||
if key in old_xcode_settings:
|
||||
new_xcode_settings[key] = old_xcode_settings[key]
|
||||
|
||||
ninja_target["configurations"][config] = {}
|
||||
ninja_target["configurations"][config][
|
||||
"xcode_settings"
|
||||
] = new_xcode_settings
|
||||
|
||||
ninja_target["mac_bundle"] = old_spec.get("mac_bundle", 0)
|
||||
ninja_target["mac_xctest_bundle"] = old_spec.get("mac_xctest_bundle", 0)
|
||||
ninja_target["ios_app_extension"] = old_spec.get("ios_app_extension", 0)
|
||||
ninja_target["ios_watchkit_extension"] = old_spec.get("ios_watchkit_extension", 0)
|
||||
ninja_target["ios_watchkit_app"] = old_spec.get("ios_watchkit_app", 0)
|
||||
ninja_target["type"] = old_spec["type"]
|
||||
if ninja_toplevel:
|
||||
ninja_target["actions"] = [
|
||||
{
|
||||
"action_name": "Compile and copy %s via ninja" % target_name,
|
||||
"inputs": [],
|
||||
"outputs": [],
|
||||
"action": [
|
||||
"env",
|
||||
"PATH=%s" % os.environ["PATH"],
|
||||
"ninja",
|
||||
"-C",
|
||||
new_xcode_settings["CONFIGURATION_BUILD_DIR"],
|
||||
target_name,
|
||||
],
|
||||
"message": "Compile and copy %s via ninja" % target_name,
|
||||
},
|
||||
]
|
||||
if jobs > 0:
|
||||
ninja_target["actions"][0]["action"].extend(("-j", jobs))
|
||||
return ninja_target
|
||||
|
||||
|
||||
def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
|
||||
"""Limit targets for Xcode wrapper.
|
||||
|
||||
Xcode sometimes performs poorly with too many targets, so only include
|
||||
proper executable targets, with filters to customize.
|
||||
Arguments:
|
||||
target_extras: Regular expression to always add, matching any target.
|
||||
executable_target_pattern: Regular expression limiting executable targets.
|
||||
spec: Specifications for target.
|
||||
"""
|
||||
target_name = spec.get("target_name")
|
||||
# Always include targets matching target_extras.
|
||||
if target_extras is not None and re.search(target_extras, target_name):
|
||||
return True
|
||||
|
||||
# Otherwise just show executable targets and xc_tests.
|
||||
if int(spec.get("mac_xctest_bundle", 0)) != 0 or (
|
||||
spec.get("type", "") == "executable"
|
||||
and spec.get("product_extension", "") != "bundle"
|
||||
):
|
||||
|
||||
# If there is a filter and the target does not match, exclude the target.
|
||||
if executable_target_pattern is not None:
|
||||
if not re.search(executable_target_pattern, target_name):
|
||||
return False
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def CreateWrapper(target_list, target_dicts, data, params):
|
||||
"""Initialize targets for the ninja wrapper.
|
||||
|
||||
This sets up the necessary variables in the targets to generate Xcode projects
|
||||
that use ninja as an external builder.
|
||||
Arguments:
|
||||
target_list: List of target pairs: 'base/base.gyp:base'.
|
||||
target_dicts: Dict of target properties keyed on target pair.
|
||||
data: Dict of flattened build files keyed on gyp path.
|
||||
params: Dict of global options for gyp.
|
||||
"""
|
||||
orig_gyp = params["build_files"][0]
|
||||
for gyp_name, gyp_dict in data.items():
|
||||
if gyp_name == orig_gyp:
|
||||
depth = gyp_dict["_DEPTH"]
|
||||
|
||||
# Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
|
||||
# and prepend .ninja before the .gyp extension.
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
main_gyp = generator_flags.get("xcode_ninja_main_gyp", None)
|
||||
if main_gyp is None:
|
||||
(build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
|
||||
main_gyp = build_file_root + ".ninja" + build_file_ext
|
||||
|
||||
# Create new |target_list|, |target_dicts| and |data| data structures.
|
||||
new_target_list = []
|
||||
new_target_dicts = {}
|
||||
new_data = {}
|
||||
|
||||
# Set base keys needed for |data|.
|
||||
new_data[main_gyp] = {}
|
||||
new_data[main_gyp]["included_files"] = []
|
||||
new_data[main_gyp]["targets"] = []
|
||||
new_data[main_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
|
||||
|
||||
# Normally the xcode-ninja generator includes only valid executable targets.
|
||||
# If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
|
||||
# executable targets that match the pattern. (Default all)
|
||||
executable_target_pattern = generator_flags.get(
|
||||
"xcode_ninja_executable_target_pattern", None
|
||||
)
|
||||
|
||||
# For including other non-executable targets, add the matching target name
|
||||
# to the |xcode_ninja_target_pattern| regular expression. (Default none)
|
||||
target_extras = generator_flags.get("xcode_ninja_target_pattern", None)
|
||||
|
||||
for old_qualified_target in target_list:
|
||||
spec = target_dicts[old_qualified_target]
|
||||
if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
|
||||
# Add to new_target_list.
|
||||
target_name = spec.get("target_name")
|
||||
new_target_name = f"{main_gyp}:{target_name}#target"
|
||||
new_target_list.append(new_target_name)
|
||||
|
||||
# Add to new_target_dicts.
|
||||
new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
|
||||
|
||||
# Add to new_data.
|
||||
for old_target in data[old_qualified_target.split(":")[0]]["targets"]:
|
||||
if old_target["target_name"] == target_name:
|
||||
new_data_target = {}
|
||||
new_data_target["target_name"] = old_target["target_name"]
|
||||
new_data_target["toolset"] = old_target["toolset"]
|
||||
new_data[main_gyp]["targets"].append(new_data_target)
|
||||
|
||||
# Create sources target.
|
||||
sources_target_name = "sources_for_indexing"
|
||||
sources_target = _TargetFromSpec(
|
||||
{
|
||||
"target_name": sources_target_name,
|
||||
"toolset": "target",
|
||||
"default_configuration": "Default",
|
||||
"mac_bundle": "0",
|
||||
"type": "executable",
|
||||
},
|
||||
None,
|
||||
)
|
||||
|
||||
# Tell Xcode to look everywhere for headers.
|
||||
sources_target["configurations"] = {"Default": {"include_dirs": [depth]}}
|
||||
|
||||
# Put excluded files into the sources target so they can be opened in Xcode.
|
||||
skip_excluded_files = not generator_flags.get(
|
||||
"xcode_ninja_list_excluded_files", True
|
||||
)
|
||||
|
||||
sources = []
|
||||
for target, target_dict in target_dicts.items():
|
||||
base = os.path.dirname(target)
|
||||
files = target_dict.get("sources", []) + target_dict.get(
|
||||
"mac_bundle_resources", []
|
||||
)
|
||||
|
||||
if not skip_excluded_files:
|
||||
files.extend(
|
||||
target_dict.get("sources_excluded", [])
|
||||
+ target_dict.get("mac_bundle_resources_excluded", [])
|
||||
)
|
||||
|
||||
for action in target_dict.get("actions", []):
|
||||
files.extend(action.get("inputs", []))
|
||||
|
||||
if not skip_excluded_files:
|
||||
files.extend(action.get("inputs_excluded", []))
|
||||
|
||||
# Remove files starting with $. These are mostly intermediate files for the
|
||||
# build system.
|
||||
files = [file for file in files if not file.startswith("$")]
|
||||
|
||||
# Make sources relative to root build file.
|
||||
relative_path = os.path.dirname(main_gyp)
|
||||
sources += [
|
||||
os.path.relpath(os.path.join(base, file), relative_path) for file in files
|
||||
]
|
||||
|
||||
sources_target["sources"] = sorted(set(sources))
|
||||
|
||||
# Put sources_to_index in its own gyp.
|
||||
sources_gyp = os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
|
||||
fully_qualified_target_name = f"{sources_gyp}:{sources_target_name}#target"
|
||||
|
||||
# Add to new_target_list, new_target_dicts and new_data.
|
||||
new_target_list.append(fully_qualified_target_name)
|
||||
new_target_dicts[fully_qualified_target_name] = sources_target
|
||||
new_data_target = {}
|
||||
new_data_target["target_name"] = sources_target["target_name"]
|
||||
new_data_target["_DEPTH"] = depth
|
||||
new_data_target["toolset"] = "target"
|
||||
new_data[sources_gyp] = {}
|
||||
new_data[sources_gyp]["targets"] = []
|
||||
new_data[sources_gyp]["included_files"] = []
|
||||
new_data[sources_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
|
||||
new_data[sources_gyp]["targets"].append(new_data_target)
|
||||
|
||||
# Write workspace to file.
|
||||
_WriteWorkspace(main_gyp, sources_gyp, params)
|
||||
return (new_target_list, new_target_dicts, new_data)
|
||||
3197
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
generated
vendored
Normal file
File diff suppressed because it is too large
65
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Applies a fix to CR LF TAB handling in xml.dom.
|
||||
|
||||
Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
|
||||
Working around this: http://bugs.python.org/issue5752
|
||||
TODO(bradnelson): Consider dropping this when we drop XP support.
|
||||
"""
|
||||
|
||||
|
||||
import xml.dom.minidom
|
||||
|
||||
|
||||
def _Replacement_write_data(writer, data, is_attrib=False):
|
||||
"""Writes datachars to writer."""
|
||||
data = data.replace("&", "&").replace("<", "<")
|
||||
data = data.replace('"', """).replace(">", ">")
|
||||
if is_attrib:
|
||||
data = data.replace("\r", "
").replace("\n", "
").replace("\t", "	")
|
||||
writer.write(data)
|
||||
|
||||
|
||||
def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
|
||||
# indent = current indentation
|
||||
# addindent = indentation to add to higher levels
|
||||
# newl = newline string
|
||||
writer.write(indent + "<" + self.tagName)
|
||||
|
||||
attrs = self._get_attributes()
|
||||
a_names = sorted(attrs.keys())
|
||||
|
||||
for a_name in a_names:
|
||||
writer.write(' %s="' % a_name)
|
||||
_Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
|
||||
writer.write('"')
|
||||
if self.childNodes:
|
||||
writer.write(">%s" % newl)
|
||||
for node in self.childNodes:
|
||||
node.writexml(writer, indent + addindent, addindent, newl)
|
||||
writer.write(f"{indent}</{self.tagName}>{newl}")
|
||||
else:
|
||||
writer.write("/>%s" % newl)
|
||||
|
||||
|
||||
class XmlFix:
|
||||
"""Object to manage temporary patching of xml.dom.minidom."""
|
||||
|
||||
def __init__(self):
|
||||
# Preserve current xml.dom.minidom functions.
|
||||
self.write_data = xml.dom.minidom._write_data
|
||||
self.writexml = xml.dom.minidom.Element.writexml
|
||||
# Inject replacement versions of a function and a method.
|
||||
xml.dom.minidom._write_data = _Replacement_write_data
|
||||
xml.dom.minidom.Element.writexml = _Replacement_writexml
|
||||
|
||||
def Cleanup(self):
|
||||
if self.write_data:
|
||||
xml.dom.minidom._write_data = self.write_data
|
||||
xml.dom.minidom.Element.writexml = self.writexml
|
||||
self.write_data = None
|
||||
|
||||
def __del__(self):
|
||||
self.Cleanup()
|
||||
41
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pyproject.toml
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "gyp-next"
|
||||
version = "0.14.0"
|
||||
authors = [
|
||||
{ name="Node.js contributors", email="ryzokuken@disroot.org" },
|
||||
]
|
||||
description = "A fork of the GYP build system for use in the Node.js projects"
|
||||
readme = "README.md"
|
||||
license = { file="LICENSE" }
|
||||
requires-python = ">=3.6"
|
||||
classifiers = [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: BSD License",
|
||||
"Natural Language :: English",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = ["flake8", "pytest"]
|
||||
|
||||
[project.scripts]
|
||||
gyp = "gyp:script_main"
|
||||
|
||||
[project.urls]
|
||||
"Homepage" = "https://github.com/nodejs/gyp-next"
|
||||
|
||||
[tool.setuptools]
|
||||
package-dir = {"" = "pylib"}
|
||||
packages = ["gyp", "gyp.generator"]
|
||||
261
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/test_gyp.py
generated
vendored
Executable file
@@ -0,0 +1,261 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""gyptest.py -- test runner for GYP tests."""
|
||||
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import platform
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
|
||||
def is_test_name(f):
|
||||
return f.startswith("gyptest") and f.endswith(".py")
|
||||
|
||||
|
||||
def find_all_gyptest_files(directory):
|
||||
result = []
|
||||
for root, dirs, files in os.walk(directory):
|
||||
result.extend([os.path.join(root, f) for f in files if is_test_name(f)])
|
||||
result.sort()
|
||||
return result
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("-a", "--all", action="store_true", help="run all tests")
|
||||
parser.add_argument("-C", "--chdir", action="store", help="change to directory")
|
||||
parser.add_argument(
|
||||
"-f",
|
||||
"--format",
|
||||
action="store",
|
||||
default="",
|
||||
help="run tests with the specified formats",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-G",
|
||||
"--gyp_option",
|
||||
action="append",
|
||||
default=[],
|
||||
help="Add -G options to the gyp command line",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-l", "--list", action="store_true", help="list available tests and exit"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
"--no-exec",
|
||||
action="store_true",
|
||||
help="no execute, just print the command line",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--path", action="append", default=[], help="additional $PATH directory"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-q",
|
||||
"--quiet",
|
||||
action="store_true",
|
||||
help="quiet, don't print anything unless there are failures",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v",
|
||||
"--verbose",
|
||||
action="store_true",
|
||||
help="print configuration info and test results.",
|
||||
)
|
||||
parser.add_argument("tests", nargs="*")
|
||||
args = parser.parse_args(argv[1:])
|
||||
|
||||
if args.chdir:
|
||||
os.chdir(args.chdir)
|
||||
|
||||
if args.path:
|
||||
extra_path = [os.path.abspath(p) for p in args.path]
|
||||
extra_path = os.pathsep.join(extra_path)
|
||||
os.environ["PATH"] = extra_path + os.pathsep + os.environ["PATH"]
|
||||
|
||||
if not args.tests:
|
||||
if not args.all:
|
||||
sys.stderr.write("Specify -a to get all tests.\n")
|
||||
return 1
|
||||
args.tests = ["test"]
|
||||
|
||||
tests = []
|
||||
for arg in args.tests:
|
||||
if os.path.isdir(arg):
|
||||
tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
|
||||
else:
|
||||
if not is_test_name(os.path.basename(arg)):
|
||||
print(arg, "is not a valid gyp test name.", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
tests.append(arg)
|
||||
|
||||
if args.list:
|
||||
for test in tests:
|
||||
print(test)
|
||||
sys.exit(0)
|
||||
|
||||
os.environ["PYTHONPATH"] = os.path.abspath("test/lib")
|
||||
|
||||
if args.verbose:
|
||||
print_configuration_info()
|
||||
|
||||
if args.gyp_option and not args.quiet:
|
||||
print("Extra Gyp options: %s\n" % args.gyp_option)
|
||||
|
||||
if args.format:
|
||||
format_list = args.format.split(",")
|
||||
else:
|
||||
format_list = {
|
||||
"aix5": ["make"],
|
||||
"os400": ["make"],
|
||||
"freebsd7": ["make"],
|
||||
"freebsd8": ["make"],
|
||||
"openbsd5": ["make"],
|
||||
"cygwin": ["msvs"],
|
||||
"win32": ["msvs", "ninja"],
|
||||
"linux": ["make", "ninja"],
|
||||
"linux2": ["make", "ninja"],
|
||||
"linux3": ["make", "ninja"],
|
||||
# TODO: Re-enable xcode-ninja.
|
||||
# https://bugs.chromium.org/p/gyp/issues/detail?id=530
|
||||
# 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
|
||||
"darwin": ["make", "ninja", "xcode"],
|
||||
}[sys.platform]
|
||||
|
||||
gyp_options = []
|
||||
for option in args.gyp_option:
|
||||
gyp_options += ["-G", option]
|
||||
|
||||
runner = Runner(format_list, tests, gyp_options, args.verbose)
|
||||
runner.run()
|
||||
|
||||
if not args.quiet:
|
||||
runner.print_results()
|
||||
|
||||
return 1 if runner.failures else 0
|
||||
|
||||
|
||||
def print_configuration_info():
|
||||
print("Test configuration:")
|
||||
if sys.platform == "darwin":
|
||||
sys.path.append(os.path.abspath("test/lib"))
|
||||
import TestMac
|
||||
|
||||
print(f" Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}")
|
||||
print(f" Xcode {TestMac.Xcode.Version()}")
|
||||
elif sys.platform == "win32":
|
||||
sys.path.append(os.path.abspath("pylib"))
|
||||
import gyp.MSVSVersion
|
||||
|
||||
print(" Win %s %s\n" % platform.win32_ver()[0:2])
|
||||
print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description())
|
||||
elif sys.platform in ("linux", "linux2"):
|
||||
print(" Linux %s" % " ".join(platform.linux_distribution()))
|
||||
print(f" Python {platform.python_version()}")
|
||||
print(f" PYTHONPATH={os.environ['PYTHONPATH']}")
|
||||
print()
|
||||
|
||||
|
||||
class Runner:
|
||||
def __init__(self, formats, tests, gyp_options, verbose):
|
||||
self.formats = formats
|
||||
self.tests = tests
|
||||
self.verbose = verbose
|
||||
self.gyp_options = gyp_options
|
||||
self.failures = []
|
||||
self.num_tests = len(formats) * len(tests)
|
||||
num_digits = len(str(self.num_tests))
|
||||
self.fmt_str = "[%%%dd/%%%dd] (%%s) %%s" % (num_digits, num_digits)
|
||||
self.isatty = sys.stdout.isatty() and not self.verbose
|
||||
self.env = os.environ.copy()
|
||||
self.hpos = 0
|
||||
|
||||
def run(self):
|
||||
run_start = time.time()
|
||||
|
||||
i = 1
|
||||
for fmt in self.formats:
|
||||
for test in self.tests:
|
||||
self.run_test(test, fmt, i)
|
||||
i += 1
|
||||
|
||||
if self.isatty:
|
||||
self.erase_current_line()
|
||||
|
||||
self.took = time.time() - run_start
|
||||
|
||||
def run_test(self, test, fmt, i):
|
||||
if self.isatty:
|
||||
self.erase_current_line()
|
||||
|
||||
msg = self.fmt_str % (i, self.num_tests, fmt, test)
|
||||
self.print_(msg)
|
||||
|
||||
start = time.time()
|
||||
cmd = [sys.executable, test] + self.gyp_options
|
||||
self.env["TESTGYP_FORMAT"] = fmt
|
||||
proc = subprocess.Popen(
|
||||
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=self.env
|
||||
)
|
||||
proc.wait()
|
||||
took = time.time() - start
|
||||
|
||||
stdout = proc.stdout.read().decode("utf8")
|
||||
if proc.returncode == 2:
|
||||
res = "skipped"
|
||||
elif proc.returncode:
|
||||
res = "failed"
|
||||
self.failures.append(f"({test}) {fmt}")
|
||||
else:
|
||||
res = "passed"
|
||||
res_msg = f" {res} {took:.3f}s"
|
||||
self.print_(res_msg)
|
||||
|
||||
if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")):
|
||||
print()
|
||||
print("\n".join(f" {line}" for line in stdout.splitlines()))
|
||||
elif not self.isatty:
|
||||
print()
|
||||
|
||||
def print_(self, msg):
|
||||
print(msg, end="")
|
||||
index = msg.rfind("\n")
|
||||
if index == -1:
|
||||
self.hpos += len(msg)
|
||||
else:
|
||||
self.hpos = len(msg) - index
|
||||
sys.stdout.flush()
|
||||
|
||||
def erase_current_line(self):
|
||||
print("\b" * self.hpos + " " * self.hpos + "\b" * self.hpos, end="")
|
||||
sys.stdout.flush()
|
||||
self.hpos = 0
|
||||
|
||||
def print_results(self):
|
||||
num_failures = len(self.failures)
|
||||
if num_failures:
|
||||
print()
|
||||
if num_failures == 1:
|
||||
print("Failed the following test:")
|
||||
else:
|
||||
print("Failed the following %d tests:" % num_failures)
|
||||
print("\t" + "\n\t".join(sorted(self.failures)))
|
||||
print()
|
||||
print(
|
||||
"Ran %d tests in %.3fs, %d failed."
|
||||
% (self.num_tests, self.took, num_failures)
|
||||
)
|
||||
print()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
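A hedged invocation sketch for the runner above (paths assumed relative to the gyp checkout): run every gyptest-*.py under test/ against the ninja format only.

import subprocess
import sys

# Equivalent to: python test_gyp.py -a -f ninja
subprocess.run([sys.executable, "test_gyp.py", "-a", "-f", "ninja"], check=False)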
15
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/README
generated
vendored
Normal file
15
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/README
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
pretty_vcproj:
|
||||
Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
|
||||
|
||||
The key/value pairs are used to resolve vsprops names.
|
||||
|
||||
For example, if I want to diff the base.vcproj project:
|
||||
|
||||
pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt
|
||||
pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
|
||||
|
||||
And you can use your favorite diff tool to see the changes.
|
||||
|
||||
Note: In the case of base.vcproj, the original vcproj is one level up from the generated one.
|
||||
I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt
|
||||
before you perform the diff.
|
||||
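A minimal Python sketch of the final diff step described above, assuming original.txt and gyp.txt were produced by the two pretty_vcproj.py commands; difflib stands in for an external diff tool.

import difflib

with open("original.txt") as a, open("gyp.txt") as b:
    diff = difflib.unified_diff(a.readlines(), b.readlines(),
                                fromfile="original.txt", tofile="gyp.txt")
print("".join(diff))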
5
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/Xcode/README
generated
vendored
Normal file
5
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/Xcode/README
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
The Specifications directory contains syntax formatters for Xcode 3. These do not appear to be supported yet in Xcode 4. To use them with Xcode 3, install both the gyp.pbfilespec and gyp.xclangspec files in
|
||||
|
||||
~/Library/Application Support/Developer/Shared/Xcode/Specifications/
|
||||
|
||||
and restart Xcode.
|
||||
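A hedged install sketch for the step described above, assuming it is run from the root of a gyp checkout; the target directory is created if it does not exist yet.

import os
import shutil

dest = os.path.expanduser(
    "~/Library/Application Support/Developer/Shared/Xcode/Specifications/")
os.makedirs(dest, exist_ok=True)
for spec in ("gyp.pbfilespec", "gyp.xclangspec"):
    shutil.copy(os.path.join("tools", "Xcode", "Specifications", spec), dest)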
27
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec
generated
vendored
Normal file
27
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
/*
|
||||
gyp.pbfilespec
|
||||
GYP source file spec for Xcode 3
|
||||
|
||||
There is not much documentation available regarding the format
|
||||
of .pbfilespec files. As a starting point, see for instance the
|
||||
outdated documentation at:
|
||||
http://maxao.free.fr/xcode-plugin-interface/specifications.html
|
||||
and the files in:
|
||||
/Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
|
||||
|
||||
Place this file in directory:
|
||||
~/Library/Application Support/Developer/Shared/Xcode/Specifications/
|
||||
*/
|
||||
|
||||
(
|
||||
{
|
||||
Identifier = sourcecode.gyp;
|
||||
BasedOn = sourcecode;
|
||||
Name = "GYP Files";
|
||||
Extensions = ("gyp", "gypi");
|
||||
MIMETypes = ("text/gyp");
|
||||
Language = "xcode.lang.gyp";
|
||||
IsTextFile = YES;
|
||||
IsSourceFile = YES;
|
||||
}
|
||||
)
|
||||
226
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec
generated
vendored
Normal file
226
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec
generated
vendored
Normal file
@@ -0,0 +1,226 @@
|
||||
/*
|
||||
Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
Use of this source code is governed by a BSD-style license that can be
|
||||
found in the LICENSE file.
|
||||
|
||||
gyp.xclangspec
|
||||
GYP language specification for Xcode 3
|
||||
|
||||
There is not much documentation available regarding the format
|
||||
of .xclangspec files. As a starting point, see for instance the
|
||||
outdated documentation at:
|
||||
http://maxao.free.fr/xcode-plugin-interface/specifications.html
|
||||
and the files in:
|
||||
/Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
|
||||
|
||||
Place this file in directory:
|
||||
~/Library/Application Support/Developer/Shared/Xcode/Specifications/
|
||||
*/
|
||||
|
||||
(
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.keyword";
|
||||
Syntax = {
|
||||
Words = (
|
||||
"and",
|
||||
"or",
|
||||
"<!",
|
||||
"<",
|
||||
);
|
||||
Type = "xcode.syntax.keyword";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.target.declarator";
|
||||
Syntax = {
|
||||
Words = (
|
||||
"'target_name'",
|
||||
);
|
||||
Type = "xcode.syntax.identifier.type";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.string.singlequote";
|
||||
Syntax = {
|
||||
IncludeRules = (
|
||||
"xcode.lang.string",
|
||||
"xcode.lang.gyp.keyword",
|
||||
"xcode.lang.number",
|
||||
);
|
||||
Start = "'";
|
||||
End = "'";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.comma";
|
||||
Syntax = {
|
||||
Words = ( ",", );
|
||||
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp";
|
||||
Description = "GYP Coloring";
|
||||
BasedOn = "xcode.lang.simpleColoring";
|
||||
IncludeInMenu = YES;
|
||||
Name = "GYP";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer.toplevel";
|
||||
IncludeRules = (
|
||||
"xcode.lang.gyp.dictionary",
|
||||
);
|
||||
Type = "xcode.syntax.plain";
|
||||
};
|
||||
},
|
||||
|
||||
// The following rule returns tokens to the other rules
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.lexer";
|
||||
Syntax = {
|
||||
IncludeRules = (
|
||||
"xcode.lang.gyp.comment",
|
||||
"xcode.lang.string",
|
||||
'xcode.lang.gyp.targetname.declarator',
|
||||
"xcode.lang.gyp.string.singlequote",
|
||||
"xcode.lang.number",
|
||||
"xcode.lang.gyp.comma",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.lexer.toplevel";
|
||||
Syntax = {
|
||||
IncludeRules = (
|
||||
"xcode.lang.gyp.comment",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.assignment";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Rules = (
|
||||
"xcode.lang.gyp.assignment.lhs",
|
||||
":",
|
||||
"xcode.lang.gyp.assignment.rhs",
|
||||
);
|
||||
};
|
||||
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.target.declaration";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Rules = (
|
||||
"xcode.lang.gyp.target.declarator",
|
||||
":",
|
||||
"xcode.lang.gyp.target.name",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.target.name";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Rules = (
|
||||
"xcode.lang.gyp.string.singlequote",
|
||||
);
|
||||
Type = "xcode.syntax.definition.function";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.assignment.lhs";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Rules = (
|
||||
"xcode.lang.gyp.string.singlequote",
|
||||
);
|
||||
Type = "xcode.syntax.identifier.type";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.assignment.rhs";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Rules = (
|
||||
"xcode.lang.gyp.string.singlequote?",
|
||||
"xcode.lang.gyp.array?",
|
||||
"xcode.lang.gyp.dictionary?",
|
||||
"xcode.lang.number?",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.dictionary";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Start = "{";
|
||||
End = "}";
|
||||
Foldable = YES;
|
||||
Recursive = YES;
|
||||
IncludeRules = (
|
||||
"xcode.lang.gyp.target.declaration",
|
||||
"xcode.lang.gyp.assignment",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.array";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Start = "[";
|
||||
End = "]";
|
||||
Foldable = YES;
|
||||
Recursive = YES;
|
||||
IncludeRules = (
|
||||
"xcode.lang.gyp.array",
|
||||
"xcode.lang.gyp.dictionary",
|
||||
"xcode.lang.gyp.string.singlequote",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.todo.mark";
|
||||
Syntax = {
|
||||
StartChars = "T";
|
||||
Match = (
|
||||
"^\(TODO\(.*\):[ \t]+.*\)$", // include "TODO: " in the markers list
|
||||
);
|
||||
// This is the order of captures. All of the match strings above need the same order.
|
||||
CaptureTypes = (
|
||||
"xcode.syntax.mark"
|
||||
);
|
||||
Type = "xcode.syntax.comment";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.comment";
|
||||
BasedOn = "xcode.lang.comment"; // for text macros
|
||||
Syntax = {
|
||||
Start = "#";
|
||||
End = "\n";
|
||||
IncludeRules = (
|
||||
"xcode.lang.url",
|
||||
"xcode.lang.url.mail",
|
||||
"xcode.lang.comment.mark",
|
||||
"xcode.lang.gyp.todo.mark",
|
||||
);
|
||||
Type = "xcode.syntax.comment";
|
||||
};
|
||||
},
|
||||
)
|
||||
12
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/README
generated
vendored
Normal file
12
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/README
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
How to install gyp-mode for emacs:
|
||||
|
||||
Add the following to your ~/.emacs (replace ... with the path to your gyp
|
||||
checkout).
|
||||
|
||||
(setq load-path (cons ".../tools/emacs" load-path))
|
||||
(require 'gyp)
|
||||
|
||||
Restart emacs (or eval-region the added lines) and you should be all set.
|
||||
|
||||
Please note that ert is required for running the tests, which is included in
|
||||
Emacs 24, or available separately from https://github.com/ohler/ert
|
||||
63
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el
generated
vendored
Normal file
63
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
;;; gyp-tests.el - unit tests for gyp-mode.
|
||||
|
||||
;; Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
;; Use of this source code is governed by a BSD-style license that can be
|
||||
;; found in the LICENSE file.
|
||||
|
||||
;; The recommended way to run these tests is to run them from the command-line,
|
||||
;; with the run-unit-tests.sh script.
|
||||
|
||||
(require 'cl)
|
||||
(require 'ert)
|
||||
(require 'gyp)
|
||||
|
||||
(defconst samples (directory-files "testdata" t ".gyp$")
|
||||
"List of golden samples to check")
|
||||
|
||||
(defun fontify (filename)
|
||||
(with-temp-buffer
|
||||
(insert-file-contents-literally filename)
|
||||
(gyp-mode)
|
||||
(font-lock-fontify-buffer)
|
||||
(buffer-string)))
|
||||
|
||||
(defun read-golden-sample (filename)
|
||||
(with-temp-buffer
|
||||
(insert-file-contents-literally (concat filename ".fontified"))
|
||||
(read (current-buffer))))
|
||||
|
||||
(defun equivalent-face (face)
|
||||
"For the purposes of face comparison, we're not interested in the
|
||||
differences between certain faces. For example, the difference between
|
||||
font-lock-comment-delimiter and font-lock-comment-face."
|
||||
(cl-case face
|
||||
((font-lock-comment-delimiter-face) font-lock-comment-face)
|
||||
(t face)))
|
||||
|
||||
(defun text-face-properties (s)
|
||||
"Extract the text properties from s"
|
||||
(let ((result (list t)))
|
||||
(dotimes (i (length s))
|
||||
(setq result (cons (equivalent-face (get-text-property i 'face s))
|
||||
result)))
|
||||
(nreverse result)))
|
||||
|
||||
(ert-deftest test-golden-samples ()
|
||||
"Check that fontification produces the same results as the golden samples"
|
||||
(dolist (sample samples)
|
||||
(let ((golden (read-golden-sample sample))
|
||||
(fontified (fontify sample)))
|
||||
(should (equal golden fontified))
|
||||
(should (equal (text-face-properties golden)
|
||||
(text-face-properties fontified))))))
|
||||
|
||||
(defun create-golden-sample (filename)
|
||||
"Create a golden sample by fontifying filename and writing out the printable
|
||||
representation of the fontified buffer (with text properties) to the
|
||||
FILENAME.fontified"
|
||||
(with-temp-file (concat filename ".fontified")
|
||||
(print (fontify filename) (current-buffer))))
|
||||
|
||||
(defun create-golden-samples ()
|
||||
"Recreate the golden samples"
|
||||
(dolist (sample samples) (create-golden-sample sample)))
|
||||
275
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/gyp.el
generated
vendored
Normal file
275
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/gyp.el
generated
vendored
Normal file
@@ -0,0 +1,275 @@
|
||||
;;; gyp.el - font-lock-mode support for gyp files.
|
||||
|
||||
;; Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
;; Use of this source code is governed by a BSD-style license that can be
|
||||
;; found in the LICENSE file.
|
||||
|
||||
;; Put this somewhere in your load-path and
|
||||
;; (require 'gyp)
|
||||
|
||||
(require 'python)
|
||||
(require 'cl)
|
||||
|
||||
(when (string-match "python-mode.el" (symbol-file 'python-mode 'defun))
|
||||
(error (concat "python-mode must be loaded from python.el (bundled with "
|
||||
"recent emacsen), not from the older and less maintained "
|
||||
"python-mode.el")))
|
||||
|
||||
(defadvice python-indent-calculate-levels (after gyp-outdent-closing-parens
|
||||
activate)
|
||||
"De-indent closing parens, braces, and brackets in gyp-mode."
|
||||
(when (and (eq major-mode 'gyp-mode)
|
||||
(string-match "^ *[])}][],)}]* *$"
|
||||
(buffer-substring-no-properties
|
||||
(line-beginning-position) (line-end-position))))
|
||||
(setf (first python-indent-levels)
|
||||
(- (first python-indent-levels) python-continuation-offset))))
|
||||
|
||||
(defadvice python-indent-guess-indent-offset (around
|
||||
gyp-indent-guess-indent-offset
|
||||
activate)
|
||||
"Guess correct indent offset in gyp-mode."
|
||||
(or (and (not (eq major-mode 'gyp-mode))
|
||||
ad-do-it)
|
||||
(save-excursion
|
||||
(save-restriction
|
||||
(widen)
|
||||
(goto-char (point-min))
|
||||
;; Find first line ending with an opening brace that is not a comment.
|
||||
(or (and (re-search-forward "\\(^[[{]$\\|^.*[^#].*[[{]$\\)")
|
||||
(forward-line)
|
||||
(/= (current-indentation) 0)
|
||||
(set (make-local-variable 'python-indent-offset)
|
||||
(current-indentation))
|
||||
(set (make-local-variable 'python-continuation-offset)
|
||||
(current-indentation)))
|
||||
(message "Can't guess gyp indent offset, using default: %s"
|
||||
python-continuation-offset))))))
|
||||
|
||||
(define-derived-mode gyp-mode python-mode "Gyp"
|
||||
"Major mode for editing .gyp files. See http://code.google.com/p/gyp/"
|
||||
;; gyp-parse-history is a stack of (POSITION . PARSE-STATE) tuples,
|
||||
;; with greater positions at the top of the stack. PARSE-STATE
|
||||
;; is a list of section symbols (see gyp-section-name and gyp-parse-to)
|
||||
;; with most nested section symbol at the front of the list.
|
||||
(set (make-local-variable 'gyp-parse-history) '((1 . (list))))
|
||||
(gyp-add-font-lock-keywords))
|
||||
|
||||
(defun gyp-set-indentation ()
|
||||
"Hook function to configure python indentation to suit gyp mode."
|
||||
(set (make-local-variable 'python-indent-offset) 2)
|
||||
(set (make-local-variable 'python-continuation-offset) 2)
|
||||
(set (make-local-variable 'python-indent-guess-indent-offset) t)
|
||||
(python-indent-guess-indent-offset))
|
||||
|
||||
(add-hook 'gyp-mode-hook 'gyp-set-indentation)
|
||||
|
||||
(add-to-list 'auto-mode-alist '("\\.gyp\\'" . gyp-mode))
|
||||
(add-to-list 'auto-mode-alist '("\\.gypi\\'" . gyp-mode))
|
||||
(add-to-list 'auto-mode-alist '("/\\.gclient\\'" . gyp-mode))
|
||||
|
||||
;;; Font-lock support
|
||||
|
||||
(defconst gyp-dependencies-regexp
|
||||
(regexp-opt (list "dependencies" "export_dependent_settings"))
|
||||
"Regular expression to introduce 'dependencies' section")
|
||||
|
||||
(defconst gyp-sources-regexp
|
||||
(regexp-opt (list "action" "files" "include_dirs" "includes" "inputs"
|
||||
"libraries" "outputs" "sources"))
|
||||
"Regular expression to introduce 'sources' sections")
|
||||
|
||||
(defconst gyp-conditions-regexp
|
||||
(regexp-opt (list "conditions" "target_conditions"))
|
||||
"Regular expression to introduce conditions sections")
|
||||
|
||||
(defconst gyp-variables-regexp
|
||||
"^variables"
|
||||
"Regular expression to introduce variables sections")
|
||||
|
||||
(defconst gyp-defines-regexp
|
||||
"^defines"
|
||||
"Regular expression to introduce 'defines' sections")
|
||||
|
||||
(defconst gyp-targets-regexp
|
||||
"^targets"
|
||||
"Regular expression to introduce 'targets' sections")
|
||||
|
||||
(defun gyp-section-name (section)
|
||||
"Map the sections we are interested in from SECTION to symbol.
|
||||
|
||||
SECTION is a string from the buffer that introduces a section. The result is
|
||||
a symbol representing the kind of section.
|
||||
|
||||
This allows us to treat (for the purposes of font-lock) several different
|
||||
section names as the same kind of section. For example, a 'sources section
|
||||
can be introduced by the 'sources', 'inputs', or 'outputs' keyword.
|
||||
|
||||
'other is the default section kind when a more specific match is not made."
|
||||
(cond ((string-match-p gyp-dependencies-regexp section) 'dependencies)
|
||||
((string-match-p gyp-sources-regexp section) 'sources)
|
||||
((string-match-p gyp-variables-regexp section) 'variables)
|
||||
((string-match-p gyp-conditions-regexp section) 'conditions)
|
||||
((string-match-p gyp-targets-regexp section) 'targets)
|
||||
((string-match-p gyp-defines-regexp section) 'defines)
|
||||
(t 'other)))
|
||||
|
||||
(defun gyp-invalidate-parse-states-after (target-point)
|
||||
"Erase any parse information after target-point."
|
||||
(while (> (caar gyp-parse-history) target-point)
|
||||
(setq gyp-parse-history (cdr gyp-parse-history))))
|
||||
|
||||
(defun gyp-parse-point ()
|
||||
"The point of the last parse state added by gyp-parse-to."
|
||||
(caar gyp-parse-history))
|
||||
|
||||
(defun gyp-parse-sections ()
|
||||
"A list of section symbols holding at the last parse state point."
|
||||
(cdar gyp-parse-history))
|
||||
|
||||
(defun gyp-inside-dictionary-p ()
|
||||
"Predicate returning true if the parser is inside a dictionary."
|
||||
(not (eq (cadar gyp-parse-history) 'list)))
|
||||
|
||||
(defun gyp-add-parse-history (point sections)
|
||||
"Add parse state SECTIONS to the parse history at POINT so that parsing can be
|
||||
resumed instantly."
|
||||
(while (>= (caar gyp-parse-history) point)
|
||||
(setq gyp-parse-history (cdr gyp-parse-history)))
|
||||
(setq gyp-parse-history (cons (cons point sections) gyp-parse-history)))
|
||||
|
||||
(defun gyp-parse-to (target-point)
|
||||
"Parses from (point) to TARGET-POINT adding the parse state information to
|
||||
gyp-parse-history. Parsing stops if TARGET-POINT is reached or if a
|
||||
string literal has been parsed. Returns nil if no further parsing can be
|
||||
done, otherwise returns the position of the start of a parsed string, leaving
|
||||
the point at the end of the string."
|
||||
(let ((parsing t)
|
||||
string-start)
|
||||
(while parsing
|
||||
(setq string-start nil)
|
||||
;; Parse up to a character that starts a sexp, or if the nesting
|
||||
;; level decreases.
|
||||
(let ((state (parse-partial-sexp (gyp-parse-point)
|
||||
target-point
|
||||
-1
|
||||
t))
|
||||
(sections (gyp-parse-sections)))
|
||||
(if (= (nth 0 state) -1)
|
||||
(setq sections (cdr sections)) ; pop out a level
|
||||
(cond ((looking-at-p "['\"]") ; a string
|
||||
(setq string-start (point))
|
||||
(goto-char (scan-sexps (point) 1))
|
||||
(if (gyp-inside-dictionary-p)
|
||||
;; Look for sections inside a dictionary
|
||||
(let ((section (gyp-section-name
|
||||
(buffer-substring-no-properties
|
||||
(+ 1 string-start)
|
||||
(- (point) 1)))))
|
||||
(setq sections (cons section (cdr sections)))))
|
||||
;; Stop after the string so it can be fontified.
|
||||
(setq target-point (point)))
|
||||
((looking-at-p "{")
|
||||
;; Inside a dictionary. Increase nesting.
|
||||
(forward-char 1)
|
||||
(setq sections (cons 'unknown sections)))
|
||||
((looking-at-p "\\[")
|
||||
;; Inside a list. Increase nesting
|
||||
(forward-char 1)
|
||||
(setq sections (cons 'list sections)))
|
||||
((not (eobp))
|
||||
;; other
|
||||
(forward-char 1))))
|
||||
(gyp-add-parse-history (point) sections)
|
||||
(setq parsing (< (point) target-point))))
|
||||
string-start))
|
||||
|
||||
(defun gyp-section-at-point ()
|
||||
"Transform the last parse state, which is a list of nested sections and return
|
||||
the section symbol that should be used to determine font-lock information for
|
||||
the string. Can return nil indicating the string should not have any attached
|
||||
section."
|
||||
(let ((sections (gyp-parse-sections)))
|
||||
(cond
|
||||
((eq (car sections) 'conditions)
|
||||
;; conditions can occur in a variables section, but we still want to
|
||||
;; highlight it as a keyword.
|
||||
nil)
|
||||
((and (eq (car sections) 'list)
|
||||
(eq (cadr sections) 'list))
|
||||
;; conditions and sources can have items in [[ ]]
|
||||
(caddr sections))
|
||||
(t (cadr sections)))))
|
||||
|
||||
(defun gyp-section-match (limit)
|
||||
"Parse from (point) to LIMIT returning by means of match data what was
|
||||
matched. The group of the match indicates what style font-lock should apply.
|
||||
See also `gyp-add-font-lock-keywords'."
|
||||
(gyp-invalidate-parse-states-after (point))
|
||||
(let ((group nil)
|
||||
(string-start t))
|
||||
(while (and (< (point) limit)
|
||||
(not group)
|
||||
string-start)
|
||||
(setq string-start (gyp-parse-to limit))
|
||||
(if string-start
|
||||
(setq group (cl-case (gyp-section-at-point)
|
||||
('dependencies 1)
|
||||
('variables 2)
|
||||
('conditions 2)
|
||||
('sources 3)
|
||||
('defines 4)
|
||||
(nil nil)))))
|
||||
(if group
|
||||
(progn
|
||||
;; Set the match data to indicate to the font-lock mechanism the
|
||||
;; highlighting to be performed.
|
||||
(set-match-data (append (list string-start (point))
|
||||
(make-list (* (1- group) 2) nil)
|
||||
(list (1+ string-start) (1- (point)))))
|
||||
t))))
|
||||
|
||||
;;; Please see http://code.google.com/p/gyp/wiki/GypLanguageSpecification for
|
||||
;;; canonical list of keywords.
|
||||
(defun gyp-add-font-lock-keywords ()
|
||||
"Add gyp-mode keywords to font-lock mechanism."
|
||||
;; TODO(jknotten): Move all the keyword highlighting into gyp-section-match
|
||||
;; so that we can do the font-locking in a single font-lock pass.
|
||||
(font-lock-add-keywords
|
||||
nil
|
||||
(list
|
||||
;; Top-level keywords
|
||||
(list (concat "['\"]\\("
|
||||
(regexp-opt (list "action" "action_name" "actions" "cflags"
|
||||
"cflags_cc" "conditions" "configurations"
|
||||
"copies" "defines" "dependencies" "destination"
|
||||
"direct_dependent_settings"
|
||||
"export_dependent_settings" "extension" "files"
|
||||
"include_dirs" "includes" "inputs" "ldflags" "libraries"
|
||||
"link_settings" "mac_bundle" "message"
|
||||
"msvs_external_rule" "outputs" "product_name"
|
||||
"process_outputs_as_sources" "rules" "rule_name"
|
||||
"sources" "suppress_wildcard"
|
||||
"target_conditions" "target_defaults"
|
||||
"target_defines" "target_name" "toolsets"
|
||||
"targets" "type" "variables" "xcode_settings"))
|
||||
"[!/+=]?\\)") 1 'font-lock-keyword-face t)
|
||||
;; Type of target
|
||||
(list (concat "['\"]\\("
|
||||
(regexp-opt (list "loadable_module" "static_library"
|
||||
"shared_library" "executable" "none"))
|
||||
"\\)") 1 'font-lock-type-face t)
|
||||
(list "\\(?:target\\|action\\)_name['\"]\\s-*:\\s-*['\"]\\([^ '\"]*\\)" 1
|
||||
'font-lock-function-name-face t)
|
||||
(list 'gyp-section-match
|
||||
(list 1 'font-lock-function-name-face t t) ; dependencies
|
||||
(list 2 'font-lock-variable-name-face t t) ; variables, conditions
|
||||
(list 3 'font-lock-constant-face t t) ; sources
|
||||
(list 4 'font-lock-preprocessor-face t t)) ; preprocessor
|
||||
;; Variable expansion
|
||||
(list "<@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
|
||||
;; Command expansion
|
||||
(list "<!@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
|
||||
)))
|
||||
|
||||
(provide 'gyp)
|
||||
7
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh
generated
vendored
Executable file
7
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh
generated
vendored
Executable file
@@ -0,0 +1,7 @@
|
||||
#!/bin/sh
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
emacs --no-site-file --no-init-file --batch \
|
||||
--load ert.el --load gyp.el --load gyp-tests.el \
|
||||
-f ert-run-tests-batch-and-exit
|
||||
1105
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp
generated
vendored
Normal file
1105
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1107
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified
generated
vendored
Normal file
1107
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
102
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/graphviz.py
generated
vendored
Executable file
102
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/graphviz.py
generated
vendored
Executable file
@@ -0,0 +1,102 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Using the JSON dumped by the dump-dependency-json generator,
|
||||
generate input suitable for graphviz to render a dependency graph of
|
||||
targets."""
|
||||
|
||||
|
||||
import collections
|
||||
import json
|
||||
import sys
|
||||
|
||||
|
||||
def ParseTarget(target):
|
||||
target, _, suffix = target.partition("#")
|
||||
filename, _, target = target.partition(":")
|
||||
return filename, target, suffix
|
||||
|
||||
|
||||
def LoadEdges(filename, targets):
|
||||
"""Load the edges map from the dump file, and filter it to only
|
||||
show targets in |targets| and their dependents."""
|
||||
|
||||
file = open("dump.json")
|
||||
edges = json.load(file)
|
||||
file.close()
|
||||
|
||||
# Copy out only the edges we're interested in from the full edge list.
|
||||
target_edges = {}
|
||||
to_visit = targets[:]
|
||||
while to_visit:
|
||||
src = to_visit.pop()
|
||||
if src in target_edges:
|
||||
continue
|
||||
target_edges[src] = edges[src]
|
||||
to_visit.extend(edges[src])
|
||||
|
||||
return target_edges
|
||||
|
||||
|
||||
def WriteGraph(edges):
|
||||
"""Print a graphviz graph to stdout.
|
||||
|edges| is a map of target to a list of other targets it depends on."""
|
||||
|
||||
# Bucket targets by file.
|
||||
files = collections.defaultdict(list)
|
||||
for src, dst in edges.items():
|
||||
build_file, target_name, toolset = ParseTarget(src)
|
||||
files[build_file].append(src)
|
||||
|
||||
print("digraph D {")
|
||||
print(" fontsize=8") # Used by subgraphs.
|
||||
print(" node [fontsize=8]")
|
||||
|
||||
# Output nodes by file. We must first write out each node within
|
||||
# its file grouping before writing out any edges that may refer
|
||||
# to those nodes.
|
||||
for filename, targets in files.items():
|
||||
if len(targets) == 1:
|
||||
# If there's only one node for this file, simplify
|
||||
# the display by making it a box without an internal node.
|
||||
target = targets[0]
|
||||
build_file, target_name, toolset = ParseTarget(target)
|
||||
print(
|
||||
f' "{target}" [shape=box, label="{filename}\\n{target_name}"]'
|
||||
)
|
||||
else:
|
||||
# Group multiple nodes together in a subgraph.
|
||||
print(' subgraph "cluster_%s" {' % filename)
|
||||
print(' label = "%s"' % filename)
|
||||
for target in targets:
|
||||
build_file, target_name, toolset = ParseTarget(target)
|
||||
print(f' "{target}" [label="{target_name}"]')
|
||||
print(" }")
|
||||
|
||||
# Now that we've placed all the nodes within subgraphs, output all
|
||||
# the edges between nodes.
|
||||
for src, dsts in edges.items():
|
||||
for dst in dsts:
|
||||
print(f' "{src}" -> "{dst}"')
|
||||
|
||||
print("}")
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) < 2:
|
||||
print(__doc__, file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
print("usage: %s target1 target2..." % (sys.argv[0]), file=sys.stderr)
|
||||
return 1
|
||||
|
||||
edges = LoadEdges("dump.json", sys.argv[1:])
|
||||
|
||||
WriteGraph(edges)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
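A hedged end-to-end sketch of how the script above is meant to be used: dump the dependency JSON with gyp's dump-dependency-json generator, then render the filtered graph with graphviz's dot. The .gyp path, the target name, the exact format flag, and the presence of gyp and dot on PATH are all assumptions.

import subprocess
import sys

# Produce dump.json (the generator name comes from the docstring above).
subprocess.run(["gyp", "-f", "dump_dependency_json", "build/all.gyp"], check=True)

# Filter the graph to one target and hand the DOT output to graphviz.
with open("deps.dot", "w") as out:
    subprocess.run([sys.executable, "tools/graphviz.py", "build/all.gyp:All#target"],
                   stdout=out, check=True)
subprocess.run(["dot", "-Tpng", "deps.dot", "-o", "deps.png"], check=True)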
156
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/pretty_gyp.py
generated
vendored
Executable file
156
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/pretty_gyp.py
generated
vendored
Executable file
@@ -0,0 +1,156 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Pretty-prints the contents of a GYP file."""
|
||||
|
||||
|
||||
import sys
|
||||
import re
|
||||
|
||||
|
||||
# Regex to remove comments when we're counting braces.
|
||||
COMMENT_RE = re.compile(r"\s*#.*")
|
||||
|
||||
# Regex to remove quoted strings when we're counting braces.
|
||||
# It takes into account quoted quotes, and makes sure that the quotes match.
|
||||
# NOTE: It does not handle quotes that span more than one line, or
|
||||
# cases where an escaped quote is preceded by an escaped backslash.
|
||||
QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
|
||||
QUOTE_RE = re.compile(QUOTE_RE_STR)
|
||||
|
||||
|
||||
def comment_replace(matchobj):
|
||||
return matchobj.group(1) + matchobj.group(2) + "#" * len(matchobj.group(3))
|
||||
|
||||
|
||||
def mask_comments(input):
|
||||
"""Mask the quoted strings so we skip braces inside quoted strings."""
|
||||
search_re = re.compile(r"(.*?)(#)(.*)")
|
||||
return [search_re.sub(comment_replace, line) for line in input]
|
||||
|
||||
|
||||
def quote_replace(matchobj):
|
||||
return "{}{}{}{}".format(
|
||||
matchobj.group(1),
|
||||
matchobj.group(2),
|
||||
"x" * len(matchobj.group(3)),
|
||||
matchobj.group(2),
|
||||
)
|
||||
|
||||
|
||||
def mask_quotes(input):
|
||||
"""Mask the quoted strings so we skip braces inside quoted strings."""
|
||||
search_re = re.compile(r"(.*?)" + QUOTE_RE_STR)
|
||||
return [search_re.sub(quote_replace, line) for line in input]
|
||||
|
||||
|
||||
def do_split(input, masked_input, search_re):
|
||||
output = []
|
||||
mask_output = []
|
||||
for (line, masked_line) in zip(input, masked_input):
|
||||
m = search_re.match(masked_line)
|
||||
while m:
|
||||
split = len(m.group(1))
|
||||
line = line[:split] + r"\n" + line[split:]
|
||||
masked_line = masked_line[:split] + r"\n" + masked_line[split:]
|
||||
m = search_re.match(masked_line)
|
||||
output.extend(line.split(r"\n"))
|
||||
mask_output.extend(masked_line.split(r"\n"))
|
||||
return (output, mask_output)
|
||||
|
||||
|
||||
def split_double_braces(input):
|
||||
"""Masks out the quotes and comments, and then splits appropriate
|
||||
lines (lines that match the double_*_brace re's above) before
|
||||
indenting them below.
|
||||
|
||||
These are used to split lines which have multiple braces on them, so
|
||||
that the indentation looks prettier when all laid out (e.g. closing
|
||||
braces make a nice diagonal line).
|
||||
"""
|
||||
double_open_brace_re = re.compile(r"(.*?[\[\{\(,])(\s*)([\[\{\(])")
|
||||
double_close_brace_re = re.compile(r"(.*?[\]\}\)],?)(\s*)([\]\}\)])")
|
||||
|
||||
masked_input = mask_quotes(input)
|
||||
masked_input = mask_comments(masked_input)
|
||||
|
||||
(output, mask_output) = do_split(input, masked_input, double_open_brace_re)
|
||||
(output, mask_output) = do_split(output, mask_output, double_close_brace_re)
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def count_braces(line):
|
||||
"""keeps track of the number of braces on a given line and returns the result.
|
||||
|
||||
It starts at zero and subtracts for closed braces, and adds for open braces.
|
||||
"""
|
||||
open_braces = ["[", "(", "{"]
|
||||
close_braces = ["]", ")", "}"]
|
||||
closing_prefix_re = re.compile(r"[^\s\]\}\)]\s*[\]\}\)]+,?\s*$")
|
||||
cnt = 0
|
||||
stripline = COMMENT_RE.sub(r"", line)
|
||||
stripline = QUOTE_RE.sub(r"''", stripline)
|
||||
for char in stripline:
|
||||
for brace in open_braces:
|
||||
if char == brace:
|
||||
cnt += 1
|
||||
for brace in close_braces:
|
||||
if char == brace:
|
||||
cnt -= 1
|
||||
|
||||
after = False
|
||||
if cnt > 0:
|
||||
after = True
|
||||
|
||||
# This catches the special case of a closing brace having something
|
||||
# other than just whitespace ahead of it -- we don't want to
|
||||
# unindent that until after this line is printed so it stays with
|
||||
# the previous indentation level.
|
||||
if cnt < 0 and closing_prefix_re.match(stripline):
|
||||
after = True
|
||||
return (cnt, after)
|
||||
|
||||
|
||||
def prettyprint_input(lines):
|
||||
"""Does the main work of indenting the input based on the brace counts."""
|
||||
indent = 0
|
||||
basic_offset = 2
|
||||
for line in lines:
|
||||
if COMMENT_RE.match(line):
|
||||
print(line)
|
||||
else:
|
||||
line = line.strip("\r\n\t ") # Otherwise doesn't strip \r on Unix.
|
||||
if len(line) > 0:
|
||||
(brace_diff, after) = count_braces(line)
|
||||
if brace_diff != 0:
|
||||
if after:
|
||||
print(" " * (basic_offset * indent) + line)
|
||||
indent += brace_diff
|
||||
else:
|
||||
indent += brace_diff
|
||||
print(" " * (basic_offset * indent) + line)
|
||||
else:
|
||||
print(" " * (basic_offset * indent) + line)
|
||||
else:
|
||||
print("")
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) > 1:
|
||||
data = open(sys.argv[1]).read().splitlines()
|
||||
else:
|
||||
data = sys.stdin.read().splitlines()
|
||||
# Split up the double braces.
|
||||
lines = split_double_braces(data)
|
||||
|
||||
# Indent and print the output.
|
||||
prettyprint_input(lines)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
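A hedged usage sketch: feed a .gyp file to the pretty-printer above on stdin (the script path and binding.gyp are placeholders); main() falls back to sys.stdin when no filename argument is given.

import subprocess
import sys

with open("binding.gyp") as src:
    subprocess.run([sys.executable, "tools/pretty_gyp.py"], stdin=src, check=True)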
181
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/pretty_sln.py
generated
vendored
Executable file
181
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/pretty_sln.py
generated
vendored
Executable file
@@ -0,0 +1,181 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Prints the information in a sln file in a diffable way.
|
||||
|
||||
It first outputs each project in alphabetical order with its
|
||||
dependencies.
|
||||
|
||||
Then it outputs a possible build order.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import pretty_vcproj
|
||||
|
||||
__author__ = "nsylvain (Nicolas Sylvain)"
|
||||
|
||||
|
||||
def BuildProject(project, built, projects, deps):
|
||||
# if all dependencies are done, we can build it, otherwise we try to build the
|
||||
# dependency.
|
||||
# This is not infinite-recursion proof.
|
||||
for dep in deps[project]:
|
||||
if dep not in built:
|
||||
BuildProject(dep, built, projects, deps)
|
||||
print(project)
|
||||
built.append(project)
|
||||
|
||||
|
||||
def ParseSolution(solution_file):
|
||||
# All projects, their clsid and paths.
|
||||
projects = dict()
|
||||
|
||||
# A list of dependencies associated with a project.
|
||||
dependencies = dict()
|
||||
|
||||
# Regular expressions that matches the SLN format.
|
||||
# The first line of a project definition.
|
||||
begin_project = re.compile(
|
||||
r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
|
||||
r'}"\) = "(.*)", "(.*)", "(.*)"$'
|
||||
)
|
||||
# The last line of a project definition.
|
||||
end_project = re.compile("^EndProject$")
|
||||
# The first line of a dependency list.
|
||||
begin_dep = re.compile(r"ProjectSection\(ProjectDependencies\) = postProject$")
|
||||
# The last line of a dependency list.
|
||||
end_dep = re.compile("EndProjectSection$")
|
||||
# A line describing a dependency.
|
||||
dep_line = re.compile(" *({.*}) = ({.*})$")
|
||||
|
||||
in_deps = False
|
||||
solution = open(solution_file)
|
||||
for line in solution:
|
||||
results = begin_project.search(line)
|
||||
if results:
|
||||
# Hack to remove icu because the diff is too different.
|
||||
if results.group(1).find("icu") != -1:
|
||||
continue
|
||||
# We remove "_gyp" from the names because it helps to diff them.
|
||||
current_project = results.group(1).replace("_gyp", "")
|
||||
projects[current_project] = [
|
||||
results.group(2).replace("_gyp", ""),
|
||||
results.group(3),
|
||||
results.group(2),
|
||||
]
|
||||
dependencies[current_project] = []
|
||||
continue
|
||||
|
||||
results = end_project.search(line)
|
||||
if results:
|
||||
current_project = None
|
||||
continue
|
||||
|
||||
results = begin_dep.search(line)
|
||||
if results:
|
||||
in_deps = True
|
||||
continue
|
||||
|
||||
results = end_dep.search(line)
|
||||
if results:
|
||||
in_deps = False
|
||||
continue
|
||||
|
||||
results = dep_line.search(line)
|
||||
if results and in_deps and current_project:
|
||||
dependencies[current_project].append(results.group(1))
|
||||
continue
|
||||
|
||||
# Change all dependencies clsid to name instead.
|
||||
for project in dependencies:
|
||||
# For each dependency in this project
|
||||
new_dep_array = []
|
||||
for dep in dependencies[project]:
|
||||
# Look for the project name matching this clsid
|
||||
for project_info in projects:
|
||||
if projects[project_info][1] == dep:
|
||||
new_dep_array.append(project_info)
|
||||
dependencies[project] = sorted(new_dep_array)
|
||||
|
||||
return (projects, dependencies)
|
||||
|
||||
|
||||
def PrintDependencies(projects, deps):
|
||||
print("---------------------------------------")
|
||||
print("Dependencies for all projects")
|
||||
print("---------------------------------------")
|
||||
print("-- --")
|
||||
|
||||
for (project, dep_list) in sorted(deps.items()):
|
||||
print("Project : %s" % project)
|
||||
print("Path : %s" % projects[project][0])
|
||||
if dep_list:
|
||||
for dep in dep_list:
|
||||
print(" - %s" % dep)
|
||||
print("")
|
||||
|
||||
print("-- --")
|
||||
|
||||
|
||||
def PrintBuildOrder(projects, deps):
|
||||
print("---------------------------------------")
|
||||
print("Build order ")
|
||||
print("---------------------------------------")
|
||||
print("-- --")
|
||||
|
||||
built = []
|
||||
for (project, _) in sorted(deps.items()):
|
||||
if project not in built:
|
||||
BuildProject(project, built, projects, deps)
|
||||
|
||||
print("-- --")
|
||||
|
||||
|
||||
def PrintVCProj(projects):
|
||||
|
||||
for project in projects:
|
||||
print("-------------------------------------")
|
||||
print("-------------------------------------")
|
||||
print(project)
|
||||
print(project)
|
||||
print(project)
|
||||
print("-------------------------------------")
|
||||
print("-------------------------------------")
|
||||
|
||||
project_path = os.path.abspath(
|
||||
os.path.join(os.path.dirname(sys.argv[1]), projects[project][2])
|
||||
)
|
||||
|
||||
pretty = pretty_vcproj
|
||||
argv = [
|
||||
"",
|
||||
project_path,
|
||||
"$(SolutionDir)=%s\\" % os.path.dirname(sys.argv[1]),
|
||||
]
|
||||
argv.extend(sys.argv[3:])
|
||||
pretty.main(argv)
|
||||
|
||||
|
||||
def main():
|
||||
# check if we have exactly 1 parameter.
|
||||
if len(sys.argv) < 2:
|
||||
print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0])
|
||||
return 1
|
||||
|
||||
(projects, deps) = ParseSolution(sys.argv[1])
|
||||
PrintDependencies(projects, deps)
|
||||
PrintBuildOrder(projects, deps)
|
||||
|
||||
if "--recursive" in sys.argv:
|
||||
PrintVCProj(projects)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
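A small sanity check (the sample line is invented) of the project-header regex that ParseSolution above relies on:

import re

begin_project = re.compile(
    r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
    r'}"\) = "(.*)", "(.*)", "(.*)"$'
)
line = ('Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = '
        '"base_gyp", "base\\base_gyp.vcproj", "{DEADBEEF-0000-0000-0000-000000000000}"')
m = begin_project.search(line)
print(m.group(1), m.group(2), m.group(3))
# -> base_gyp base\base_gyp.vcproj {DEADBEEF-0000-0000-0000-000000000000}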
339
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
generated
vendored
Executable file
339
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
generated
vendored
Executable file
@@ -0,0 +1,339 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Make the format of a vcproj really pretty.
|
||||
|
||||
This script normalizes and sorts an xml file. It also fetches all the properties
|
||||
inside linked vsprops and includes them explicitly in the vcproj.
|
||||
|
||||
It outputs the resulting xml to stdout.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from xml.dom.minidom import parse
|
||||
from xml.dom.minidom import Node
|
||||
|
||||
__author__ = "nsylvain (Nicolas Sylvain)"
|
||||
ARGUMENTS = None
|
||||
REPLACEMENTS = dict()
|
||||
|
||||
|
||||
def cmp(x, y):
|
||||
return (x > y) - (x < y)
|
||||
|
||||
|
||||
class CmpTuple:
|
||||
"""Compare function between 2 tuple."""
|
||||
|
||||
def __call__(self, x, y):
|
||||
return cmp(x[0], y[0])
|
||||
|
||||
|
||||
class CmpNode:
|
||||
"""Compare function between 2 xml nodes."""
|
||||
|
||||
def __call__(self, x, y):
|
||||
def get_string(node):
|
||||
node_string = "node"
|
||||
node_string += node.nodeName
|
||||
if node.nodeValue:
|
||||
node_string += node.nodeValue
|
||||
|
||||
if node.attributes:
|
||||
# We first sort by name, if present.
|
||||
node_string += node.getAttribute("Name")
|
||||
|
||||
all_nodes = []
|
||||
for (name, value) in node.attributes.items():
|
||||
all_nodes.append((name, value))
|
||||
|
||||
all_nodes.sort(CmpTuple())
|
||||
for (name, value) in all_nodes:
|
||||
node_string += name
|
||||
node_string += value
|
||||
|
||||
return node_string
|
||||
|
||||
return cmp(get_string(x), get_string(y))
|
||||
|
||||
|
||||
def PrettyPrintNode(node, indent=0):
|
||||
if node.nodeType == Node.TEXT_NODE:
|
||||
if node.data.strip():
|
||||
print("{}{}".format(" " * indent, node.data.strip()))
|
||||
return
|
||||
|
||||
if node.childNodes:
|
||||
node.normalize()
|
||||
# Get the number of attributes
|
||||
attr_count = 0
|
||||
if node.attributes:
|
||||
attr_count = node.attributes.length
|
||||
|
||||
# Print the main tag
|
||||
if attr_count == 0:
|
||||
print("{}<{}>".format(" " * indent, node.nodeName))
|
||||
else:
|
||||
print("{}<{}".format(" " * indent, node.nodeName))
|
||||
|
||||
all_attributes = []
|
||||
for (name, value) in node.attributes.items():
|
||||
all_attributes.append((name, value))
|
||||
all_attributes.sort(CmpTuple())
|
||||
for (name, value) in all_attributes:
|
||||
print('{} {}="{}"'.format(" " * indent, name, value))
|
||||
print("%s>" % (" " * indent))
|
||||
if node.nodeValue:
|
||||
print("{} {}".format(" " * indent, node.nodeValue))
|
||||
|
||||
for sub_node in node.childNodes:
|
||||
PrettyPrintNode(sub_node, indent=indent + 2)
|
||||
print("{}</{}>".format(" " * indent, node.nodeName))
|
||||
|
||||
|
||||
def FlattenFilter(node):
|
||||
"""Returns a list of all the node and sub nodes."""
|
||||
node_list = []
|
||||
|
||||
if node.attributes and node.getAttribute("Name") == "_excluded_files":
|
||||
# We don't add the "_excluded_files" filter.
|
||||
return []
|
||||
|
||||
for current in node.childNodes:
|
||||
if current.nodeName == "Filter":
|
||||
node_list.extend(FlattenFilter(current))
|
||||
else:
|
||||
node_list.append(current)
|
||||
|
||||
return node_list
|
||||
|
||||
|
||||
def FixFilenames(filenames, current_directory):
|
||||
new_list = []
|
||||
for filename in filenames:
|
||||
if filename:
|
||||
for key in REPLACEMENTS:
|
||||
filename = filename.replace(key, REPLACEMENTS[key])
|
||||
os.chdir(current_directory)
|
||||
filename = filename.strip("\"' ")
|
||||
if filename.startswith("$"):
|
||||
new_list.append(filename)
|
||||
else:
|
||||
new_list.append(os.path.abspath(filename))
|
||||
return new_list
|
||||
|
||||
|
||||
def AbsoluteNode(node):
|
||||
"""Makes all the properties we know about in this node absolute."""
|
||||
if node.attributes:
|
||||
for (name, value) in node.attributes.items():
|
||||
if name in [
|
||||
"InheritedPropertySheets",
|
||||
"RelativePath",
|
||||
"AdditionalIncludeDirectories",
|
||||
"IntermediateDirectory",
|
||||
"OutputDirectory",
|
||||
"AdditionalLibraryDirectories",
|
||||
]:
|
||||
# We want to fix up these paths
|
||||
path_list = value.split(";")
|
||||
new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
|
||||
node.setAttribute(name, ";".join(new_list))
|
||||
if not value:
|
||||
node.removeAttribute(name)
|
||||
|
||||
|
||||
def CleanupVcproj(node):
|
||||
"""For each sub node, we call recursively this function."""
|
||||
for sub_node in node.childNodes:
|
||||
AbsoluteNode(sub_node)
|
||||
CleanupVcproj(sub_node)
|
||||
|
||||
# Normalize the node, and remove all extraneous whitespaces.
|
||||
for sub_node in node.childNodes:
|
||||
if sub_node.nodeType == Node.TEXT_NODE:
|
||||
sub_node.data = sub_node.data.replace("\r", "")
|
||||
sub_node.data = sub_node.data.replace("\n", "")
|
||||
sub_node.data = sub_node.data.rstrip()
|
||||
|
||||
# Fix all the semicolon separated attributes to be sorted, and we also
|
||||
# remove the dups.
|
||||
if node.attributes:
|
||||
for (name, value) in node.attributes.items():
|
||||
sorted_list = sorted(value.split(";"))
|
||||
unique_list = []
|
||||
for i in sorted_list:
|
||||
if not unique_list.count(i):
|
||||
unique_list.append(i)
|
||||
node.setAttribute(name, ";".join(unique_list))
|
||||
if not value:
|
||||
node.removeAttribute(name)
|
||||
|
||||
if node.childNodes:
|
||||
node.normalize()
|
||||
|
||||
# For each node, take a copy, and remove it from the list.
|
||||
node_array = []
|
||||
while node.childNodes and node.childNodes[0]:
|
||||
# Take a copy of the node and remove it from the list.
|
||||
current = node.childNodes[0]
|
||||
node.removeChild(current)
|
||||
|
||||
# If the child is a filter, we want to append all its children
|
||||
# to this same list.
|
||||
if current.nodeName == "Filter":
|
||||
node_array.extend(FlattenFilter(current))
|
||||
else:
|
||||
node_array.append(current)
|
||||
|
||||
# Sort the list.
|
||||
node_array.sort(CmpNode())
|
||||
|
||||
# Insert the nodes in the correct order.
|
||||
for new_node in node_array:
|
||||
# But don't append empty tool node.
|
||||
if new_node.nodeName == "Tool":
|
||||
if new_node.attributes and new_node.attributes.length == 1:
|
||||
# This one was empty.
|
||||
continue
|
||||
if new_node.nodeName == "UserMacro":
|
||||
continue
|
||||
node.appendChild(new_node)
|
||||
|
||||
|
||||
def GetConfiguationNodes(vcproj):
|
||||
# TODO(nsylvain): Find a better way to navigate the xml.
|
||||
nodes = []
|
||||
for node in vcproj.childNodes:
|
||||
if node.nodeName == "Configurations":
|
||||
for sub_node in node.childNodes:
|
||||
if sub_node.nodeName == "Configuration":
|
||||
nodes.append(sub_node)
|
||||
|
||||
return nodes
|
||||
|
||||
|
||||
def GetChildrenVsprops(filename):
|
||||
dom = parse(filename)
|
||||
if dom.documentElement.attributes:
|
||||
vsprops = dom.documentElement.getAttribute("InheritedPropertySheets")
|
||||
return FixFilenames(vsprops.split(";"), os.path.dirname(filename))
|
||||
return []
|
||||
|
||||
|
||||
def SeekToNode(node1, child2):
|
||||
# A text node does not have properties.
|
||||
if child2.nodeType == Node.TEXT_NODE:
|
||||
return None
|
||||
|
||||
# Get the name of the current node.
|
||||
current_name = child2.getAttribute("Name")
|
||||
if not current_name:
|
||||
# There is no name. We don't know how to merge.
|
||||
return None
|
||||
|
||||
# Look through all the nodes to find a match.
|
||||
for sub_node in node1.childNodes:
|
||||
if sub_node.nodeName == child2.nodeName:
|
||||
name = sub_node.getAttribute("Name")
|
||||
if name == current_name:
|
||||
return sub_node
|
||||
|
||||
# No match. We give up.
|
||||
return None
|
||||
|
||||
|
||||
def MergeAttributes(node1, node2):
|
||||
# No attributes to merge?
|
||||
if not node2.attributes:
|
||||
return
|
||||
|
||||
for (name, value2) in node2.attributes.items():
|
||||
# Don't merge the 'Name' attribute.
|
||||
if name == "Name":
|
||||
continue
|
||||
value1 = node1.getAttribute(name)
|
||||
if value1:
|
||||
# The attribute exists in the main node. If it's equal, we leave it
|
||||
# untouched, otherwise we concatenate it.
|
||||
if value1 != value2:
|
||||
node1.setAttribute(name, ";".join([value1, value2]))
|
||||
else:
|
||||
# The attribute does not exist in the main node. We append this one.
|
||||
node1.setAttribute(name, value2)
|
||||
|
||||
# If the attribute was a property sheet attribute, we remove it, since
|
||||
# they are useless.
|
||||
if name == "InheritedPropertySheets":
|
||||
node1.removeAttribute(name)
|
||||
|
||||
|
||||
def MergeProperties(node1, node2):
|
||||
MergeAttributes(node1, node2)
|
||||
for child2 in node2.childNodes:
|
||||
child1 = SeekToNode(node1, child2)
|
||||
if child1:
|
||||
MergeProperties(child1, child2)
|
||||
else:
|
||||
node1.appendChild(child2.cloneNode(True))
|
||||
|
||||
|
||||
def main(argv):
|
||||
"""Main function of this vcproj prettifier."""
|
||||
global ARGUMENTS
|
||||
ARGUMENTS = argv
|
||||
|
||||
# check if we have exactly 1 parameter.
|
||||
if len(argv) < 2:
|
||||
print(
|
||||
'Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
|
||||
"[key2=value2]" % argv[0]
|
||||
)
|
||||
return 1
|
||||
|
||||
# Parse the keys
|
||||
for i in range(2, len(argv)):
|
||||
(key, value) = argv[i].split("=")
|
||||
REPLACEMENTS[key] = value
|
||||
|
||||
# Open the vcproj and parse the xml.
|
||||
dom = parse(argv[1])
|
||||
|
||||
# First thing we need to do is find the Configuration Node and merge them
|
||||
# with the vsprops they include.
|
||||
for configuration_node in GetConfiguationNodes(dom.documentElement):
|
||||
# Get the property sheets associated with this configuration.
|
||||
vsprops = configuration_node.getAttribute("InheritedPropertySheets")
|
||||
|
||||
# Fix the filenames to be absolute.
|
||||
vsprops_list = FixFilenames(
|
||||
vsprops.strip().split(";"), os.path.dirname(argv[1])
|
||||
)
|
||||
|
||||
# Extend the list of vsprops with all vsprops contained in the current
|
||||
# vsprops.
|
||||
for current_vsprops in vsprops_list:
|
||||
vsprops_list.extend(GetChildrenVsprops(current_vsprops))
|
||||
|
||||
# Now that we have all the vsprops, we need to merge them.
|
||||
for current_vsprops in vsprops_list:
|
||||
MergeProperties(configuration_node, parse(current_vsprops).documentElement)
|
||||
|
||||
# Now that everything is merged, we need to cleanup the xml.
|
||||
CleanupVcproj(dom.documentElement)
|
||||
|
||||
# Finally, we use the prett xml function to print the vcproj back to the
|
||||
# user.
|
||||
# print dom.toprettyxml(newl="\n")
|
||||
PrettyPrintNode(dom.documentElement)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv))
|
||||
250
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/Find-VisualStudio.cs
generated
vendored
Normal file
250
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/Find-VisualStudio.cs
generated
vendored
Normal file
@@ -0,0 +1,250 @@
|
||||
// Copyright 2017 - Refael Ackermann
|
||||
// Distributed under MIT style license
|
||||
// See accompanying file LICENSE at https://github.com/node4good/windows-autoconf
|
||||
|
||||
// Usage:
|
||||
// powershell -ExecutionPolicy Unrestricted -Command "Add-Type -Path Find-VisualStudio.cs; [VisualStudioConfiguration.Main]::PrintJson()"
|
||||
// This script needs to be compatible with PowerShell v2 to run on Windows 2008R2 and Windows 7.
|
||||
|
||||
using System;
|
||||
using System.Text;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace VisualStudioConfiguration
|
||||
{
|
||||
[Flags]
|
||||
public enum InstanceState : uint
|
||||
{
|
||||
None = 0,
|
||||
Local = 1,
|
||||
Registered = 2,
|
||||
NoRebootRequired = 4,
|
||||
NoErrors = 8,
|
||||
Complete = 4294967295,
|
||||
}
|
||||
|
||||
[Guid("6380BCFF-41D3-4B2E-8B2E-BF8A6810C848")]
|
||||
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
|
||||
[ComImport]
|
||||
public interface IEnumSetupInstances
|
||||
{
|
||||
|
||||
void Next([MarshalAs(UnmanagedType.U4), In] int celt,
|
||||
[MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.Interface), Out] ISetupInstance[] rgelt,
|
||||
[MarshalAs(UnmanagedType.U4)] out int pceltFetched);
|
||||
|
||||
void Skip([MarshalAs(UnmanagedType.U4), In] int celt);
|
||||
|
||||
void Reset();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.Interface)]
|
||||
IEnumSetupInstances Clone();
|
||||
}
|
||||
|
||||
[Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")]
|
||||
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
|
||||
[ComImport]
|
||||
public interface ISetupConfiguration
|
||||
{
|
||||
}
|
||||
|
||||
[Guid("26AAB78C-4A60-49D6-AF3B-3C35BC93365D")]
|
||||
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
|
||||
[ComImport]
|
||||
public interface ISetupConfiguration2 : ISetupConfiguration
|
||||
{
|
||||
|
||||
[return: MarshalAs(UnmanagedType.Interface)]
|
||||
IEnumSetupInstances EnumInstances();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.Interface)]
|
||||
ISetupInstance GetInstanceForCurrentProcess();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.Interface)]
|
||||
ISetupInstance GetInstanceForPath([MarshalAs(UnmanagedType.LPWStr), In] string path);
|
||||
|
||||
[return: MarshalAs(UnmanagedType.Interface)]
|
||||
IEnumSetupInstances EnumAllInstances();
|
||||
}
|
||||
|
||||
[Guid("B41463C3-8866-43B5-BC33-2B0676F7F42E")]
|
||||
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
|
||||
[ComImport]
|
||||
public interface ISetupInstance
|
||||
{
|
||||
}
|
||||
|
||||
[Guid("89143C9A-05AF-49B0-B717-72E218A2185C")]
|
||||
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
|
||||
[ComImport]
|
||||
public interface ISetupInstance2 : ISetupInstance
|
||||
{
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetInstanceId();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.Struct)]
|
||||
System.Runtime.InteropServices.ComTypes.FILETIME GetInstallDate();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetInstallationName();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetInstallationPath();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetInstallationVersion();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetDisplayName([MarshalAs(UnmanagedType.U4), In] int lcid);
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetDescription([MarshalAs(UnmanagedType.U4), In] int lcid);
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string ResolvePath([MarshalAs(UnmanagedType.LPWStr), In] string pwszRelativePath);
|
||||
|
||||
[return: MarshalAs(UnmanagedType.U4)]
|
||||
InstanceState GetState();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)]
|
||||
ISetupPackageReference[] GetPackages();
|
||||
|
||||
ISetupPackageReference GetProduct();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetProductPath();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.VariantBool)]
|
||||
bool IsLaunchable();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.VariantBool)]
|
||||
bool IsComplete();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)]
|
||||
ISetupPropertyStore GetProperties();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetEnginePath();
|
||||
}
|
||||
|
||||
[Guid("DA8D8A16-B2B6-4487-A2F1-594CCCCD6BF5")]
|
||||
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
|
||||
[ComImport]
|
||||
public interface ISetupPackageReference
|
||||
{
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetId();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetVersion();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetChip();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetLanguage();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetBranch();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetType();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.BStr)]
|
||||
string GetUniqueId();
|
||||
|
||||
[return: MarshalAs(UnmanagedType.VariantBool)]
|
||||
bool GetIsExtension();
|
||||
}
|
||||
|
||||
[Guid("c601c175-a3be-44bc-91f6-4568d230fc83")]
|
||||
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
|
||||
[ComImport]
|
||||
public interface ISetupPropertyStore
|
||||
{
|
||||
|
||||
[return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_BSTR)]
|
||||
string[] GetNames();
|
||||
|
||||
object GetValue([MarshalAs(UnmanagedType.LPWStr), In] string pwszName);
|
||||
}
|
||||
|
||||
[Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")]
|
||||
[CoClass(typeof(SetupConfigurationClass))]
|
||||
[ComImport]
|
||||
public interface SetupConfiguration : ISetupConfiguration2, ISetupConfiguration
|
||||
{
|
||||
}
|
||||
|
||||
[Guid("177F0C4A-1CD3-4DE7-A32C-71DBBB9FA36D")]
|
||||
[ClassInterface(ClassInterfaceType.None)]
|
||||
[ComImport]
|
||||
public class SetupConfigurationClass
|
||||
{
|
||||
}
|
||||
|
||||
public static class Main
|
||||
{
|
||||
public static void PrintJson()
|
||||
{
|
||||
ISetupConfiguration query = new SetupConfiguration();
|
||||
ISetupConfiguration2 query2 = (ISetupConfiguration2)query;
|
||||
IEnumSetupInstances e = query2.EnumAllInstances();
|
||||
|
||||
int pceltFetched;
|
||||
ISetupInstance2[] rgelt = new ISetupInstance2[1];
|
||||
List<string> instances = new List<string>();
|
||||
while (true)
|
||||
{
|
||||
e.Next(1, rgelt, out pceltFetched);
|
||||
if (pceltFetched <= 0)
|
||||
{
|
||||
Console.WriteLine(String.Format("[{0}]", string.Join(",", instances.ToArray())));
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
instances.Add(InstanceJson(rgelt[0]));
|
||||
}
|
||||
catch (COMException)
|
||||
{
|
||||
// Ignore instances that can't be queried.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static string JsonString(string s)
|
||||
{
|
||||
return "\"" + s.Replace("\\", "\\\\").Replace("\"", "\\\"") + "\"";
|
||||
}
|
||||
|
||||
private static string InstanceJson(ISetupInstance2 setupInstance2)
|
||||
{
|
||||
// Visual Studio component directory:
|
||||
// https://docs.microsoft.com/en-us/visualstudio/install/workload-and-component-ids
|
||||
|
||||
StringBuilder json = new StringBuilder();
|
||||
json.Append("{");
|
||||
|
||||
string path = JsonString(setupInstance2.GetInstallationPath());
|
||||
json.Append(String.Format("\"path\":{0},", path));
|
||||
|
||||
string version = JsonString(setupInstance2.GetInstallationVersion());
|
||||
json.Append(String.Format("\"version\":{0},", version));
|
||||
|
||||
List<string> packages = new List<string>();
|
||||
foreach (ISetupPackageReference package in setupInstance2.GetPackages())
|
||||
{
|
||||
string id = JsonString(package.GetId());
|
||||
packages.Add(id);
|
||||
}
|
||||
json.Append(String.Format("\"packages\":[{0}]", string.Join(",", packages.ToArray())));
|
||||
|
||||
json.Append("}");
|
||||
return json.ToString();
|
||||
}
|
||||
}
|
||||
}
|
||||
213
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/build.js
generated
vendored
Normal file
213
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/build.js
generated
vendored
Normal file
@@ -0,0 +1,213 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const path = require('path')
|
||||
const glob = require('glob')
|
||||
const log = require('npmlog')
|
||||
const which = require('which')
|
||||
const win = process.platform === 'win32'
|
||||
|
||||
function build (gyp, argv, callback) {
|
||||
var platformMake = 'make'
|
||||
if (process.platform === 'aix') {
|
||||
platformMake = 'gmake'
|
||||
} else if (process.platform === 'os400') {
|
||||
platformMake = 'gmake'
|
||||
} else if (process.platform.indexOf('bsd') !== -1) {
|
||||
platformMake = 'gmake'
|
||||
} else if (win && argv.length > 0) {
|
||||
argv = argv.map(function (target) {
|
||||
return '/t:' + target
|
||||
})
|
||||
}
|
||||
|
||||
var makeCommand = gyp.opts.make || process.env.MAKE || platformMake
|
||||
var command = win ? 'msbuild' : makeCommand
|
||||
var jobs = gyp.opts.jobs || process.env.JOBS
|
||||
var buildType
|
||||
var config
|
||||
var arch
|
||||
var nodeDir
|
||||
var guessedSolution
|
||||
|
||||
loadConfigGypi()
|
||||
|
||||
/**
|
||||
* Load the "config.gypi" file that was generated during "configure".
|
||||
*/
|
||||
|
||||
function loadConfigGypi () {
|
||||
var configPath = path.resolve('build', 'config.gypi')
|
||||
|
||||
fs.readFile(configPath, 'utf8', function (err, data) {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
callback(new Error('You must run `node-gyp configure` first!'))
|
||||
} else {
|
||||
callback(err)
|
||||
}
|
||||
return
|
||||
}
|
||||
config = JSON.parse(data.replace(/#.+\n/, ''))
|
||||
|
||||
// get the 'arch', 'buildType', and 'nodeDir' vars from the config
|
||||
buildType = config.target_defaults.default_configuration
|
||||
arch = config.variables.target_arch
|
||||
nodeDir = config.variables.nodedir
|
||||
|
||||
if ('debug' in gyp.opts) {
|
||||
buildType = gyp.opts.debug ? 'Debug' : 'Release'
|
||||
}
|
||||
if (!buildType) {
|
||||
buildType = 'Release'
|
||||
}
|
||||
|
||||
log.verbose('build type', buildType)
|
||||
log.verbose('architecture', arch)
|
||||
log.verbose('node dev dir', nodeDir)
|
||||
|
||||
if (win) {
|
||||
findSolutionFile()
|
||||
} else {
|
||||
doWhich()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* On Windows, find the first build/*.sln file.
|
||||
*/
|
||||
|
||||
function findSolutionFile () {
|
||||
glob('build/*.sln', function (err, files) {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
if (files.length === 0) {
|
||||
return callback(new Error('Could not find *.sln file. Did you run "configure"?'))
|
||||
}
|
||||
guessedSolution = files[0]
|
||||
log.verbose('found first Solution file', guessedSolution)
|
||||
doWhich()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses node-which to locate the msbuild / make executable.
|
||||
*/
|
||||
|
||||
function doWhich () {
|
||||
// On Windows use msbuild provided by node-gyp configure
|
||||
if (win) {
|
||||
if (!config.variables.msbuild_path) {
|
||||
return callback(new Error(
|
||||
'MSBuild is not set, please run `node-gyp configure`.'))
|
||||
}
|
||||
command = config.variables.msbuild_path
|
||||
log.verbose('using MSBuild:', command)
|
||||
doBuild()
|
||||
return
|
||||
}
|
||||
// First make sure we have the build command in the PATH
|
||||
which(command, function (err, execPath) {
|
||||
if (err) {
|
||||
// Some other error or 'make' not found on Unix, report that to the user
|
||||
callback(err)
|
||||
return
|
||||
}
|
||||
log.verbose('`which` succeeded for `' + command + '`', execPath)
|
||||
doBuild()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Actually spawn the process and compile the module.
|
||||
*/
|
||||
|
||||
function doBuild () {
|
||||
// Enable Verbose build
|
||||
var verbose = log.levels[log.level] <= log.levels.verbose
|
||||
var j
|
||||
|
||||
if (!win && verbose) {
|
||||
argv.push('V=1')
|
||||
}
|
||||
|
||||
if (win && !verbose) {
|
||||
argv.push('/clp:Verbosity=minimal')
|
||||
}
|
||||
|
||||
if (win) {
|
||||
// Turn off the Microsoft logo on Windows
|
||||
argv.push('/nologo')
|
||||
}
|
||||
|
||||
// Specify the build type, Release by default
|
||||
if (win) {
|
||||
// Convert .gypi config target_arch to MSBuild /Platform
|
||||
// Since there are many ways to state '32-bit Intel', default to it.
|
||||
// N.B. msbuild's Condition string equality tests are case-insensitive.
|
||||
var archLower = arch.toLowerCase()
|
||||
var p = archLower === 'x64' ? 'x64'
|
||||
: (archLower === 'arm' ? 'ARM'
|
||||
: (archLower === 'arm64' ? 'ARM64' : 'Win32'))
|
||||
argv.push('/p:Configuration=' + buildType + ';Platform=' + p)
|
||||
if (jobs) {
|
||||
j = parseInt(jobs, 10)
|
||||
if (!isNaN(j) && j > 0) {
|
||||
argv.push('/m:' + j)
|
||||
} else if (jobs.toUpperCase() === 'MAX') {
|
||||
argv.push('/m:' + require('os').cpus().length)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
argv.push('BUILDTYPE=' + buildType)
|
||||
// Invoke the Makefile in the 'build' dir.
|
||||
argv.push('-C')
|
||||
argv.push('build')
|
||||
if (jobs) {
|
||||
j = parseInt(jobs, 10)
|
||||
if (!isNaN(j) && j > 0) {
|
||||
argv.push('--jobs')
|
||||
argv.push(j)
|
||||
} else if (jobs.toUpperCase() === 'MAX') {
|
||||
argv.push('--jobs')
|
||||
argv.push(require('os').cpus().length)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (win) {
|
||||
// did the user specify their own .sln file?
|
||||
var hasSln = argv.some(function (arg) {
|
||||
return path.extname(arg) === '.sln'
|
||||
})
|
||||
if (!hasSln) {
|
||||
argv.unshift(gyp.opts.solution || guessedSolution)
|
||||
}
|
||||
}
|
||||
|
||||
if (!win) {
|
||||
// Add build-time dependency symlinks (such as Python) to PATH
|
||||
const buildBinsDir = path.resolve('build', 'node_gyp_bins')
|
||||
process.env.PATH = `${buildBinsDir}:${process.env.PATH}`
|
||||
log.verbose('bin symlinks', `adding symlinks (such as Python), at "${buildBinsDir}", to PATH`)
|
||||
}
|
||||
|
||||
var proc = gyp.spawn(command, argv)
|
||||
proc.on('exit', onExit)
|
||||
}
|
||||
|
||||
function onExit (code, signal) {
|
||||
if (code !== 0) {
|
||||
return callback(new Error('`' + command + '` failed with exit code: ' + code))
|
||||
}
|
||||
if (signal) {
|
||||
return callback(new Error('`' + command + '` got signal: ' + signal))
|
||||
}
|
||||
callback()
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = build
|
||||
module.exports.usage = 'Invokes `' + (win ? 'msbuild' : 'make') + '` and builds the module'
|
||||
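The arch-to-`/p:Platform` mapping and the `/m` job-count handling in `doBuild()` above are the parts most often misread, so here is a small standalone sketch of that same logic. It is illustrative only; the helper name `msbuildArgs` is not a node-gyp export.

```
// Sketch (not part of node-gyp): rebuild the MSBuild argument logic from doBuild().
function msbuildArgs (arch, buildType, jobs) {
  const archLower = String(arch).toLowerCase()
  const platform = archLower === 'x64' ? 'x64'
    : (archLower === 'arm' ? 'ARM'
      : (archLower === 'arm64' ? 'ARM64' : 'Win32'))
  const args = ['/nologo', '/p:Configuration=' + buildType + ';Platform=' + platform]
  const j = parseInt(jobs, 10)
  if (!isNaN(j) && j > 0) {
    args.push('/m:' + j)                           // explicit job count
  } else if (typeof jobs === 'string' && jobs.toUpperCase() === 'MAX') {
    args.push('/m:' + require('os').cpus().length) // one job per CPU
  }
  return args
}

console.log(msbuildArgs('arm64', 'Release', 'max'))
// -> [ '/nologo', '/p:Configuration=Release;Platform=ARM64', '/m:<number of CPUs>' ]
```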
15
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/clean.js
generated
vendored
Normal file
15
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/clean.js
generated
vendored
Normal file
@@ -0,0 +1,15 @@
'use strict'

const rm = require('rimraf')
const log = require('npmlog')

function clean (gyp, argv, callback) {
  // Remove the 'build' dir
  var buildDir = 'build'

  log.verbose('clean', 'removing "%s" directory', buildDir)
  rm(buildDir, callback)
}

module.exports = clean
module.exports.usage = 'Removes any generated build files and the "out" dir'
360
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/configure.js
generated
vendored
Normal file
360
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/configure.js
generated
vendored
Normal file
@@ -0,0 +1,360 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const path = require('path')
|
||||
const log = require('npmlog')
|
||||
const os = require('os')
|
||||
const processRelease = require('./process-release')
|
||||
const win = process.platform === 'win32'
|
||||
const findNodeDirectory = require('./find-node-directory')
|
||||
const createConfigGypi = require('./create-config-gypi')
|
||||
const msgFormat = require('util').format
|
||||
var findPython = require('./find-python')
|
||||
if (win) {
|
||||
var findVisualStudio = require('./find-visualstudio')
|
||||
}
|
||||
|
||||
function configure (gyp, argv, callback) {
|
||||
var python
|
||||
var buildDir = path.resolve('build')
|
||||
var buildBinsDir = path.join(buildDir, 'node_gyp_bins')
|
||||
var configNames = ['config.gypi', 'common.gypi']
|
||||
var configs = []
|
||||
var nodeDir
|
||||
var release = processRelease(argv, gyp, process.version, process.release)
|
||||
|
||||
findPython(gyp.opts.python, function (err, found) {
|
||||
if (err) {
|
||||
callback(err)
|
||||
} else {
|
||||
python = found
|
||||
getNodeDir()
|
||||
}
|
||||
})
|
||||
|
||||
function getNodeDir () {
|
||||
// 'python' should be set by now
|
||||
process.env.PYTHON = python
|
||||
|
||||
if (gyp.opts.nodedir) {
|
||||
// --nodedir was specified. use that for the dev files
|
||||
nodeDir = gyp.opts.nodedir.replace(/^~/, os.homedir())
|
||||
|
||||
log.verbose('get node dir', 'compiling against specified --nodedir dev files: %s', nodeDir)
|
||||
createBuildDir()
|
||||
} else {
|
||||
// if no --nodedir specified, ensure node dependencies are installed
|
||||
if ('v' + release.version !== process.version) {
|
||||
// if --target was given, then determine a target version to compile for
|
||||
log.verbose('get node dir', 'compiling against --target node version: %s', release.version)
|
||||
} else {
|
||||
// if no --target was specified then use the current host node version
|
||||
log.verbose('get node dir', 'no --target version specified, falling back to host node version: %s', release.version)
|
||||
}
|
||||
|
||||
if (!release.semver) {
|
||||
// could not parse the version string with semver
|
||||
return callback(new Error('Invalid version number: ' + release.version))
|
||||
}
|
||||
|
||||
// If the tarball option is set, always remove and reinstall the headers
|
||||
// into devdir. Otherwise only install if they're not already there.
|
||||
gyp.opts.ensure = !gyp.opts.tarball
|
||||
|
||||
gyp.commands.install([release.version], function (err) {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
log.verbose('get node dir', 'target node version installed:', release.versionDir)
|
||||
nodeDir = path.resolve(gyp.devDir, release.versionDir)
|
||||
createBuildDir()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function createBuildDir () {
|
||||
log.verbose('build dir', 'attempting to create "build" dir: %s', buildDir)
|
||||
|
||||
const deepestBuildDirSubdirectory = win ? buildDir : buildBinsDir
|
||||
fs.mkdir(deepestBuildDirSubdirectory, { recursive: true }, function (err, isNew) {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
log.verbose(
|
||||
'build dir', '"build" dir needed to be created?', isNew ? 'Yes' : 'No'
|
||||
)
|
||||
if (win) {
|
||||
findVisualStudio(release.semver, gyp.opts.msvs_version,
|
||||
createConfigFile)
|
||||
} else {
|
||||
createPythonSymlink()
|
||||
createConfigFile()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function createPythonSymlink () {
|
||||
const symlinkDestination = path.join(buildBinsDir, 'python3')
|
||||
|
||||
log.verbose('python symlink', `creating symlink to "${python}" at "${symlinkDestination}"`)
|
||||
|
||||
fs.unlink(symlinkDestination, function (err) {
|
||||
if (err && err.code !== 'ENOENT') {
|
||||
log.verbose('python symlink', 'error when attempting to remove existing symlink')
|
||||
log.verbose('python symlink', err.stack, 'errno: ' + err.errno)
|
||||
}
|
||||
fs.symlink(python, symlinkDestination, function (err) {
|
||||
if (err) {
|
||||
log.verbose('python symlink', 'error when attempting to create Python symlink')
|
||||
log.verbose('python symlink', err.stack, 'errno: ' + err.errno)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function createConfigFile (err, vsInfo) {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
if (process.platform === 'win32') {
|
||||
process.env.GYP_MSVS_VERSION = Math.min(vsInfo.versionYear, 2015)
|
||||
process.env.GYP_MSVS_OVERRIDE_PATH = vsInfo.path
|
||||
}
|
||||
createConfigGypi({ gyp, buildDir, nodeDir, vsInfo }).then(configPath => {
|
||||
configs.push(configPath)
|
||||
findConfigs()
|
||||
}).catch(err => {
|
||||
callback(err)
|
||||
})
|
||||
}
|
||||
|
||||
function findConfigs () {
|
||||
var name = configNames.shift()
|
||||
if (!name) {
|
||||
return runGyp()
|
||||
}
|
||||
var fullPath = path.resolve(name)
|
||||
|
||||
log.verbose(name, 'checking for gypi file: %s', fullPath)
|
||||
fs.stat(fullPath, function (err) {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
findConfigs() // check next gypi filename
|
||||
} else {
|
||||
callback(err)
|
||||
}
|
||||
} else {
|
||||
log.verbose(name, 'found gypi file')
|
||||
configs.push(fullPath)
|
||||
findConfigs()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function runGyp (err) {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
|
||||
if (!~argv.indexOf('-f') && !~argv.indexOf('--format')) {
|
||||
if (win) {
|
||||
log.verbose('gyp', 'gyp format was not specified; forcing "msvs"')
|
||||
// force the 'msvs' generator on Windows
|
||||
argv.push('-f', 'msvs')
|
||||
} else {
|
||||
log.verbose('gyp', 'gyp format was not specified; forcing "make"')
|
||||
// force the 'make' target for non-Windows
|
||||
argv.push('-f', 'make')
|
||||
}
|
||||
}
|
||||
|
||||
// include all the ".gypi" files that were found
|
||||
configs.forEach(function (config) {
|
||||
argv.push('-I', config)
|
||||
})
|
||||
|
||||
// For AIX and z/OS we need to set up the path to the exports file
|
||||
// which contains the symbols needed for linking.
|
||||
var nodeExpFile
|
||||
if (process.platform === 'aix' || process.platform === 'os390' || process.platform === 'os400') {
|
||||
var ext = process.platform === 'os390' ? 'x' : 'exp'
|
||||
var nodeRootDir = findNodeDirectory()
|
||||
var candidates
|
||||
|
||||
if (process.platform === 'aix' || process.platform === 'os400') {
|
||||
candidates = [
|
||||
'include/node/node',
|
||||
'out/Release/node',
|
||||
'out/Debug/node',
|
||||
'node'
|
||||
].map(function (file) {
|
||||
return file + '.' + ext
|
||||
})
|
||||
} else {
|
||||
candidates = [
|
||||
'out/Release/lib.target/libnode',
|
||||
'out/Debug/lib.target/libnode',
|
||||
'out/Release/obj.target/libnode',
|
||||
'out/Debug/obj.target/libnode',
|
||||
'lib/libnode'
|
||||
].map(function (file) {
|
||||
return file + '.' + ext
|
||||
})
|
||||
}
|
||||
|
||||
var logprefix = 'find exports file'
|
||||
nodeExpFile = findAccessibleSync(logprefix, nodeRootDir, candidates)
|
||||
if (nodeExpFile !== undefined) {
|
||||
log.verbose(logprefix, 'Found exports file: %s', nodeExpFile)
|
||||
} else {
|
||||
var msg = msgFormat('Could not find node.%s file in %s', ext, nodeRootDir)
|
||||
log.error(logprefix, 'Could not find exports file')
|
||||
return callback(new Error(msg))
|
||||
}
|
||||
}
|
||||
|
||||
// For z/OS we need to set up the path to zoslib include directory,
|
||||
// which contains headers included in v8config.h.
|
||||
var zoslibIncDir
|
||||
if (process.platform === 'os390') {
|
||||
logprefix = "find zoslib's zos-base.h:"
|
||||
let msg
|
||||
var zoslibIncPath = process.env.ZOSLIB_INCLUDES
|
||||
if (zoslibIncPath) {
|
||||
zoslibIncPath = findAccessibleSync(logprefix, zoslibIncPath, ['zos-base.h'])
|
||||
if (zoslibIncPath === undefined) {
|
||||
msg = msgFormat('Could not find zos-base.h file in the directory set ' +
|
||||
'in ZOSLIB_INCLUDES environment variable: %s; set it ' +
|
||||
'to the correct path, or unset it to search %s', process.env.ZOSLIB_INCLUDES, nodeRootDir)
|
||||
}
|
||||
} else {
|
||||
candidates = [
|
||||
'include/node/zoslib/zos-base.h',
|
||||
'include/zoslib/zos-base.h',
|
||||
'zoslib/include/zos-base.h',
|
||||
'install/include/node/zoslib/zos-base.h'
|
||||
]
|
||||
zoslibIncPath = findAccessibleSync(logprefix, nodeRootDir, candidates)
|
||||
if (zoslibIncPath === undefined) {
|
||||
msg = msgFormat('Could not find any of %s in directory %s; set ' +
|
||||
'environment variable ZOSLIB_INCLUDES to the path ' +
|
||||
'that contains zos-base.h', candidates.toString(), nodeRootDir)
|
||||
}
|
||||
}
|
||||
if (zoslibIncPath !== undefined) {
|
||||
zoslibIncDir = path.dirname(zoslibIncPath)
|
||||
log.verbose(logprefix, "Found zoslib's zos-base.h in: %s", zoslibIncDir)
|
||||
} else if (release.version.split('.')[0] >= 16) {
|
||||
// zoslib is only shipped in Node v16 and above.
|
||||
log.error(logprefix, msg)
|
||||
return callback(new Error(msg))
|
||||
}
|
||||
}
|
||||
|
||||
// this logic ported from the old `gyp_addon` python file
|
||||
var gypScript = path.resolve(__dirname, '..', 'gyp', 'gyp_main.py')
|
||||
var addonGypi = path.resolve(__dirname, '..', 'addon.gypi')
|
||||
var commonGypi = path.resolve(nodeDir, 'include/node/common.gypi')
|
||||
fs.stat(commonGypi, function (err) {
|
||||
if (err) {
|
||||
commonGypi = path.resolve(nodeDir, 'common.gypi')
|
||||
}
|
||||
|
||||
var outputDir = 'build'
|
||||
if (win) {
|
||||
// Windows expects an absolute path
|
||||
outputDir = buildDir
|
||||
}
|
||||
var nodeGypDir = path.resolve(__dirname, '..')
|
||||
|
||||
var nodeLibFile = path.join(nodeDir,
|
||||
!gyp.opts.nodedir ? '<(target_arch)' : '$(Configuration)',
|
||||
release.name + '.lib')
|
||||
|
||||
argv.push('-I', addonGypi)
|
||||
argv.push('-I', commonGypi)
|
||||
argv.push('-Dlibrary=shared_library')
|
||||
argv.push('-Dvisibility=default')
|
||||
argv.push('-Dnode_root_dir=' + nodeDir)
|
||||
if (process.platform === 'aix' || process.platform === 'os390' || process.platform === 'os400') {
|
||||
argv.push('-Dnode_exp_file=' + nodeExpFile)
|
||||
if (process.platform === 'os390' && zoslibIncDir) {
|
||||
argv.push('-Dzoslib_include_dir=' + zoslibIncDir)
|
||||
}
|
||||
}
|
||||
argv.push('-Dnode_gyp_dir=' + nodeGypDir)
|
||||
|
||||
// Do this to keep Cygwin environments happy, else the unescaped '\' gets eaten up,
|
||||
// resulting in bad paths, Ex c:parentFolderfolderanotherFolder instead of c:\parentFolder\folder\anotherFolder
|
||||
if (win) {
|
||||
nodeLibFile = nodeLibFile.replace(/\\/g, '\\\\')
|
||||
}
|
||||
argv.push('-Dnode_lib_file=' + nodeLibFile)
|
||||
argv.push('-Dmodule_root_dir=' + process.cwd())
|
||||
argv.push('-Dnode_engine=' +
|
||||
(gyp.opts.node_engine || process.jsEngine || 'v8'))
|
||||
argv.push('--depth=.')
|
||||
argv.push('--no-parallel')
|
||||
|
||||
// tell gyp to write the Makefile/Solution files into output_dir
|
||||
argv.push('--generator-output', outputDir)
|
||||
|
||||
// tell make to write its output into the same dir
|
||||
argv.push('-Goutput_dir=.')
|
||||
|
||||
// enforce use of the "binding.gyp" file
|
||||
argv.unshift('binding.gyp')
|
||||
|
||||
// execute `gyp` from the current target nodedir
|
||||
argv.unshift(gypScript)
|
||||
|
||||
// make sure python uses files that came with this particular node package
|
||||
var pypath = [path.join(__dirname, '..', 'gyp', 'pylib')]
|
||||
if (process.env.PYTHONPATH) {
|
||||
pypath.push(process.env.PYTHONPATH)
|
||||
}
|
||||
process.env.PYTHONPATH = pypath.join(win ? ';' : ':')
|
||||
|
||||
var cp = gyp.spawn(python, argv)
|
||||
cp.on('exit', onCpExit)
|
||||
})
|
||||
}
|
||||
|
||||
function onCpExit (code) {
|
||||
if (code !== 0) {
|
||||
callback(new Error('`gyp` failed with exit code: ' + code))
|
||||
} else {
|
||||
// we're done
|
||||
callback()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the first file or directory from an array of candidates that is
|
||||
* readable by the current user, or undefined if none of the candidates are
|
||||
* readable.
|
||||
*/
|
||||
function findAccessibleSync (logprefix, dir, candidates) {
|
||||
for (var next = 0; next < candidates.length; next++) {
|
||||
var candidate = path.resolve(dir, candidates[next])
|
||||
try {
|
||||
var fd = fs.openSync(candidate, 'r')
|
||||
} catch (e) {
|
||||
// this candidate was not found or not readable, do nothing
|
||||
log.silly(logprefix, 'Could not open %s: %s', candidate, e.message)
|
||||
continue
|
||||
}
|
||||
fs.closeSync(fd)
|
||||
log.silly(logprefix, 'Found readable %s', candidate)
|
||||
return candidate
|
||||
}
|
||||
|
||||
return undefined
|
||||
}
|
||||
|
||||
module.exports = configure
|
||||
module.exports.test = {
|
||||
findAccessibleSync: findAccessibleSync
|
||||
}
|
||||
module.exports.usage = 'Generates ' + (win ? 'MSVC project files' : 'a Makefile') + ' for the current module'
|
||||
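For orientation, this is roughly the shape of the argv that `configure()` hands to `gyp.spawn(python, argv)` on a non-Windows host. Every path below is a placeholder and the exact ordering can differ; it is a sketch of the flags assembled above, not output captured from node-gyp.

```
// Illustrative sketch of the gyp invocation configure.js builds (placeholder paths).
const path = require('path')

const nodeDir = '/home/user/.cache/node-gyp/18.12.1'   // placeholder devdir
const gypArgv = [
  path.resolve(__dirname, 'gyp', 'gyp_main.py'),       // gyp script runs first
  'binding.gyp',                                       // the addon's own gyp file
  '-f', 'make',                                        // generator ("msvs" on Windows)
  '-I', path.resolve(__dirname, 'addon.gypi'),
  '-I', path.join(nodeDir, 'include/node/common.gypi'),
  '-I', path.resolve('build', 'config.gypi'),
  '-Dlibrary=shared_library',
  '-Dvisibility=default',
  '-Dnode_root_dir=' + nodeDir,
  '-Dnode_gyp_dir=' + path.resolve(__dirname),
  '-Dmodule_root_dir=' + process.cwd(),
  '-Dnode_engine=v8',
  '--depth=.',
  '--no-parallel',
  '--generator-output', 'build',
  '-Goutput_dir=.'
]
console.log(gypArgv.join(' '))
```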
147
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/create-config-gypi.js
generated
vendored
Normal file
147
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/create-config-gypi.js
generated
vendored
Normal file
@@ -0,0 +1,147 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const log = require('npmlog')
|
||||
const path = require('path')
|
||||
|
||||
function parseConfigGypi (config) {
|
||||
// translated from tools/js2c.py of Node.js
|
||||
// 1. string comments
|
||||
config = config.replace(/#.*/g, '')
|
||||
// 2. join multiline strings
|
||||
config = config.replace(/'$\s+'/mg, '')
|
||||
// 3. normalize string literals from ' into "
|
||||
config = config.replace(/'/g, '"')
|
||||
return JSON.parse(config)
|
||||
}
|
||||
|
||||
async function getBaseConfigGypi ({ gyp, nodeDir }) {
|
||||
// try reading $nodeDir/include/node/config.gypi first when:
|
||||
// 1. --dist-url or --nodedir is specified
|
||||
// 2. and --force-process-config is not specified
|
||||
const useCustomHeaders = gyp.opts.nodedir || gyp.opts.disturl || gyp.opts['dist-url']
|
||||
const shouldReadConfigGypi = useCustomHeaders && !gyp.opts['force-process-config']
|
||||
if (shouldReadConfigGypi && nodeDir) {
|
||||
try {
|
||||
const baseConfigGypiPath = path.resolve(nodeDir, 'include/node/config.gypi')
|
||||
const baseConfigGypi = await fs.promises.readFile(baseConfigGypiPath)
|
||||
return parseConfigGypi(baseConfigGypi.toString())
|
||||
} catch (err) {
|
||||
log.warn('read config.gypi', err.message)
|
||||
}
|
||||
}
|
||||
|
||||
// fall back to process.config if config.gypi is unavailable or invalid
|
||||
return JSON.parse(JSON.stringify(process.config))
|
||||
}
|
||||
|
||||
async function getCurrentConfigGypi ({ gyp, nodeDir, vsInfo }) {
|
||||
const config = await getBaseConfigGypi({ gyp, nodeDir })
|
||||
if (!config.target_defaults) {
|
||||
config.target_defaults = {}
|
||||
}
|
||||
if (!config.variables) {
|
||||
config.variables = {}
|
||||
}
|
||||
|
||||
const defaults = config.target_defaults
|
||||
const variables = config.variables
|
||||
|
||||
// don't inherit the "defaults" from the base config.gypi.
|
||||
// doing so could cause problems in cases where the `node` executable was
|
||||
// compiled on a different machine (with different lib/include paths) than
|
||||
// the machine where the addon is being built to
|
||||
defaults.cflags = []
|
||||
defaults.defines = []
|
||||
defaults.include_dirs = []
|
||||
defaults.libraries = []
|
||||
|
||||
// set the default_configuration prop
|
||||
if ('debug' in gyp.opts) {
|
||||
defaults.default_configuration = gyp.opts.debug ? 'Debug' : 'Release'
|
||||
}
|
||||
|
||||
if (!defaults.default_configuration) {
|
||||
defaults.default_configuration = 'Release'
|
||||
}
|
||||
|
||||
// set the target_arch variable
|
||||
variables.target_arch = gyp.opts.arch || process.arch || 'ia32'
|
||||
if (variables.target_arch === 'arm64') {
|
||||
defaults.msvs_configuration_platform = 'ARM64'
|
||||
defaults.xcode_configuration_platform = 'arm64'
|
||||
}
|
||||
|
||||
// set the node development directory
|
||||
variables.nodedir = nodeDir
|
||||
|
||||
// disable -T "thin" static archives by default
|
||||
variables.standalone_static_library = gyp.opts.thin ? 0 : 1
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
defaults.msbuild_toolset = vsInfo.toolset
|
||||
if (vsInfo.sdk) {
|
||||
defaults.msvs_windows_target_platform_version = vsInfo.sdk
|
||||
}
|
||||
if (variables.target_arch === 'arm64') {
|
||||
if (vsInfo.versionMajor > 15 ||
|
||||
(vsInfo.versionMajor === 15 && vsInfo.versionMinor >= 9)) {
|
||||
defaults.msvs_enable_marmasm = 1
|
||||
} else {
|
||||
log.warn('Compiling ARM64 assembly is only available in\n' +
|
||||
'Visual Studio 2017 version 15.9 and above')
|
||||
}
|
||||
}
|
||||
variables.msbuild_path = vsInfo.msBuild
|
||||
}
|
||||
|
||||
// loop through the rest of the opts and add the unknown ones as variables.
|
||||
// this allows for module-specific configure flags like:
|
||||
//
|
||||
// $ node-gyp configure --shared-libxml2
|
||||
Object.keys(gyp.opts).forEach(function (opt) {
|
||||
if (opt === 'argv') {
|
||||
return
|
||||
}
|
||||
if (opt in gyp.configDefs) {
|
||||
return
|
||||
}
|
||||
variables[opt.replace(/-/g, '_')] = gyp.opts[opt]
|
||||
})
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
async function createConfigGypi ({ gyp, buildDir, nodeDir, vsInfo }) {
|
||||
const configFilename = 'config.gypi'
|
||||
const configPath = path.resolve(buildDir, configFilename)
|
||||
|
||||
log.verbose('build/' + configFilename, 'creating config file')
|
||||
|
||||
const config = await getCurrentConfigGypi({ gyp, nodeDir, vsInfo })
|
||||
|
||||
// ensures that any boolean values in config.gypi get stringified
|
||||
function boolsToString (k, v) {
|
||||
if (typeof v === 'boolean') {
|
||||
return String(v)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
log.silly('build/' + configFilename, config)
|
||||
|
||||
// now write out the config.gypi file to the build/ dir
|
||||
const prefix = '# Do not edit. File was generated by node-gyp\'s "configure" step'
|
||||
|
||||
const json = JSON.stringify(config, boolsToString, 2)
|
||||
log.verbose('build/' + configFilename, 'writing out config file: %s', configPath)
|
||||
await fs.promises.writeFile(configPath, [prefix, json, ''].join('\n'))
|
||||
|
||||
return configPath
|
||||
}
|
||||
|
||||
module.exports = createConfigGypi
|
||||
module.exports.test = {
|
||||
parseConfigGypi: parseConfigGypi,
|
||||
getCurrentConfigGypi: getCurrentConfigGypi
|
||||
}
|
||||
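The three regex passes in `parseConfigGypi()` are worth seeing on a concrete input. A minimal demonstration with a toy config.gypi string (the data is made up; the transforms are the ones used above):

```
// Demonstrates the three transforms parseConfigGypi() applies before JSON.parse.
const raw = [
  '# Do not edit. Generated file.',
  '{',
  "  'variables': {",
  "    'target_arch': 'x64',",
  "    'host_arch': 'x64'",
  '  }',
  '}'
].join('\n')

let config = raw
config = config.replace(/#.*/g, '')      // 1. strip comments
config = config.replace(/'$\s+'/mg, '')  // 2. join multiline strings
config = config.replace(/'/g, '"')       // 3. normalize ' into "

console.log(JSON.parse(config).variables.target_arch)  // prints: x64
```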
63
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/find-node-directory.js
generated
vendored
Normal file
63
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/find-node-directory.js
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const log = require('npmlog')
|
||||
|
||||
function findNodeDirectory (scriptLocation, processObj) {
|
||||
// set dirname and process if not passed in
|
||||
// this facilitates regression tests
|
||||
if (scriptLocation === undefined) {
|
||||
scriptLocation = __dirname
|
||||
}
|
||||
if (processObj === undefined) {
|
||||
processObj = process
|
||||
}
|
||||
|
||||
// Have a look to see what is above us, to try and work out where we are
|
||||
var npmParentDirectory = path.join(scriptLocation, '../../../..')
|
||||
log.verbose('node-gyp root', 'npm_parent_directory is ' +
|
||||
path.basename(npmParentDirectory))
|
||||
var nodeRootDir = ''
|
||||
|
||||
log.verbose('node-gyp root', 'Finding node root directory')
|
||||
if (path.basename(npmParentDirectory) === 'deps') {
|
||||
// We are in a build directory where this script lives in
|
||||
// deps/npm/node_modules/node-gyp/lib
|
||||
nodeRootDir = path.join(npmParentDirectory, '..')
|
||||
log.verbose('node-gyp root', 'in build directory, root = ' +
|
||||
nodeRootDir)
|
||||
} else if (path.basename(npmParentDirectory) === 'node_modules') {
|
||||
// We are in a node install directory where this script lives in
|
||||
// lib/node_modules/npm/node_modules/node-gyp/lib or
|
||||
// node_modules/npm/node_modules/node-gyp/lib depending on the
|
||||
// platform
|
||||
if (processObj.platform === 'win32') {
|
||||
nodeRootDir = path.join(npmParentDirectory, '..')
|
||||
} else {
|
||||
nodeRootDir = path.join(npmParentDirectory, '../..')
|
||||
}
|
||||
log.verbose('node-gyp root', 'in install directory, root = ' +
|
||||
nodeRootDir)
|
||||
} else {
|
||||
// We don't know where we are, try working it out from the location
|
||||
// of the node binary
|
||||
var nodeDir = path.dirname(processObj.execPath)
|
||||
var directoryUp = path.basename(nodeDir)
|
||||
if (directoryUp === 'bin') {
|
||||
nodeRootDir = path.join(nodeDir, '..')
|
||||
} else if (directoryUp === 'Release' || directoryUp === 'Debug') {
|
||||
// We are in a recently built node where the directory structure
// is that of a repository. On Windows we only need to go one
// level up; everywhere else, two.
|
||||
if (processObj.platform === 'win32') {
|
||||
nodeRootDir = path.join(nodeDir, '..')
|
||||
} else {
|
||||
nodeRootDir = path.join(nodeDir, '../..')
|
||||
}
|
||||
}
|
||||
// Else return the default blank, "".
|
||||
}
|
||||
return nodeRootDir
|
||||
}
|
||||
|
||||
module.exports = findNodeDirectory
|
||||
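A usage sketch for `findNodeDirectory()`. Both arguments are optional (they default to `__dirname` and `process`), which is what lets the regression tests pass in fake values; the require path and the example locations below are assumptions for illustration.

```
// Usage sketch; the deep require path assumes the vendored layout shown in this diff.
const findNodeDirectory = require('node-gyp/lib/find-node-directory')

// Default behaviour: infer the Node root from this module's location.
console.log(findNodeDirectory())

// Test-style call with an explicit script location and a fake process object.
console.log(findNodeDirectory(
  '/usr/lib/node_modules/npm/node_modules/node-gyp/lib',
  { platform: 'linux', execPath: '/usr/bin/node' }
))
```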
344
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/find-python.js
generated
vendored
Normal file
344
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/find-python.js
generated
vendored
Normal file
@@ -0,0 +1,344 @@
|
||||
'use strict'
|
||||
|
||||
const log = require('npmlog')
|
||||
const semver = require('semver')
|
||||
const cp = require('child_process')
|
||||
const extend = require('util')._extend // eslint-disable-line
|
||||
const win = process.platform === 'win32'
|
||||
const logWithPrefix = require('./util').logWithPrefix
|
||||
|
||||
const systemDrive = process.env.SystemDrive || 'C:'
|
||||
const username = process.env.USERNAME || process.env.USER || getOsUserInfo()
|
||||
const localAppData = process.env.LOCALAPPDATA || `${systemDrive}\\${username}\\AppData\\Local`
|
||||
const foundLocalAppData = process.env.LOCALAPPDATA || username
|
||||
const programFiles = process.env.ProgramW6432 || process.env.ProgramFiles || `${systemDrive}\\Program Files`
|
||||
const programFilesX86 = process.env['ProgramFiles(x86)'] || `${programFiles} (x86)`
|
||||
|
||||
const winDefaultLocationsArray = []
|
||||
for (const majorMinor of ['39', '38', '37', '36']) {
|
||||
if (foundLocalAppData) {
|
||||
winDefaultLocationsArray.push(
|
||||
`${localAppData}\\Programs\\Python\\Python${majorMinor}\\python.exe`,
|
||||
`${programFiles}\\Python${majorMinor}\\python.exe`,
|
||||
`${localAppData}\\Programs\\Python\\Python${majorMinor}-32\\python.exe`,
|
||||
`${programFiles}\\Python${majorMinor}-32\\python.exe`,
|
||||
`${programFilesX86}\\Python${majorMinor}-32\\python.exe`
|
||||
)
|
||||
} else {
|
||||
winDefaultLocationsArray.push(
|
||||
`${programFiles}\\Python${majorMinor}\\python.exe`,
|
||||
`${programFiles}\\Python${majorMinor}-32\\python.exe`,
|
||||
`${programFilesX86}\\Python${majorMinor}-32\\python.exe`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
function getOsUserInfo () {
|
||||
try {
|
||||
return require('os').userInfo().username
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
function PythonFinder (configPython, callback) {
|
||||
this.callback = callback
|
||||
this.configPython = configPython
|
||||
this.errorLog = []
|
||||
}
|
||||
|
||||
PythonFinder.prototype = {
|
||||
log: logWithPrefix(log, 'find Python'),
|
||||
argsExecutable: ['-c', 'import sys; print(sys.executable);'],
|
||||
argsVersion: ['-c', 'import sys; print("%s.%s.%s" % sys.version_info[:3]);'],
|
||||
semverRange: '>=3.6.0',
|
||||
|
||||
// These can be overridden for testing:
|
||||
execFile: cp.execFile,
|
||||
env: process.env,
|
||||
win: win,
|
||||
pyLauncher: 'py.exe',
|
||||
winDefaultLocations: winDefaultLocationsArray,
|
||||
|
||||
// Logs a message at verbose level, but also saves it to be displayed later
|
||||
// at error level if an error occurs. This should help diagnose the problem.
|
||||
addLog: function addLog (message) {
|
||||
this.log.verbose(message)
|
||||
this.errorLog.push(message)
|
||||
},
|
||||
|
||||
// Find Python by trying a sequence of possibilities.
|
||||
// Ignore errors, keep trying until Python is found.
|
||||
findPython: function findPython () {
|
||||
const SKIP = 0; const FAIL = 1
|
||||
var toCheck = getChecks.apply(this)
|
||||
|
||||
function getChecks () {
|
||||
if (this.env.NODE_GYP_FORCE_PYTHON) {
|
||||
return [{
|
||||
before: () => {
|
||||
this.addLog(
|
||||
'checking Python explicitly set from NODE_GYP_FORCE_PYTHON')
|
||||
this.addLog('- process.env.NODE_GYP_FORCE_PYTHON is ' +
|
||||
`"${this.env.NODE_GYP_FORCE_PYTHON}"`)
|
||||
},
|
||||
check: this.checkCommand,
|
||||
arg: this.env.NODE_GYP_FORCE_PYTHON
|
||||
}]
|
||||
}
|
||||
|
||||
var checks = [
|
||||
{
|
||||
before: () => {
|
||||
if (!this.configPython) {
|
||||
this.addLog(
|
||||
'Python is not set from command line or npm configuration')
|
||||
return SKIP
|
||||
}
|
||||
this.addLog('checking Python explicitly set from command line or ' +
|
||||
'npm configuration')
|
||||
this.addLog('- "--python=" or "npm config get python" is ' +
|
||||
`"${this.configPython}"`)
|
||||
},
|
||||
check: this.checkCommand,
|
||||
arg: this.configPython
|
||||
},
|
||||
{
|
||||
before: () => {
|
||||
if (!this.env.PYTHON) {
|
||||
this.addLog('Python is not set from environment variable ' +
|
||||
'PYTHON')
|
||||
return SKIP
|
||||
}
|
||||
this.addLog('checking Python explicitly set from environment ' +
|
||||
'variable PYTHON')
|
||||
this.addLog(`- process.env.PYTHON is "${this.env.PYTHON}"`)
|
||||
},
|
||||
check: this.checkCommand,
|
||||
arg: this.env.PYTHON
|
||||
},
|
||||
{
|
||||
before: () => { this.addLog('checking if "python3" can be used') },
|
||||
check: this.checkCommand,
|
||||
arg: 'python3'
|
||||
},
|
||||
{
|
||||
before: () => { this.addLog('checking if "python" can be used') },
|
||||
check: this.checkCommand,
|
||||
arg: 'python'
|
||||
}
|
||||
]
|
||||
|
||||
if (this.win) {
|
||||
for (var i = 0; i < this.winDefaultLocations.length; ++i) {
|
||||
const location = this.winDefaultLocations[i]
|
||||
checks.push({
|
||||
before: () => {
|
||||
this.addLog('checking if Python is ' +
|
||||
`${location}`)
|
||||
},
|
||||
check: this.checkExecPath,
|
||||
arg: location
|
||||
})
|
||||
}
|
||||
checks.push({
|
||||
before: () => {
|
||||
this.addLog(
|
||||
'checking if the py launcher can be used to find Python 3')
|
||||
},
|
||||
check: this.checkPyLauncher
|
||||
})
|
||||
}
|
||||
|
||||
return checks
|
||||
}
|
||||
|
||||
function runChecks (err) {
|
||||
this.log.silly('runChecks: err = %j', (err && err.stack) || err)
|
||||
|
||||
const check = toCheck.shift()
|
||||
if (!check) {
|
||||
return this.fail()
|
||||
}
|
||||
|
||||
const before = check.before.apply(this)
|
||||
if (before === SKIP) {
|
||||
return runChecks.apply(this)
|
||||
}
|
||||
if (before === FAIL) {
|
||||
return this.fail()
|
||||
}
|
||||
|
||||
const args = [runChecks.bind(this)]
|
||||
if (check.arg) {
|
||||
args.unshift(check.arg)
|
||||
}
|
||||
check.check.apply(this, args)
|
||||
}
|
||||
|
||||
runChecks.apply(this)
|
||||
},
|
||||
|
||||
// Check if command is a valid Python to use.
|
||||
// Will exit the Python finder on success.
|
||||
// If on Windows, run in a CMD shell to support BAT/CMD launchers.
|
||||
checkCommand: function checkCommand (command, errorCallback) {
|
||||
var exec = command
|
||||
var args = this.argsExecutable
|
||||
var shell = false
|
||||
if (this.win) {
|
||||
// Arguments have to be manually quoted
|
||||
exec = `"${exec}"`
|
||||
args = args.map(a => `"${a}"`)
|
||||
shell = true
|
||||
}
|
||||
|
||||
this.log.verbose(`- executing "${command}" to get executable path`)
|
||||
this.run(exec, args, shell, function (err, execPath) {
|
||||
// Possible outcomes:
|
||||
// - Error: not in PATH, not executable or execution fails
|
||||
// - Gibberish: the next command to check version will fail
|
||||
// - Absolute path to executable
|
||||
if (err) {
|
||||
this.addLog(`- "${command}" is not in PATH or produced an error`)
|
||||
return errorCallback(err)
|
||||
}
|
||||
this.addLog(`- executable path is "${execPath}"`)
|
||||
this.checkExecPath(execPath, errorCallback)
|
||||
}.bind(this))
|
||||
},
|
||||
|
||||
// Check if the py launcher can find a valid Python to use.
|
||||
// Will exit the Python finder on success.
|
||||
// Distributions of Python on Windows by default install with the "py.exe"
|
||||
// Python launcher which is more likely to exist than the Python executable
|
||||
// being in the $PATH.
|
||||
// Because the Python launcher supports Python 2 and Python 3, we should
|
||||
// explicitly request a Python 3 version. This is done by supplying "-3" as
|
||||
// the first command line argument. Since "py.exe -3" would be an invalid
|
||||
// executable for "execFile", we have to use the launcher to figure out
|
||||
// where the actual "python.exe" executable is located.
|
||||
checkPyLauncher: function checkPyLauncher (errorCallback) {
|
||||
this.log.verbose(
|
||||
`- executing "${this.pyLauncher}" to get Python 3 executable path`)
|
||||
this.run(this.pyLauncher, ['-3', ...this.argsExecutable], false,
|
||||
function (err, execPath) {
|
||||
// Possible outcomes: same as checkCommand
|
||||
if (err) {
|
||||
this.addLog(
|
||||
`- "${this.pyLauncher}" is not in PATH or produced an error`)
|
||||
return errorCallback(err)
|
||||
}
|
||||
this.addLog(`- executable path is "${execPath}"`)
|
||||
this.checkExecPath(execPath, errorCallback)
|
||||
}.bind(this))
|
||||
},
|
||||
|
||||
// Check if a Python executable is the correct version to use.
|
||||
// Will exit the Python finder on success.
|
||||
checkExecPath: function checkExecPath (execPath, errorCallback) {
|
||||
this.log.verbose(`- executing "${execPath}" to get version`)
|
||||
this.run(execPath, this.argsVersion, false, function (err, version) {
|
||||
// Possible outcomes:
|
||||
// - Error: executable can not be run (likely meaning the command wasn't
|
||||
// a Python executable and the previous command produced gibberish)
|
||||
// - Gibberish: somehow the last command produced an executable path,
|
||||
// this will fail when verifying the version
|
||||
// - Version of the Python executable
|
||||
if (err) {
|
||||
this.addLog(`- "${execPath}" could not be run`)
|
||||
return errorCallback(err)
|
||||
}
|
||||
this.addLog(`- version is "${version}"`)
|
||||
|
||||
const range = new semver.Range(this.semverRange)
|
||||
var valid = false
|
||||
try {
|
||||
valid = range.test(version)
|
||||
} catch (err) {
|
||||
this.log.silly('range.test() threw:\n%s', err.stack)
|
||||
this.addLog(`- "${execPath}" does not have a valid version`)
|
||||
this.addLog('- is it a Python executable?')
|
||||
return errorCallback(err)
|
||||
}
|
||||
|
||||
if (!valid) {
|
||||
this.addLog(`- version is ${version} - should be ${this.semverRange}`)
|
||||
this.addLog('- THIS VERSION OF PYTHON IS NOT SUPPORTED')
|
||||
return errorCallback(new Error(
|
||||
`Found unsupported Python version ${version}`))
|
||||
}
|
||||
this.succeed(execPath, version)
|
||||
}.bind(this))
|
||||
},
|
||||
|
||||
// Run an executable or shell command, trimming the output.
|
||||
run: function run (exec, args, shell, callback) {
|
||||
var env = extend({}, this.env)
|
||||
env.TERM = 'dumb'
|
||||
const opts = { env: env, shell: shell }
|
||||
|
||||
this.log.silly('execFile: exec = %j', exec)
|
||||
this.log.silly('execFile: args = %j', args)
|
||||
this.log.silly('execFile: opts = %j', opts)
|
||||
try {
|
||||
this.execFile(exec, args, opts, execFileCallback.bind(this))
|
||||
} catch (err) {
|
||||
this.log.silly('execFile: threw:\n%s', err.stack)
|
||||
return callback(err)
|
||||
}
|
||||
|
||||
function execFileCallback (err, stdout, stderr) {
|
||||
this.log.silly('execFile result: err = %j', (err && err.stack) || err)
|
||||
this.log.silly('execFile result: stdout = %j', stdout)
|
||||
this.log.silly('execFile result: stderr = %j', stderr)
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
const execPath = stdout.trim()
|
||||
callback(null, execPath)
|
||||
}
|
||||
},
|
||||
|
||||
succeed: function succeed (execPath, version) {
|
||||
this.log.info(`using Python version ${version} found at "${execPath}"`)
|
||||
process.nextTick(this.callback.bind(null, null, execPath))
|
||||
},
|
||||
|
||||
fail: function fail () {
|
||||
const errorLog = this.errorLog.join('\n')
|
||||
|
||||
const pathExample = this.win ? 'C:\\Path\\To\\python.exe'
|
||||
: '/path/to/pythonexecutable'
|
||||
// For Windows 80 col console, use up to the column before the one marked
|
||||
// with X (total 79 chars including logger prefix, 58 chars usable here):
|
||||
// X
|
||||
const info = [
|
||||
'**********************************************************',
|
||||
'You need to install the latest version of Python.',
|
||||
'Node-gyp should be able to find and use Python. If not,',
|
||||
'you can try one of the following options:',
|
||||
`- Use the switch --python="${pathExample}"`,
|
||||
' (accepted by both node-gyp and npm)',
|
||||
'- Set the environment variable PYTHON',
|
||||
'- Set the npm configuration variable python:',
|
||||
` npm config set python "${pathExample}"`,
|
||||
'For more information consult the documentation at:',
|
||||
'https://github.com/nodejs/node-gyp#installation',
|
||||
'**********************************************************'
|
||||
].join('\n')
|
||||
|
||||
this.log.error(`\n${errorLog}\n\n${info}\n`)
|
||||
process.nextTick(this.callback.bind(null, new Error(
|
||||
'Could not find any Python installation to use')))
|
||||
}
|
||||
}
|
||||
|
||||
function findPython (configPython, callback) {
|
||||
var finder = new PythonFinder(configPython, callback)
|
||||
finder.findPython()
|
||||
}
|
||||
|
||||
module.exports = findPython
|
||||
module.exports.test = {
|
||||
PythonFinder: PythonFinder,
|
||||
findPython: findPython
|
||||
}
|
||||
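`find-python` exports a callback-style `findPython(configPython, callback)`; passing `null` for the first argument means no `--python` override was configured. A minimal usage sketch (the require path assumes the vendored layout shown in this diff):

```
// Minimal usage sketch for the exported findPython(configPython, callback).
const findPython = require('node-gyp/lib/find-python')

findPython(null, function (err, pythonPath) {
  if (err) {
    console.error('no usable Python found:', err.message)
    return
  }
  console.log('using Python at', pythonPath)
})
```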
452
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/find-visualstudio.js
generated
vendored
Normal file
452
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/lib/find-visualstudio.js
generated
vendored
Normal file
@@ -0,0 +1,452 @@
|
||||
'use strict'
|
||||
|
||||
const log = require('npmlog')
|
||||
const execFile = require('child_process').execFile
|
||||
const fs = require('fs')
|
||||
const path = require('path').win32
|
||||
const logWithPrefix = require('./util').logWithPrefix
|
||||
const regSearchKeys = require('./util').regSearchKeys
|
||||
|
||||
function findVisualStudio (nodeSemver, configMsvsVersion, callback) {
|
||||
const finder = new VisualStudioFinder(nodeSemver, configMsvsVersion,
|
||||
callback)
|
||||
finder.findVisualStudio()
|
||||
}
|
||||
|
||||
function VisualStudioFinder (nodeSemver, configMsvsVersion, callback) {
|
||||
this.nodeSemver = nodeSemver
|
||||
this.configMsvsVersion = configMsvsVersion
|
||||
this.callback = callback
|
||||
this.errorLog = []
|
||||
this.validVersions = []
|
||||
}
|
||||
|
||||
VisualStudioFinder.prototype = {
|
||||
log: logWithPrefix(log, 'find VS'),
|
||||
|
||||
regSearchKeys: regSearchKeys,
|
||||
|
||||
// Logs a message at verbose level, but also saves it to be displayed later
|
||||
// at error level if an error occurs. This should help diagnose the problem.
|
||||
addLog: function addLog (message) {
|
||||
this.log.verbose(message)
|
||||
this.errorLog.push(message)
|
||||
},
|
||||
|
||||
findVisualStudio: function findVisualStudio () {
|
||||
this.configVersionYear = null
|
||||
this.configPath = null
|
||||
if (this.configMsvsVersion) {
|
||||
this.addLog('msvs_version was set from command line or npm config')
|
||||
if (this.configMsvsVersion.match(/^\d{4}$/)) {
|
||||
this.configVersionYear = parseInt(this.configMsvsVersion, 10)
|
||||
this.addLog(
|
||||
`- looking for Visual Studio version ${this.configVersionYear}`)
|
||||
} else {
|
||||
this.configPath = path.resolve(this.configMsvsVersion)
|
||||
this.addLog(
|
||||
`- looking for Visual Studio installed in "${this.configPath}"`)
|
||||
}
|
||||
} else {
|
||||
this.addLog('msvs_version not set from command line or npm config')
|
||||
}
|
||||
|
||||
if (process.env.VCINSTALLDIR) {
|
||||
this.envVcInstallDir =
|
||||
path.resolve(process.env.VCINSTALLDIR, '..')
|
||||
this.addLog('running in VS Command Prompt, installation path is:\n' +
|
||||
`"${this.envVcInstallDir}"\n- will only use this version`)
|
||||
} else {
|
||||
this.addLog('VCINSTALLDIR not set, not running in VS Command Prompt')
|
||||
}
|
||||
|
||||
this.findVisualStudio2017OrNewer((info) => {
|
||||
if (info) {
|
||||
return this.succeed(info)
|
||||
}
|
||||
this.findVisualStudio2015((info) => {
|
||||
if (info) {
|
||||
return this.succeed(info)
|
||||
}
|
||||
this.findVisualStudio2013((info) => {
|
||||
if (info) {
|
||||
return this.succeed(info)
|
||||
}
|
||||
this.fail()
|
||||
})
|
||||
})
|
||||
})
|
||||
},
  succeed: function succeed (info) {
    this.log.info(`using VS${info.versionYear} (${info.version}) found at:` +
                  `\n"${info.path}"` +
                  '\nrun with --verbose for detailed information')
    process.nextTick(this.callback.bind(null, null, info))
  },

  fail: function fail () {
    if (this.configMsvsVersion && this.envVcInstallDir) {
      this.errorLog.push(
        'msvs_version does not match this VS Command Prompt or the',
        'installation cannot be used.')
    } else if (this.configMsvsVersion) {
      // If msvs_version was specified but finding VS failed, print what would
      // have been accepted
      this.errorLog.push('')
      if (this.validVersions) {
        this.errorLog.push('valid versions for msvs_version:')
        this.validVersions.forEach((version) => {
          this.errorLog.push(`- "${version}"`)
        })
      } else {
        this.errorLog.push('no valid versions for msvs_version were found')
      }
    }

    const errorLog = this.errorLog.join('\n')

    // For Windows 80 col console, use up to the column before the one marked
    // with X (total 79 chars including logger prefix, 62 chars usable here):
    //                                                               X
    const infoLog = [
      '**************************************************************',
      'You need to install the latest version of Visual Studio',
      'including the "Desktop development with C++" workload.',
      'For more information consult the documentation at:',
      'https://github.com/nodejs/node-gyp#on-windows',
      '**************************************************************'
    ].join('\n')

    this.log.error(`\n${errorLog}\n\n${infoLog}\n`)
    process.nextTick(this.callback.bind(null, new Error(
      'Could not find any Visual Studio installation to use')))
  },

  // Invoke the PowerShell script to get information about Visual Studio 2017
  // or newer installations
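  // The command run below is roughly:
  //   powershell.exe -ExecutionPolicy Unrestricted -NoProfile -Command
  //     "&{Add-Type -Path '<dir>\Find-VisualStudio.cs'; [VisualStudioConfiguration.Main]::PrintJson()}"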
  findVisualStudio2017OrNewer: function findVisualStudio2017OrNewer (cb) {
    var ps = path.join(process.env.SystemRoot, 'System32',
      'WindowsPowerShell', 'v1.0', 'powershell.exe')
    var csFile = path.join(__dirname, 'Find-VisualStudio.cs')
    var psArgs = [
      '-ExecutionPolicy',
      'Unrestricted',
      '-NoProfile',
      '-Command',
      '&{Add-Type -Path \'' + csFile + '\';' + '[VisualStudioConfiguration.Main]::PrintJson()}'
    ]

    this.log.silly('Running', ps, psArgs)
    var child = execFile(ps, psArgs, { encoding: 'utf8' },
      (err, stdout, stderr) => {
        this.parseData(err, stdout, stderr, cb)
      })
    child.stdin.end()
  },

  // Parse the output of the PowerShell script and look for an installation
  // of Visual Studio 2017 or newer to use
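  // The parsed stdout is expected to be a JSON array with one entry per
  // installation, each carrying at least `path`, `version` and `packages`, e.g.
  //   [{ "path": "C:\\Program Files\\Microsoft Visual Studio\\2022\\Community",
  //      "version": "17.4.33110.190",
  //      "packages": ["Microsoft.VisualStudio.Component.VC.Tools.x86.x64", "..."] }]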
  parseData: function parseData (err, stdout, stderr, cb) {
    this.log.silly('PS stderr = %j', stderr)

    const failPowershell = () => {
      this.addLog(
        'could not use PowerShell to find Visual Studio 2017 or newer, try re-running with \'--loglevel silly\' for more details')
      cb(null)
    }

    if (err) {
      this.log.silly('PS err = %j', err && (err.stack || err))
      return failPowershell()
    }

    var vsInfo
    try {
      vsInfo = JSON.parse(stdout)
    } catch (e) {
      this.log.silly('PS stdout = %j', stdout)
      this.log.silly(e)
      return failPowershell()
    }

    if (!Array.isArray(vsInfo)) {
      this.log.silly('PS stdout = %j', stdout)
      return failPowershell()
    }

    vsInfo = vsInfo.map((info) => {
      this.log.silly(`processing installation: "${info.path}"`)
      info.path = path.resolve(info.path)
      var ret = this.getVersionInfo(info)
      ret.path = info.path
      ret.msBuild = this.getMSBuild(info, ret.versionYear)
      ret.toolset = this.getToolset(info, ret.versionYear)
      ret.sdk = this.getSDK(info)
      return ret
    })
    this.log.silly('vsInfo:', vsInfo)

    // Remove future versions or errors parsing version number
    vsInfo = vsInfo.filter((info) => {
      if (info.versionYear) {
        return true
      }
      this.addLog(`unknown version "${info.version}" found at "${info.path}"`)
      return false
    })

    // Sort to place newer versions first
    vsInfo.sort((a, b) => b.versionYear - a.versionYear)

    for (var i = 0; i < vsInfo.length; ++i) {
      const info = vsInfo[i]
      this.addLog(`checking VS${info.versionYear} (${info.version}) found ` +
                  `at:\n"${info.path}"`)

      if (info.msBuild) {
        this.addLog('- found "Visual Studio C++ core features"')
      } else {
        this.addLog('- "Visual Studio C++ core features" missing')
        continue
      }

      if (info.toolset) {
        this.addLog(`- found VC++ toolset: ${info.toolset}`)
      } else {
        this.addLog('- missing any VC++ toolset')
        continue
      }

      if (info.sdk) {
        this.addLog(`- found Windows SDK: ${info.sdk}`)
      } else {
        this.addLog('- missing any Windows SDK')
        continue
      }

      if (!this.checkConfigVersion(info.versionYear, info.path)) {
        continue
      }

      return cb(info)
    }

    this.addLog(
      'could not find a version of Visual Studio 2017 or newer to use')
    cb(null)
  },

  // Helper - process version information
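  // For example, a reported version of "17.4.33110.190" becomes
  // { version: '17.4.33110.190', versionMajor: 17, versionMinor: 4, versionYear: 2022 }.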
  getVersionInfo: function getVersionInfo (info) {
    const match = /^(\d+)\.(\d+)\..*/.exec(info.version)
    if (!match) {
      this.log.silly('- failed to parse version:', info.version)
      return {}
    }
    this.log.silly('- version match = %j', match)
    var ret = {
      version: info.version,
      versionMajor: parseInt(match[1], 10),
      versionMinor: parseInt(match[2], 10)
    }
    if (ret.versionMajor === 15) {
      ret.versionYear = 2017
      return ret
    }
    if (ret.versionMajor === 16) {
      ret.versionYear = 2019
      return ret
    }
    if (ret.versionMajor === 17) {
      ret.versionYear = 2022
      return ret
    }
    this.log.silly('- unsupported version:', ret.versionMajor)
    return {}
  },

  // Helper - process MSBuild information
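  // Resolves to <install path>\MSBuild\15.0\Bin\MSBuild.exe for VS 2017 and to
  // <install path>\MSBuild\Current\Bin\MSBuild.exe for VS 2019/2022, or null if
  // neither the MSBuild package nor the executable on disk can be found.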
  getMSBuild: function getMSBuild (info, versionYear) {
    const pkg = 'Microsoft.VisualStudio.VC.MSBuild.Base'
    const msbuildPath = path.join(info.path, 'MSBuild', 'Current', 'Bin', 'MSBuild.exe')
    if (info.packages.indexOf(pkg) !== -1) {
      this.log.silly('- found VC.MSBuild.Base')
      if (versionYear === 2017) {
        return path.join(info.path, 'MSBuild', '15.0', 'Bin', 'MSBuild.exe')
      }
      if (versionYear === 2019) {
        return msbuildPath
      }
    }
    // Visual Studio 2022 no longer lists the MSBuild package, so fall back to
    // checking for MSBuild.exe on disk
    if (fs.existsSync(msbuildPath)) {
      return msbuildPath
    }
    return null
  },

  // Helper - process toolset information
  getToolset: function getToolset (info, versionYear) {
    const pkg = 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64'
    const express = 'Microsoft.VisualStudio.WDExpress'

    if (info.packages.indexOf(pkg) !== -1) {
      this.log.silly('- found VC.Tools.x86.x64')
    } else if (info.packages.indexOf(express) !== -1) {
      this.log.silly('- found Visual Studio Express (looking for toolset)')
    } else {
      return null
    }

    if (versionYear === 2017) {
      return 'v141'
    } else if (versionYear === 2019) {
      return 'v142'
    } else if (versionYear === 2022) {
      return 'v143'
    }
    this.log.silly('- invalid versionYear:', versionYear)
    return null
  },

  // Helper - process Windows SDK information
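  // For example, the component "Microsoft.VisualStudio.Component.Windows10SDK.19041"
  // yields the SDK string "10.0.19041.0", and the Windows 8.1 SDK component yields "8.1".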
  getSDK: function getSDK (info) {
    const win8SDK = 'Microsoft.VisualStudio.Component.Windows81SDK'
    const win10SDKPrefix = 'Microsoft.VisualStudio.Component.Windows10SDK.'
    const win11SDKPrefix = 'Microsoft.VisualStudio.Component.Windows11SDK.'

    var Win10or11SDKVer = 0
    info.packages.forEach((pkg) => {
      if (!pkg.startsWith(win10SDKPrefix) && !pkg.startsWith(win11SDKPrefix)) {
        return
      }
      const parts = pkg.split('.')
      if (parts.length > 5 && parts[5] !== 'Desktop') {
        this.log.silly('- ignoring non-Desktop Win10/11SDK:', pkg)
        return
      }
      const foundSdkVer = parseInt(parts[4], 10)
      if (isNaN(foundSdkVer)) {
        // Microsoft.VisualStudio.Component.Windows10SDK.IpOverUsb
        this.log.silly('- failed to parse Win10/11SDK number:', pkg)
        return
      }
      this.log.silly('- found Win10/11SDK:', foundSdkVer)
      Win10or11SDKVer = Math.max(Win10or11SDKVer, foundSdkVer)
    })

    if (Win10or11SDKVer !== 0) {
      return `10.0.${Win10or11SDKVer}.0`
    } else if (info.packages.indexOf(win8SDK) !== -1) {
      this.log.silly('- found Win8SDK')
      return '8.1'
    }
    return null
  },

  // Find an installation of Visual Studio 2015 to use
  findVisualStudio2015: function findVisualStudio2015 (cb) {
    if (this.nodeSemver.major >= 19) {
      this.addLog(
        'not looking for VS2015 as it is only supported up to Node.js 18')
      return cb(null)
    }
    return this.findOldVS({
      version: '14.0',
      versionMajor: 14,
      versionMinor: 0,
      versionYear: 2015,
      toolset: 'v140'
    }, cb)
  },

  // Find an installation of Visual Studio 2013 to use
  findVisualStudio2013: function findVisualStudio2013 (cb) {
    if (this.nodeSemver.major >= 9) {
      this.addLog(
        'not looking for VS2013 as it is only supported up to Node.js 8')
      return cb(null)
    }
    return this.findOldVS({
      version: '12.0',
      versionMajor: 12,
      versionMinor: 0,
      versionYear: 2013,
      toolset: 'v120'
    }, cb)
  },

  // Helper - common code for VS2013 and VS2015
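  // Reads the Visual Studio install directory from the registry value named after
  // info.version under HKLM\Software[\Wow6432Node]\Microsoft\VisualStudio\SxS\VC7,
  // then reads MSBuildToolsPath from HKLM\Software\Microsoft\MSBuild\ToolsVersions\<version>.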
  findOldVS: function findOldVS (info, cb) {
    const regVC7 = ['HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7',
      'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7']
    const regMSBuild = 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions'

    this.addLog(`looking for Visual Studio ${info.versionYear}`)
    this.regSearchKeys(regVC7, info.version, [], (err, res) => {
      if (err) {
        this.addLog('- not found')
        return cb(null)
      }

      const vsPath = path.resolve(res, '..')
      this.addLog(`- found in "${vsPath}"`)

      const msBuildRegOpts = process.arch === 'ia32' ? [] : ['/reg:32']
      this.regSearchKeys([`${regMSBuild}\\${info.version}`],
        'MSBuildToolsPath', msBuildRegOpts, (err, res) => {
          if (err) {
            this.addLog(
              '- could not find MSBuild in registry for this version')
            return cb(null)
          }

          const msBuild = path.join(res, 'MSBuild.exe')
          this.addLog(`- MSBuild in "${msBuild}"`)

          if (!this.checkConfigVersion(info.versionYear, vsPath)) {
            return cb(null)
          }

          info.path = vsPath
          info.msBuild = msBuild
          info.sdk = null
          cb(info)
        })
    })
  },

  // After finding a usable version of Visual Studio:
  // - add it to validVersions to be displayed at the end if a specific
  //   version was requested and not found;
  // - check if this is the version that was requested;
  // - check if this matches the Visual Studio Command Prompt.
  checkConfigVersion: function checkConfigVersion (versionYear, vsPath) {
    this.validVersions.push(versionYear)
    this.validVersions.push(vsPath)

    if (this.configVersionYear && this.configVersionYear !== versionYear) {
      this.addLog('- msvs_version does not match this version')
      return false
    }
    if (this.configPath &&
        path.relative(this.configPath, vsPath) !== '') {
      this.addLog('- msvs_version does not point to this installation')
      return false
    }
    if (this.envVcInstallDir &&
        path.relative(this.envVcInstallDir, vsPath) !== '') {
      this.addLog('- does not match this Visual Studio Command Prompt')
      return false
    }

    return true
  }
}

module.exports = findVisualStudio
module.exports.test = {
  VisualStudioFinder: VisualStudioFinder,
  findVisualStudio: findVisualStudio
}
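
// Minimal manual check (illustrative sketch, not part of node-gyp's API surface):
// running this file directly on Windows prints which Visual Studio installation
// would be picked. It assumes the `semver` package is resolvable from here, as it
// is in node-gyp's own dependency tree; the callback receives (err, info) exactly
// as produced by succeed()/fail() above.
if (require.main === module) {
  const semver = require('semver')
  // second argument is the optional msvs_version setting; leave undefined to search
  findVisualStudio(semver.parse(process.version), undefined, (err, info) => {
    if (err) {
      console.error(err.message)
    } else {
      console.log(`found VS${info.versionYear} (${info.version}) at "${info.path}"`)
    }
  })
}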