first docs
4 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.flake8 generated vendored Normal file
@@ -0,0 +1,4 @@
[flake8]
max-complexity = 101
max-line-length = 88
extend-ignore = E203 # whitespace before ':' to agree with psf/black
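The `extend-ignore = E203` entry is there because black's slice formatting puts a space before `:` when a slice bound is a complex expression, which flake8's E203 check would otherwise flag. A minimal illustrative snippet (the variable names are invented for the example):

```python
# black formats a slice with a complex lower bound like this, with a space
# before the colon; flake8 E203 ("whitespace before ':'") would flag it,
# which is why the config above extends the ignore list.
data = list(range(10))
tail = data[len(data) // 2 :]
print(tail)  # [5, 6, 7, 8, 9]
```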
36 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml generated vendored Normal file
@@ -0,0 +1,36 @@
# TODO: Enable os: windows-latest
# TODO: Enable pytest --doctest-modules

name: Python_tests
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
  workflow_dispatch:
jobs:
  Python_tests:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      max-parallel: 8
      matrix:
        os: [macos-latest, ubuntu-latest]  # , windows-latest]
        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip setuptools
          pip install --editable ".[dev]"
      - run: ./gyp -V && ./gyp --version && gyp -V && gyp --version
      - name: Lint with flake8
        run: flake8 . --ignore=E203,W503 --max-complexity=101 --max-line-length=88 --show-source --statistics
      - name: Test with pytest
        run: pytest
      # - name: Run doctests with pytest
      #   run: pytest --doctest-modules
45 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml generated vendored Normal file
@@ -0,0 +1,45 @@
name: node-gyp integration
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
  workflow_dispatch:
jobs:
  integration:
    strategy:
      fail-fast: false
      matrix:
        os: [macos-latest, ubuntu-latest, windows-latest]
        python: ["3.7", "3.10"]

    runs-on: ${{ matrix.os }}
    steps:
      - name: Clone gyp-next
        uses: actions/checkout@v3
        with:
          path: gyp-next
      - name: Clone nodejs/node-gyp
        uses: actions/checkout@v3
        with:
          repository: nodejs/node-gyp
          path: node-gyp
      - uses: actions/setup-node@v3
        with:
          node-version: 14.x
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python }}
      - name: Install dependencies
        run: |
          cd node-gyp
          npm install --no-progress
      - name: Replace gyp in node-gyp
        shell: bash
        run: |
          rm -rf node-gyp/gyp
          cp -r gyp-next node-gyp/gyp
      - name: Run tests
        run: |
          cd node-gyp
          npm test
32 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml generated vendored Normal file
@@ -0,0 +1,32 @@
name: Node.js Windows integration

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
  workflow_dispatch:

jobs:
  build-windows:
    runs-on: windows-2019
    steps:
      - name: Clone gyp-next
        uses: actions/checkout@v3
        with:
          path: gyp-next
      - name: Clone nodejs/node
        uses: actions/checkout@v3
        with:
          repository: nodejs/node
          path: node
      - name: Install deps
        run: choco install nasm
      - name: Replace gyp in Node.js
        run: |
          rm -Recurse node/tools/gyp
          cp -Recurse gyp-next node/tools/gyp
      - name: Build Node.js
        run: |
          cd node
          ./vcbuild.bat
16 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/.github/workflows/release-please.yml generated vendored Normal file
@@ -0,0 +1,16 @@
on:
  push:
    branches:
      - main

name: release-please
jobs:
  release-please:
    runs-on: ubuntu-latest
    steps:
      - uses: google-github-actions/release-please-action@v3
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          release-type: python
          package-name: gyp-next
          bump-minor-pre-major: true
16 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/AUTHORS generated vendored Normal file
@@ -0,0 +1,16 @@
# Names should be added to this file like so:
# Name or Organization <email address>

Google Inc. <*@google.com>
Bloomberg Finance L.P. <*@bloomberg.net>
IBM Inc. <*@*.ibm.com>
Yandex LLC <*@yandex-team.ru>

Steven Knight <knight@baldmt.com>
Ryan Norton <rnorton10@gmail.com>
David J. Sankel <david@sankelsoftware.com>
Eric N. Vander Weele <ericvw@gmail.com>
Tom Freudenberg <th.freudenberg@gmail.com>
Julien Brianceau <jbriance@cisco.com>
Refael Ackermann <refack@gmail.com>
Ujjwal Sharma <ryzokuken@disroot.org>
233 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/CHANGELOG.md generated vendored Normal file
@@ -0,0 +1,233 @@
|
||||
# Changelog
|
||||
|
||||
## [0.14.0](https://github.com/nodejs/gyp-next/compare/v0.13.0...v0.14.0) (2022-10-08)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add command line argument for `gyp --version` ([#164](https://github.com/nodejs/gyp-next/issues/164)) ([5c9f4d0](https://github.com/nodejs/gyp-next/commit/5c9f4d05678dd855e18ed2327219e5d18e5374db))
|
||||
* ninja build for iOS ([#174](https://github.com/nodejs/gyp-next/issues/174)) ([b6f2714](https://github.com/nodejs/gyp-next/commit/b6f271424e0033d7ed54d437706695af2ba7a1bf))
|
||||
* **zos:** support IBM Open XL C/C++ & PL/I compilers on z/OS ([#178](https://github.com/nodejs/gyp-next/issues/178)) ([43a7211](https://github.com/nodejs/gyp-next/commit/43a72110ae3fafb13c9625cc7a969624b27cda47))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* lock windows env ([#163](https://github.com/nodejs/gyp-next/issues/163)) ([44bd0dd](https://github.com/nodejs/gyp-next/commit/44bd0ddc93ea0b5770a44dd326a2e4ae62c21442))
|
||||
* move configuration information into pyproject.toml ([#176](https://github.com/nodejs/gyp-next/issues/176)) ([d69d8ec](https://github.com/nodejs/gyp-next/commit/d69d8ece6dbff7af4f2ea073c9fd170baf8cb7f7))
|
||||
* node.js debugger adds stderr (but exit code is 0) -> shouldn't throw ([#179](https://github.com/nodejs/gyp-next/issues/179)) ([1a457d9](https://github.com/nodejs/gyp-next/commit/1a457d9ed08cfd30c9fa551bc5cf0d90fb583787))
|
||||
|
||||
## [0.13.0](https://www.github.com/nodejs/gyp-next/compare/v0.12.1...v0.13.0) (2022-05-11)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add PRODUCT_DIR_ABS variable ([#151](https://www.github.com/nodejs/gyp-next/issues/151)) ([80d2626](https://www.github.com/nodejs/gyp-next/commit/80d26263581db829b61b312a7bdb5cc791df7824))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* execvp: printf: Argument list too long ([#147](https://www.github.com/nodejs/gyp-next/issues/147)) ([c4e14f3](https://www.github.com/nodejs/gyp-next/commit/c4e14f301673fadbac3ab7882d0b5f4d02530cb9))
|
||||
|
||||
### [0.12.1](https://www.github.com/nodejs/gyp-next/compare/v0.12.0...v0.12.1) (2022-04-06)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **msvs:** avoid fixing path for arguments with "=" ([#143](https://www.github.com/nodejs/gyp-next/issues/143)) ([7e8f16e](https://www.github.com/nodejs/gyp-next/commit/7e8f16eb165e042e64bec98fa6c2a0232a42c26b))
|
||||
|
||||
## [0.12.0](https://www.github.com/nodejs/gyp-next/compare/v0.11.0...v0.12.0) (2022-04-04)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* support building shared libraries on z/OS ([#137](https://www.github.com/nodejs/gyp-next/issues/137)) ([293bcfa](https://www.github.com/nodejs/gyp-next/commit/293bcfa4c25c6adb743377adafc45a80fee492c6))
|
||||
|
||||
## [0.11.0](https://www.github.com/nodejs/gyp-next/compare/v0.10.1...v0.11.0) (2022-03-04)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add proper support for IBM i ([#140](https://www.github.com/nodejs/gyp-next/issues/140)) ([fdda4a3](https://www.github.com/nodejs/gyp-next/commit/fdda4a3038b8a7042ad960ce7a223687c24a21b1))
|
||||
|
||||
### [0.10.1](https://www.github.com/nodejs/gyp-next/compare/v0.10.0...v0.10.1) (2021-11-24)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **make:** only generate makefile for multiple toolsets if requested ([#133](https://www.github.com/nodejs/gyp-next/issues/133)) ([f463a77](https://www.github.com/nodejs/gyp-next/commit/f463a77705973289ea38fec1b244c922ac438e26))
|
||||
|
||||
## [0.10.0](https://www.github.com/nodejs/gyp-next/compare/v0.9.6...v0.10.0) (2021-08-26)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **msvs:** add support for Visual Studio 2022 ([#124](https://www.github.com/nodejs/gyp-next/issues/124)) ([4bd9215](https://www.github.com/nodejs/gyp-next/commit/4bd9215c44d300f06e916aec1d6327c22b78272d))
|
||||
|
||||
### [0.9.6](https://www.github.com/nodejs/gyp-next/compare/v0.9.5...v0.9.6) (2021-08-23)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* align flake8 test ([#122](https://www.github.com/nodejs/gyp-next/issues/122)) ([f1faa8d](https://www.github.com/nodejs/gyp-next/commit/f1faa8d3081e1a47e917ff910892f00dff16cf8a))
|
||||
* **msvs:** fix paths again in action command arguments ([#121](https://www.github.com/nodejs/gyp-next/issues/121)) ([7159dfb](https://www.github.com/nodejs/gyp-next/commit/7159dfbc5758c9ec717e215f2c36daf482c846a1))
|
||||
|
||||
### [0.9.5](https://www.github.com/nodejs/gyp-next/compare/v0.9.4...v0.9.5) (2021-08-18)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add python 3.6 to node-gyp integration test ([3462d4c](https://www.github.com/nodejs/gyp-next/commit/3462d4ce3c31cce747513dc7ca9760c81d57c60e))
|
||||
* revert for windows compatibility ([d078e7d](https://www.github.com/nodejs/gyp-next/commit/d078e7d7ae080ddae243188f6415f940376a7368))
|
||||
* support msvs_quote_cmd in ninja generator ([#117](https://www.github.com/nodejs/gyp-next/issues/117)) ([46486ac](https://www.github.com/nodejs/gyp-next/commit/46486ac6e9329529d51061e006a5b39631e46729))
|
||||
|
||||
### [0.9.4](https://www.github.com/nodejs/gyp-next/compare/v0.9.3...v0.9.4) (2021-08-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* .S is an extension for asm file on Windows ([#115](https://www.github.com/nodejs/gyp-next/issues/115)) ([d2fad44](https://www.github.com/nodejs/gyp-next/commit/d2fad44ef3a79ca8900f1307060153ded57053fc))
|
||||
|
||||
### [0.9.3](https://www.github.com/nodejs/gyp-next/compare/v0.9.2...v0.9.3) (2021-07-07)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* build failure with ninja and Python 3 on Windows ([#113](https://www.github.com/nodejs/gyp-next/issues/113)) ([c172d10](https://www.github.com/nodejs/gyp-next/commit/c172d105deff5db4244e583942215918fa80dd3c))
|
||||
|
||||
### [0.9.2](https://www.github.com/nodejs/gyp-next/compare/v0.9.1...v0.9.2) (2021-05-21)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add support of utf8 encoding ([#105](https://www.github.com/nodejs/gyp-next/issues/105)) ([4d0f93c](https://www.github.com/nodejs/gyp-next/commit/4d0f93c249286d1f0c0f665f5fe7346119f98cf1))
|
||||
|
||||
### [0.9.1](https://www.github.com/nodejs/gyp-next/compare/v0.9.0...v0.9.1) (2021-05-14)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* py lint ([3b6a8ee](https://www.github.com/nodejs/gyp-next/commit/3b6a8ee7a66193a8a6867eba9e1d2b70bdf04402))
|
||||
|
||||
## [0.9.0](https://www.github.com/nodejs/gyp-next/compare/v0.8.1...v0.9.0) (2021-05-13)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* use LDFLAGS_host for host toolset ([#98](https://www.github.com/nodejs/gyp-next/issues/98)) ([bea5c7b](https://www.github.com/nodejs/gyp-next/commit/bea5c7bd67d6ad32acbdce79767a5481c70675a2))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* msvs.py: remove overindentation ([#102](https://www.github.com/nodejs/gyp-next/issues/102)) ([3f83e99](https://www.github.com/nodejs/gyp-next/commit/3f83e99056d004d9579ceb786e06b624ddc36529))
|
||||
* update gyp.el to change case to cl-case ([#93](https://www.github.com/nodejs/gyp-next/issues/93)) ([13d5b66](https://www.github.com/nodejs/gyp-next/commit/13d5b66aab35985af9c2fb1174fdc6e1c1407ecc))
|
||||
|
||||
### [0.8.1](https://www.github.com/nodejs/gyp-next/compare/v0.8.0...v0.8.1) (2021-02-18)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* update shebang lines from python to python3 ([#94](https://www.github.com/nodejs/gyp-next/issues/94)) ([a1b0d41](https://www.github.com/nodejs/gyp-next/commit/a1b0d4171a8049a4ab7a614202063dec332f2df4))
|
||||
|
||||
## [0.8.0](https://www.github.com/nodejs/gyp-next/compare/v0.7.0...v0.8.0) (2021-01-15)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* remove support for Python 2
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* revert posix build job ([#86](https://www.github.com/nodejs/gyp-next/issues/86)) ([39dc34f](https://www.github.com/nodejs/gyp-next/commit/39dc34f0799c074624005fb9bbccf6e028607f9d))
|
||||
|
||||
|
||||
### gyp
|
||||
|
||||
* Remove support for Python 2 ([#88](https://www.github.com/nodejs/gyp-next/issues/88)) ([22e4654](https://www.github.com/nodejs/gyp-next/commit/22e465426fd892403c95534229af819a99c3f8dc))
|
||||
|
||||
## [0.7.0](https://www.github.com/nodejs/gyp-next/compare/v0.6.2...v0.7.0) (2020-12-17)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **msvs:** On Windows, arguments passed to the "action" commands are no longer transformed to replace slashes with backslashes.
|
||||
|
||||
### Features
|
||||
|
||||
* **xcode:** --cross-compiling overrides arch-specific settings ([973bae0](https://www.github.com/nodejs/gyp-next/commit/973bae0b7b08be7b680ecae9565fbd04b3e0787d))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **msvs:** do not fix paths in action command arguments ([fc22f83](https://www.github.com/nodejs/gyp-next/commit/fc22f8335e2016da4aae4f4233074bd651d2faea))
|
||||
* cmake on python 3 ([fd61f5f](https://www.github.com/nodejs/gyp-next/commit/fd61f5faa5275ec8fc98e3c7868c0dd46f109540))
|
||||
* ValueError: invalid mode: 'rU' while trying to load binding.gyp ([d0504e6](https://www.github.com/nodejs/gyp-next/commit/d0504e6700ce48f44957a4d5891b142a60be946f))
|
||||
* xcode cmake parsing ([eefe8d1](https://www.github.com/nodejs/gyp-next/commit/eefe8d10e99863bc4ac7e2ed32facd608d400d4b))
|
||||
|
||||
### [0.6.2](https://www.github.com/nodejs/gyp-next/compare/v0.6.1...v0.6.2) (2020-10-16)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* do not rewrite absolute paths to avoid long paths ([#74](https://www.github.com/nodejs/gyp-next/issues/74)) ([c2ccc1a](https://www.github.com/nodejs/gyp-next/commit/c2ccc1a81f7f94433a94f4d01a2e820db4c4331a))
|
||||
* only include MARMASM when toolset is target ([5a2794a](https://www.github.com/nodejs/gyp-next/commit/5a2794aefb58f0c00404ff042b61740bc8b8d5cd))
|
||||
|
||||
### [0.6.1](https://github.com/nodejs/gyp-next/compare/v0.6.0...v0.6.1) (2020-10-14)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Correctly rename object files for absolute paths in MSVS generator.
|
||||
|
||||
## [0.6.0](https://github.com/nodejs/gyp-next/compare/v0.5.0...v0.6.0) (2020-10-13)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* The Makefile generator will now output shared libraries directly to the product directory on all platforms (previously only macOS).
|
||||
|
||||
## [0.5.0](https://github.com/nodejs/gyp-next/compare/v0.4.0...v0.5.0) (2020-09-30)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Extended compile_commands_json generator to consider more file extensions than just `c` and `cc`. `cpp` and `cxx` are now supported.
|
||||
* Source files with duplicate basenames are now supported.
|
||||
|
||||
### Removed
|
||||
|
||||
* The `--no-duplicate-basename-check` option was removed.
|
||||
* The `msvs_enable_marmasm` configuration option was removed in favor of auto-inclusion of the "marmasm" sections for Windows on ARM.
|
||||
|
||||
## [0.4.0](https://github.com/nodejs/gyp-next/compare/v0.3.0...v0.4.0) (2020-07-14)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Added support for passing arbitrary architectures to Xcode builds, enables `arm64` builds.
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Fixed a bug on Solaris where copying archives failed.
|
||||
|
||||
## [0.3.0](https://github.com/nodejs/gyp-next/compare/v0.2.1...v0.3.0) (2020-06-06)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Added support for MSVC cross-compilation. This allows compilation on x64 for a Windows ARM target.
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Fixed XCode CLT version detection on macOS Catalina.
|
||||
|
||||
### [0.2.1](https://github.com/nodejs/gyp-next/compare/v0.2.0...v0.2.1) (2020-05-05)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Relicensed to Node.js contributors.
|
||||
* Fixed Windows bug introduced in v0.2.0.
|
||||
|
||||
## [0.2.0](https://github.com/nodejs/gyp-next/releases/tag/v0.2.0) (2020-04-06)
|
||||
|
||||
This is the first release of this project, based on https://chromium.googlesource.com/external/gyp with changes made over the years in Node.js and node-gyp.
|
||||
4 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md generated vendored Normal file
@@ -0,0 +1,4 @@
# Code of Conduct

* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md)
* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/HEAD/Moderation-Policy.md)
32 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/CONTRIBUTING.md generated vendored Normal file
@@ -0,0 +1,32 @@
# Contributing to gyp-next

## Code of Conduct

This project is bound to the [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md).

<a id="developers-certificate-of-origin"></a>
## Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

* (a) The contribution was created in whole or in part by me and I
  have the right to submit it under the open source license
  indicated in the file; or

* (b) The contribution is based upon previous work that, to the best
  of my knowledge, is covered under an appropriate open source
  license and I have the right under that license to submit that
  work with modifications, whether created in whole or in part
  by me, under the same open source license (unless I am
  permitted to submit under a different license), as indicated
  in the file; or

* (c) The contribution was provided directly to me by some other
  person who certified (a), (b) or (c) and I have not modified
  it.

* (d) I understand and agree that this project and the contribution
  are public and that a record of the contribution (including all
  personal information I submit with it, including my sign-off) is
  maintained indefinitely and may be redistributed consistent with
  this project or the open source license(s) involved.
28 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/LICENSE generated vendored Normal file
@@ -0,0 +1,28 @@
Copyright (c) 2020 Node.js contributors. All rights reserved.
Copyright (c) 2009 Google Inc. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
7 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/README.md generated vendored Normal file
@@ -0,0 +1,7 @@
GYP can Generate Your Projects.
===================================

Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out ```md-pages``` branch to read those documents offline.

__gyp-next__ is [released](https://github.com/nodejs/gyp-next/releases) to the [__Python Packaging Index__](https://pypi.org/project/gyp-next) (PyPI) and can be installed with the command:
* `python3 -m pip install gyp-next`
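As a hedged companion to the install note above, a minimal sketch of what a GYP input file and invocation can look like; the `hello` target, `hello.c`, and the choice of generator are assumptions made for the example and are not part of the vendored README.

```python
# hello.gyp -- hypothetical minimal GYP input (GYP files use Python-literal
# syntax). After `python3 -m pip install gyp-next`, build files could be
# generated with something like:
#   gyp hello.gyp --depth=. -f ninja    # or -f make / -f msvs
{
    "targets": [
        {
            "target_name": "hello",     # assumed target name
            "type": "executable",
            "sources": ["hello.c"],     # assumed source file
        },
    ],
}
```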
12 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc generated vendored Normal file
@@ -0,0 +1,12 @@
// Copyright (c) 2013 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
// then used during the final link for modules that have large PDBs. Otherwise,
// the linker will generate a pdb with a page size of 1KB, which imposes a limit
// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
// (rather than the linker), this limit is avoided. With this in place PDBs may
// grow to 2GB.
//
// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
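A hedged sketch of how a target might opt into the `msvs_large_pdb` mechanism referenced in the comment above; the target definition below is invented for illustration, and only the setting name comes from the source.

```python
# Hypothetical excerpt from a .gyp target (GYP's Python-literal syntax).
# With 'msvs_large_pdb' set, MSVSUtil.py arranges for this shim to seed an
# empty .pdb with a 4KB page size, so the final PDB can grow past 1GB.
{
    "target_name": "big_module",    # assumed name
    "type": "shared_library",
    "msvs_large_pdb": 1,            # enable the large-PDB shim described above
}
```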
8 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/gyp generated vendored Executable file
@@ -0,0 +1,8 @@
#!/bin/sh
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

set -e
base=$(dirname "$0")
exec python "${base}/gyp_main.py" "$@"
5 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/gyp.bat generated vendored Executable file
@@ -0,0 +1,5 @@
@rem Copyright (c) 2009 Google Inc. All rights reserved.
@rem Use of this source code is governed by a BSD-style license that can be
@rem found in the LICENSE file.

@python "%~dp0gyp_main.py" %*
45 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/gyp_main.py generated vendored Executable file
@@ -0,0 +1,45 @@
#!/usr/bin/env python3

# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import sys
import subprocess


def IsCygwin():
    # Function copied from pylib/gyp/common.py
    try:
        out = subprocess.Popen(
            "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        stdout, _ = out.communicate()
        return "CYGWIN" in stdout.decode("utf-8")
    except Exception:
        return False


def UnixifyPath(path):
    try:
        if not IsCygwin():
            return path
        out = subprocess.Popen(
            ["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        stdout, _ = out.communicate()
        return stdout.decode("utf-8")
    except Exception:
        return path


# Make sure we're using the version of pylib in this repo, not one installed
# elsewhere on the system. Also convert to Unix style path on Cygwin systems,
# else the 'gyp' library will not be found
path = UnixifyPath(sys.argv[0])
sys.path.insert(0, os.path.join(os.path.dirname(path), "pylib"))
import gyp  # noqa: E402

if __name__ == "__main__":
    sys.exit(gyp.script_main())
367 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py generated vendored Normal file
@@ -0,0 +1,367 @@
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""New implementation of Visual Studio project generation."""
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import random
|
||||
from operator import attrgetter
|
||||
|
||||
import gyp.common
|
||||
|
||||
|
||||
def cmp(x, y):
|
||||
return (x > y) - (x < y)
|
||||
|
||||
|
||||
# Initialize random number generator
|
||||
random.seed()
|
||||
|
||||
# GUIDs for project types
|
||||
ENTRY_TYPE_GUIDS = {
|
||||
"project": "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}",
|
||||
"folder": "{2150E333-8FDC-42A3-9474-1A3956D46DE8}",
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Helper functions
|
||||
|
||||
|
||||
def MakeGuid(name, seed="msvs_new"):
|
||||
"""Returns a GUID for the specified target name.
|
||||
|
||||
Args:
|
||||
name: Target name.
|
||||
seed: Seed for MD5 hash.
|
||||
Returns:
|
||||
A GUID-line string calculated from the name and seed.
|
||||
|
||||
This generates something which looks like a GUID, but depends only on the
|
||||
name and seed. This means the same name/seed will always generate the same
|
||||
GUID, so that projects and solutions which refer to each other can explicitly
|
||||
determine the GUID to refer to explicitly. It also means that the GUID will
|
||||
not change when the project for a target is rebuilt.
|
||||
"""
|
||||
# Calculate a MD5 signature for the seed and name.
|
||||
d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
|
||||
# Convert most of the signature to GUID form (discard the rest)
|
||||
guid = (
|
||||
"{"
|
||||
+ d[:8]
|
||||
+ "-"
|
||||
+ d[8:12]
|
||||
+ "-"
|
||||
+ d[12:16]
|
||||
+ "-"
|
||||
+ d[16:20]
|
||||
+ "-"
|
||||
+ d[20:32]
|
||||
+ "}"
|
||||
)
|
||||
return guid
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class MSVSSolutionEntry:
|
||||
def __cmp__(self, other):
|
||||
# Sort by name then guid (so things are in order on vs2008).
|
||||
return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
|
||||
|
||||
|
||||
class MSVSFolder(MSVSSolutionEntry):
|
||||
"""Folder in a Visual Studio project or solution."""
|
||||
|
||||
def __init__(self, path, name=None, entries=None, guid=None, items=None):
|
||||
"""Initializes the folder.
|
||||
|
||||
Args:
|
||||
path: Full path to the folder.
|
||||
name: Name of the folder.
|
||||
entries: List of folder entries to nest inside this folder. May contain
|
||||
Folder or Project objects. May be None, if the folder is empty.
|
||||
guid: GUID to use for folder, if not None.
|
||||
items: List of solution items to include in the folder project. May be
|
||||
None, if the folder does not directly contain items.
|
||||
"""
|
||||
if name:
|
||||
self.name = name
|
||||
else:
|
||||
# Use last layer.
|
||||
self.name = os.path.basename(path)
|
||||
|
||||
self.path = path
|
||||
self.guid = guid
|
||||
|
||||
# Copy passed lists (or set to empty lists)
|
||||
self.entries = sorted(entries or [], key=attrgetter("path"))
|
||||
self.items = list(items or [])
|
||||
|
||||
self.entry_type_guid = ENTRY_TYPE_GUIDS["folder"]
|
||||
|
||||
def get_guid(self):
|
||||
if self.guid is None:
|
||||
# Use consistent guids for folders (so things don't regenerate).
|
||||
self.guid = MakeGuid(self.path, seed="msvs_folder")
|
||||
return self.guid
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class MSVSProject(MSVSSolutionEntry):
|
||||
"""Visual Studio project."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
path,
|
||||
name=None,
|
||||
dependencies=None,
|
||||
guid=None,
|
||||
spec=None,
|
||||
build_file=None,
|
||||
config_platform_overrides=None,
|
||||
fixpath_prefix=None,
|
||||
):
|
||||
"""Initializes the project.
|
||||
|
||||
Args:
|
||||
path: Absolute path to the project file.
|
||||
name: Name of project. If None, the name will be the same as the base
|
||||
name of the project file.
|
||||
dependencies: List of other Project objects this project is dependent
|
||||
upon, if not None.
|
||||
guid: GUID to use for project, if not None.
|
||||
spec: Dictionary specifying how to build this project.
|
||||
build_file: Filename of the .gyp file that the vcproj file comes from.
|
||||
config_platform_overrides: optional dict of configuration platforms to
|
||||
used in place of the default for this target.
|
||||
fixpath_prefix: the path used to adjust the behavior of _fixpath
|
||||
"""
|
||||
self.path = path
|
||||
self.guid = guid
|
||||
self.spec = spec
|
||||
self.build_file = build_file
|
||||
# Use project filename if name not specified
|
||||
self.name = name or os.path.splitext(os.path.basename(path))[0]
|
||||
|
||||
# Copy passed lists (or set to empty lists)
|
||||
self.dependencies = list(dependencies or [])
|
||||
|
||||
self.entry_type_guid = ENTRY_TYPE_GUIDS["project"]
|
||||
|
||||
if config_platform_overrides:
|
||||
self.config_platform_overrides = config_platform_overrides
|
||||
else:
|
||||
self.config_platform_overrides = {}
|
||||
self.fixpath_prefix = fixpath_prefix
|
||||
self.msbuild_toolset = None
|
||||
|
||||
def set_dependencies(self, dependencies):
|
||||
self.dependencies = list(dependencies or [])
|
||||
|
||||
def get_guid(self):
|
||||
if self.guid is None:
|
||||
# Set GUID from path
|
||||
# TODO(rspangler): This is fragile.
|
||||
# 1. We can't just use the project filename sans path, since there could
|
||||
# be multiple projects with the same base name (for example,
|
||||
# foo/unittest.vcproj and bar/unittest.vcproj).
|
||||
# 2. The path needs to be relative to $SOURCE_ROOT, so that the project
|
||||
# GUID is the same whether it's included from base/base.sln or
|
||||
# foo/bar/baz/baz.sln.
|
||||
# 3. The GUID needs to be the same each time this builder is invoked, so
|
||||
# that we don't need to rebuild the solution when the project changes.
|
||||
# 4. We should be able to handle pre-built project files by reading the
|
||||
# GUID from the files.
|
||||
self.guid = MakeGuid(self.name)
|
||||
return self.guid
|
||||
|
||||
def set_msbuild_toolset(self, msbuild_toolset):
|
||||
self.msbuild_toolset = msbuild_toolset
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class MSVSSolution:
|
||||
"""Visual Studio solution."""
|
||||
|
||||
def __init__(
|
||||
self, path, version, entries=None, variants=None, websiteProperties=True
|
||||
):
|
||||
"""Initializes the solution.
|
||||
|
||||
Args:
|
||||
path: Path to solution file.
|
||||
version: Format version to emit.
|
||||
entries: List of entries in solution. May contain Folder or Project
|
||||
objects. May be None, if the folder is empty.
|
||||
variants: List of build variant strings. If none, a default list will
|
||||
be used.
|
||||
websiteProperties: Flag to decide if the website properties section
|
||||
is generated.
|
||||
"""
|
||||
self.path = path
|
||||
self.websiteProperties = websiteProperties
|
||||
self.version = version
|
||||
|
||||
# Copy passed lists (or set to empty lists)
|
||||
self.entries = list(entries or [])
|
||||
|
||||
if variants:
|
||||
# Copy passed list
|
||||
self.variants = variants[:]
|
||||
else:
|
||||
# Use default
|
||||
self.variants = ["Debug|Win32", "Release|Win32"]
|
||||
# TODO(rspangler): Need to be able to handle a mapping of solution config
|
||||
# to project config. Should we be able to handle variants being a dict,
|
||||
# or add a separate variant_map variable? If it's a dict, we can't
|
||||
# guarantee the order of variants since dict keys aren't ordered.
|
||||
|
||||
# TODO(rspangler): Automatically write to disk for now; should delay until
|
||||
# node-evaluation time.
|
||||
self.Write()
|
||||
|
||||
def Write(self, writer=gyp.common.WriteOnDiff):
|
||||
"""Writes the solution file to disk.
|
||||
|
||||
Raises:
|
||||
IndexError: An entry appears multiple times.
|
||||
"""
|
||||
# Walk the entry tree and collect all the folders and projects.
|
||||
all_entries = set()
|
||||
entries_to_check = self.entries[:]
|
||||
while entries_to_check:
|
||||
e = entries_to_check.pop(0)
|
||||
|
||||
# If this entry has been visited, nothing to do.
|
||||
if e in all_entries:
|
||||
continue
|
||||
|
||||
all_entries.add(e)
|
||||
|
||||
# If this is a folder, check its entries too.
|
||||
if isinstance(e, MSVSFolder):
|
||||
entries_to_check += e.entries
|
||||
|
||||
all_entries = sorted(all_entries, key=attrgetter("path"))
|
||||
|
||||
# Open file and print header
|
||||
f = writer(self.path)
|
||||
f.write(
|
||||
"Microsoft Visual Studio Solution File, "
|
||||
"Format Version %s\r\n" % self.version.SolutionVersion()
|
||||
)
|
||||
f.write("# %s\r\n" % self.version.Description())
|
||||
|
||||
# Project entries
|
||||
sln_root = os.path.split(self.path)[0]
|
||||
for e in all_entries:
|
||||
relative_path = gyp.common.RelativePath(e.path, sln_root)
|
||||
# msbuild does not accept an empty folder_name.
|
||||
# use '.' in case relative_path is empty.
|
||||
folder_name = relative_path.replace("/", "\\") or "."
|
||||
f.write(
|
||||
'Project("%s") = "%s", "%s", "%s"\r\n'
|
||||
% (
|
||||
e.entry_type_guid, # Entry type GUID
|
||||
e.name, # Folder name
|
||||
folder_name, # Folder name (again)
|
||||
e.get_guid(), # Entry GUID
|
||||
)
|
||||
)
|
||||
|
||||
# TODO(rspangler): Need a way to configure this stuff
|
||||
if self.websiteProperties:
|
||||
f.write(
|
||||
"\tProjectSection(WebsiteProperties) = preProject\r\n"
|
||||
'\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
|
||||
'\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
|
||||
"\tEndProjectSection\r\n"
|
||||
)
|
||||
|
||||
if isinstance(e, MSVSFolder):
|
||||
if e.items:
|
||||
f.write("\tProjectSection(SolutionItems) = preProject\r\n")
|
||||
for i in e.items:
|
||||
f.write(f"\t\t{i} = {i}\r\n")
|
||||
f.write("\tEndProjectSection\r\n")
|
||||
|
||||
if isinstance(e, MSVSProject):
|
||||
if e.dependencies:
|
||||
f.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
|
||||
for d in e.dependencies:
|
||||
f.write(f"\t\t{d.get_guid()} = {d.get_guid()}\r\n")
|
||||
f.write("\tEndProjectSection\r\n")
|
||||
|
||||
f.write("EndProject\r\n")
|
||||
|
||||
# Global section
|
||||
f.write("Global\r\n")
|
||||
|
||||
# Configurations (variants)
|
||||
f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n")
|
||||
for v in self.variants:
|
||||
f.write(f"\t\t{v} = {v}\r\n")
|
||||
f.write("\tEndGlobalSection\r\n")
|
||||
|
||||
# Sort config guids for easier diffing of solution changes.
|
||||
config_guids = []
|
||||
config_guids_overrides = {}
|
||||
for e in all_entries:
|
||||
if isinstance(e, MSVSProject):
|
||||
config_guids.append(e.get_guid())
|
||||
config_guids_overrides[e.get_guid()] = e.config_platform_overrides
|
||||
config_guids.sort()
|
||||
|
||||
f.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n")
|
||||
for g in config_guids:
|
||||
for v in self.variants:
|
||||
nv = config_guids_overrides[g].get(v, v)
|
||||
# Pick which project configuration to build for this solution
|
||||
# configuration.
|
||||
f.write(
|
||||
"\t\t%s.%s.ActiveCfg = %s\r\n"
|
||||
% (
|
||||
g, # Project GUID
|
||||
v, # Solution build configuration
|
||||
nv, # Project build config for that solution config
|
||||
)
|
||||
)
|
||||
|
||||
# Enable project in this solution configuration.
|
||||
f.write(
|
||||
"\t\t%s.%s.Build.0 = %s\r\n"
|
||||
% (
|
||||
g, # Project GUID
|
||||
v, # Solution build configuration
|
||||
nv, # Project build config for that solution config
|
||||
)
|
||||
)
|
||||
f.write("\tEndGlobalSection\r\n")
|
||||
|
||||
# TODO(rspangler): Should be able to configure this stuff too (though I've
|
||||
# never seen this be any different)
|
||||
f.write("\tGlobalSection(SolutionProperties) = preSolution\r\n")
|
||||
f.write("\t\tHideSolutionNode = FALSE\r\n")
|
||||
f.write("\tEndGlobalSection\r\n")
|
||||
|
||||
# Folder mappings
|
||||
# Omit this section if there are no folders
|
||||
if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
|
||||
f.write("\tGlobalSection(NestedProjects) = preSolution\r\n")
|
||||
for e in all_entries:
|
||||
if not isinstance(e, MSVSFolder):
|
||||
continue # Does not apply to projects, only folders
|
||||
for subentry in e.entries:
|
||||
f.write(f"\t\t{subentry.get_guid()} = {e.get_guid()}\r\n")
|
||||
f.write("\tEndGlobalSection\r\n")
|
||||
|
||||
f.write("EndGlobal\r\n")
|
||||
|
||||
f.close()
|
||||
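To make the MakeGuid behaviour above concrete, a small self-contained sketch (the project name is invented): the GUID is just a reshaped MD5 of seed plus name, so identical inputs always produce the same GUID and regenerating a project does not churn the solution file.

```python
import hashlib

def make_guid(name, seed="msvs_new"):
    # Mirrors MakeGuid above: MD5 of seed + name, reshaped into GUID form.
    d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
    return "{%s-%s-%s-%s-%s}" % (d[:8], d[8:12], d[12:16], d[16:20], d[20:32])

# Deterministic: identical inputs give identical GUIDs across runs.
assert make_guid("my_project") == make_guid("my_project")
print(make_guid("my_project"))
```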
206 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py generated vendored Normal file
@@ -0,0 +1,206 @@
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Visual Studio project reader/writer."""
|
||||
|
||||
import gyp.easy_xml as easy_xml
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class Tool:
|
||||
"""Visual Studio tool."""
|
||||
|
||||
def __init__(self, name, attrs=None):
|
||||
"""Initializes the tool.
|
||||
|
||||
Args:
|
||||
name: Tool name.
|
||||
attrs: Dict of tool attributes; may be None.
|
||||
"""
|
||||
self._attrs = attrs or {}
|
||||
self._attrs["Name"] = name
|
||||
|
||||
def _GetSpecification(self):
|
||||
"""Creates an element for the tool.
|
||||
|
||||
Returns:
|
||||
A new xml.dom.Element for the tool.
|
||||
"""
|
||||
return ["Tool", self._attrs]
|
||||
|
||||
|
||||
class Filter:
|
||||
"""Visual Studio filter - that is, a virtual folder."""
|
||||
|
||||
def __init__(self, name, contents=None):
|
||||
"""Initializes the folder.
|
||||
|
||||
Args:
|
||||
name: Filter (folder) name.
|
||||
contents: List of filenames and/or Filter objects contained.
|
||||
"""
|
||||
self.name = name
|
||||
self.contents = list(contents or [])
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class Writer:
|
||||
"""Visual Studio XML project writer."""
|
||||
|
||||
def __init__(self, project_path, version, name, guid=None, platforms=None):
|
||||
"""Initializes the project.
|
||||
|
||||
Args:
|
||||
project_path: Path to the project file.
|
||||
version: Format version to emit.
|
||||
name: Name of the project.
|
||||
guid: GUID to use for project, if not None.
|
||||
platforms: Array of string, the supported platforms. If null, ['Win32']
|
||||
"""
|
||||
self.project_path = project_path
|
||||
self.version = version
|
||||
self.name = name
|
||||
self.guid = guid
|
||||
|
||||
# Default to Win32 for platforms.
|
||||
if not platforms:
|
||||
platforms = ["Win32"]
|
||||
|
||||
# Initialize the specifications of the various sections.
|
||||
self.platform_section = ["Platforms"]
|
||||
for platform in platforms:
|
||||
self.platform_section.append(["Platform", {"Name": platform}])
|
||||
self.tool_files_section = ["ToolFiles"]
|
||||
self.configurations_section = ["Configurations"]
|
||||
self.files_section = ["Files"]
|
||||
|
||||
# Keep a dict keyed on filename to speed up access.
|
||||
self.files_dict = dict()
|
||||
|
||||
def AddToolFile(self, path):
|
||||
"""Adds a tool file to the project.
|
||||
|
||||
Args:
|
||||
path: Relative path from project to tool file.
|
||||
"""
|
||||
self.tool_files_section.append(["ToolFile", {"RelativePath": path}])
|
||||
|
||||
def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
|
||||
"""Returns the specification for a configuration.
|
||||
|
||||
Args:
|
||||
config_type: Type of configuration node.
|
||||
config_name: Configuration name.
|
||||
attrs: Dict of configuration attributes; may be None.
|
||||
tools: List of tools (strings or Tool objects); may be None.
|
||||
Returns:
|
||||
"""
|
||||
# Handle defaults
|
||||
if not attrs:
|
||||
attrs = {}
|
||||
if not tools:
|
||||
tools = []
|
||||
|
||||
# Add configuration node and its attributes
|
||||
node_attrs = attrs.copy()
|
||||
node_attrs["Name"] = config_name
|
||||
specification = [config_type, node_attrs]
|
||||
|
||||
# Add tool nodes and their attributes
|
||||
if tools:
|
||||
for t in tools:
|
||||
if isinstance(t, Tool):
|
||||
specification.append(t._GetSpecification())
|
||||
else:
|
||||
specification.append(Tool(t)._GetSpecification())
|
||||
return specification
|
||||
|
||||
def AddConfig(self, name, attrs=None, tools=None):
|
||||
"""Adds a configuration to the project.
|
||||
|
||||
Args:
|
||||
name: Configuration name.
|
||||
attrs: Dict of configuration attributes; may be None.
|
||||
tools: List of tools (strings or Tool objects); may be None.
|
||||
"""
|
||||
spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools)
|
||||
self.configurations_section.append(spec)
|
||||
|
||||
def _AddFilesToNode(self, parent, files):
|
||||
"""Adds files and/or filters to the parent node.
|
||||
|
||||
Args:
|
||||
parent: Destination node
|
||||
files: A list of Filter objects and/or relative paths to files.
|
||||
|
||||
Will call itself recursively, if the files list contains Filter objects.
|
||||
"""
|
||||
for f in files:
|
||||
if isinstance(f, Filter):
|
||||
node = ["Filter", {"Name": f.name}]
|
||||
self._AddFilesToNode(node, f.contents)
|
||||
else:
|
||||
node = ["File", {"RelativePath": f}]
|
||||
self.files_dict[f] = node
|
||||
parent.append(node)
|
||||
|
||||
def AddFiles(self, files):
|
||||
"""Adds files to the project.
|
||||
|
||||
Args:
|
||||
files: A list of Filter objects and/or relative paths to files.
|
||||
|
||||
This makes a copy of the file/filter tree at the time of this call. If you
|
||||
later add files to a Filter object which was passed into a previous call
|
||||
to AddFiles(), it will not be reflected in this project.
|
||||
"""
|
||||
self._AddFilesToNode(self.files_section, files)
|
||||
# TODO(rspangler) This also doesn't handle adding files to an existing
|
||||
# filter. That is, it doesn't merge the trees.
|
||||
|
||||
def AddFileConfig(self, path, config, attrs=None, tools=None):
|
||||
"""Adds a configuration to a file.
|
||||
|
||||
Args:
|
||||
path: Relative path to the file.
|
||||
config: Name of configuration to add.
|
||||
attrs: Dict of configuration attributes; may be None.
|
||||
tools: List of tools (strings or Tool objects); may be None.
|
||||
|
||||
Raises:
|
||||
ValueError: Relative path does not match any file added via AddFiles().
|
||||
"""
|
||||
# Find the file node with the right relative path
|
||||
parent = self.files_dict.get(path)
|
||||
if not parent:
|
||||
raise ValueError('AddFileConfig: file "%s" not in project.' % path)
|
||||
|
||||
# Add the config to the file node
|
||||
spec = self._GetSpecForConfiguration("FileConfiguration", config, attrs, tools)
|
||||
parent.append(spec)
|
||||
|
||||
def WriteIfChanged(self):
|
||||
"""Writes the project file."""
|
||||
# First create XML content definition
|
||||
content = [
|
||||
"VisualStudioProject",
|
||||
{
|
||||
"ProjectType": "Visual C++",
|
||||
"Version": self.version.ProjectVersion(),
|
||||
"Name": self.name,
|
||||
"ProjectGUID": self.guid,
|
||||
"RootNamespace": self.name,
|
||||
"Keyword": "Win32Proj",
|
||||
},
|
||||
self.platform_section,
|
||||
self.tool_files_section,
|
||||
self.configurations_section,
|
||||
["References"], # empty section
|
||||
self.files_section,
|
||||
["Globals"], # empty section
|
||||
]
|
||||
easy_xml.WriteXmlIfChanged(content, self.project_path, encoding="Windows-1252")
|
||||
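A hedged usage sketch of the Writer API defined above; the stub version object, file names, and configuration values are assumptions for illustration (a real run gets its version object from gyp's MSVSVersion machinery and needs pylib/ on sys.path, as gyp_main.py arranges).

```python
import gyp.MSVSProject as msvs_project  # assumes pylib/ is on sys.path

class _StubVersion:
    # Stand-in for the version object normally supplied by gyp.MSVSVersion.
    def ProjectVersion(self):
        return "9.00"

writer = msvs_project.Writer("hello.vcproj", _StubVersion(), "hello")
writer.AddConfig("Debug|Win32", attrs={"OutputDirectory": "Debug"})
writer.AddFiles([msvs_project.Filter("src", contents=["hello.cc"])])
writer.AddFileConfig("hello.cc", "Debug|Win32")
writer.WriteIfChanged()  # emits hello.vcproj only if the content changed
```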
1270 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py generated vendored Normal file
File diff suppressed because it is too large
1547 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py generated vendored Executable file
File diff suppressed because it is too large
59 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py generated vendored Normal file
@@ -0,0 +1,59 @@
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Visual Studio project reader/writer."""
|
||||
|
||||
import gyp.easy_xml as easy_xml
|
||||
|
||||
|
||||
class Writer:
|
||||
"""Visual Studio XML tool file writer."""
|
||||
|
||||
def __init__(self, tool_file_path, name):
|
||||
"""Initializes the tool file.
|
||||
|
||||
Args:
|
||||
tool_file_path: Path to the tool file.
|
||||
name: Name of the tool file.
|
||||
"""
|
||||
self.tool_file_path = tool_file_path
|
||||
self.name = name
|
||||
self.rules_section = ["Rules"]
|
||||
|
||||
def AddCustomBuildRule(
|
||||
self, name, cmd, description, additional_dependencies, outputs, extensions
|
||||
):
|
||||
"""Adds a rule to the tool file.
|
||||
|
||||
Args:
|
||||
name: Name of the rule.
|
||||
description: Description of the rule.
|
||||
cmd: Command line of the rule.
|
||||
additional_dependencies: other files which may trigger the rule.
|
||||
outputs: outputs of the rule.
|
||||
extensions: extensions handled by the rule.
|
||||
"""
|
||||
rule = [
|
||||
"CustomBuildRule",
|
||||
{
|
||||
"Name": name,
|
||||
"ExecutionDescription": description,
|
||||
"CommandLine": cmd,
|
||||
"Outputs": ";".join(outputs),
|
||||
"FileExtensions": ";".join(extensions),
|
||||
"AdditionalDependencies": ";".join(additional_dependencies),
|
||||
},
|
||||
]
|
||||
self.rules_section.append(rule)
|
||||
|
||||
def WriteIfChanged(self):
|
||||
"""Writes the tool file."""
|
||||
content = [
|
||||
"VisualStudioToolFile",
|
||||
{"Version": "8.00", "Name": self.name},
|
||||
self.rules_section,
|
||||
]
|
||||
easy_xml.WriteXmlIfChanged(
|
||||
content, self.tool_file_path, encoding="Windows-1252"
|
||||
)
|
||||
153 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py generated vendored Normal file
@@ -0,0 +1,153 @@
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Visual Studio user preferences file writer."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import socket # for gethostname
|
||||
|
||||
import gyp.easy_xml as easy_xml
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _FindCommandInPath(command):
|
||||
"""If there are no slashes in the command given, this function
|
||||
searches the PATH env to find the given command, and converts it
|
||||
to an absolute path. We have to do this because MSVS is looking
|
||||
for an actual file to launch a debugger on, not just a command
|
||||
line. Note that this happens at GYP time, so anything needing to
|
||||
be built needs to have a full path."""
|
||||
if "/" in command or "\\" in command:
|
||||
# If the command already has path elements (either relative or
|
||||
# absolute), then assume it is constructed properly.
|
||||
return command
|
||||
else:
|
||||
# Search through the path list and find an existing file that
|
||||
# we can access.
|
||||
paths = os.environ.get("PATH", "").split(os.pathsep)
|
||||
for path in paths:
|
||||
item = os.path.join(path, command)
|
||||
if os.path.isfile(item) and os.access(item, os.X_OK):
|
||||
return item
|
||||
return command
|
||||
|
||||
|
||||
def _QuoteWin32CommandLineArgs(args):
|
||||
new_args = []
|
||||
for arg in args:
|
||||
# Replace all double-quotes with double-double-quotes to escape
|
||||
# them for cmd shell, and then quote the whole thing if there
|
||||
# are any.
|
||||
if arg.find('"') != -1:
|
||||
arg = '""'.join(arg.split('"'))
|
||||
arg = '"%s"' % arg
|
||||
|
||||
# Otherwise, if there are any spaces, quote the whole arg.
|
||||
elif re.search(r"[ \t\n]", arg):
|
||||
arg = '"%s"' % arg
|
||||
new_args.append(arg)
|
||||
return new_args
|
||||
|
||||
|
||||
class Writer:
|
||||
"""Visual Studio XML user user file writer."""
|
||||
|
||||
def __init__(self, user_file_path, version, name):
|
||||
"""Initializes the user file.
|
||||
|
||||
Args:
|
||||
user_file_path: Path to the user file.
|
||||
version: Version info.
|
||||
name: Name of the user file.
|
||||
"""
|
||||
self.user_file_path = user_file_path
|
||||
self.version = version
|
||||
self.name = name
|
||||
self.configurations = {}
|
||||
|
||||
def AddConfig(self, name):
|
||||
"""Adds a configuration to the project.
|
||||
|
||||
Args:
|
||||
name: Configuration name.
|
||||
"""
|
||||
self.configurations[name] = ["Configuration", {"Name": name}]
|
||||
|
||||
def AddDebugSettings(
|
||||
self, config_name, command, environment={}, working_directory=""
|
||||
):
|
||||
"""Adds a DebugSettings node to the user file for a particular config.
|
||||
|
||||
Args:
|
||||
command: command line to run. First element in the list is the
|
||||
executable. All elements of the command will be quoted if
|
||||
necessary.
|
||||
working_directory: other files which may trigger the rule. (optional)
|
||||
"""
|
||||
command = _QuoteWin32CommandLineArgs(command)
|
||||
|
||||
abs_command = _FindCommandInPath(command[0])
|
||||
|
||||
if environment and isinstance(environment, dict):
|
||||
env_list = [f'{key}="{val}"' for (key, val) in environment.items()]
|
||||
environment = " ".join(env_list)
|
||||
else:
|
||||
environment = ""
|
||||
|
||||
n_cmd = [
|
||||
"DebugSettings",
|
||||
{
|
||||
"Command": abs_command,
|
||||
"WorkingDirectory": working_directory,
|
||||
"CommandArguments": " ".join(command[1:]),
|
||||
"RemoteMachine": socket.gethostname(),
|
||||
"Environment": environment,
|
||||
"EnvironmentMerge": "true",
|
||||
# Currently these are all "dummy" values that we're just setting
|
||||
# in the default manner that MSVS does it. We could use some of
|
||||
# these to add additional capabilities, I suppose, but they might
|
||||
# not have parity with other platforms then.
|
||||
"Attach": "false",
|
||||
"DebuggerType": "3", # 'auto' debugger
|
||||
"Remote": "1",
|
||||
"RemoteCommand": "",
|
||||
"HttpUrl": "",
|
||||
"PDBPath": "",
|
||||
"SQLDebugging": "",
|
||||
"DebuggerFlavor": "0",
|
||||
"MPIRunCommand": "",
|
||||
"MPIRunArguments": "",
|
||||
"MPIRunWorkingDirectory": "",
|
||||
"ApplicationCommand": "",
|
||||
"ApplicationArguments": "",
|
||||
"ShimCommand": "",
|
||||
"MPIAcceptMode": "",
|
||||
"MPIAcceptFilter": "",
|
||||
},
|
||||
]
|
||||
|
||||
# Find the config, and add it if it doesn't exist.
|
||||
if config_name not in self.configurations:
|
||||
self.AddConfig(config_name)
|
||||
|
||||
# Add the DebugSettings onto the appropriate config.
|
||||
self.configurations[config_name].append(n_cmd)
|
||||
|
||||
def WriteIfChanged(self):
|
||||
"""Writes the user file."""
|
||||
configs = ["Configurations"]
|
||||
for config, spec in sorted(self.configurations.items()):
|
||||
configs.append(spec)
|
||||
|
||||
content = [
|
||||
"VisualStudioUserFile",
|
||||
{"Version": self.version.ProjectVersion(), "Name": self.name},
|
||||
configs,
|
||||
]
|
||||
easy_xml.WriteXmlIfChanged(
|
||||
content, self.user_file_path, encoding="Windows-1252"
|
||||
)
|
||||
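To illustrate the Win32 quoting rules implemented above, a standalone sketch of the same two rules (double up embedded quotes and wrap; otherwise wrap only when whitespace is present); the sample arguments are invented:

```python
import re

def quote_win32_args(args):
    # Same two rules as _QuoteWin32CommandLineArgs above.
    quoted = []
    for arg in args:
        if '"' in arg:
            arg = '"%s"' % arg.replace('"', '""')  # escape quotes, then wrap
        elif re.search(r"[ \t\n]", arg):
            arg = '"%s"' % arg                     # wrap args containing whitespace
        quoted.append(arg)
    return quoted

print(quote_win32_args(["plain", "has space", 'say "hi"']))
# ['plain', '"has space"', '"say ""hi"""']
```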
271 .yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py generated vendored Normal file
@@ -0,0 +1,271 @@
|
||||
# Copyright (c) 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Utility functions shared amongst the Windows generators."""
|
||||
|
||||
import copy
|
||||
import os
|
||||
|
||||
|
||||
# A dictionary mapping supported target types to extensions.
|
||||
TARGET_TYPE_EXT = {
|
||||
"executable": "exe",
|
||||
"loadable_module": "dll",
|
||||
"shared_library": "dll",
|
||||
"static_library": "lib",
|
||||
"windows_driver": "sys",
|
||||
}
|
||||
|
||||
|
||||
def _GetLargePdbShimCcPath():
|
||||
"""Returns the path of the large_pdb_shim.cc file."""
|
||||
this_dir = os.path.abspath(os.path.dirname(__file__))
|
||||
src_dir = os.path.abspath(os.path.join(this_dir, "..", ".."))
|
||||
win_data_dir = os.path.join(src_dir, "data", "win")
|
||||
large_pdb_shim_cc = os.path.join(win_data_dir, "large-pdb-shim.cc")
|
||||
return large_pdb_shim_cc
|
||||
|
||||
|
||||
def _DeepCopySomeKeys(in_dict, keys):
|
||||
"""Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
|
||||
|
||||
Arguments:
|
||||
in_dict: The dictionary to copy.
|
||||
keys: The keys to be copied. If a key is in this list and doesn't exist in
|
||||
|in_dict| this is not an error.
|
||||
Returns:
|
||||
The partially deep-copied dictionary.
|
||||
"""
|
||||
d = {}
|
||||
for key in keys:
|
||||
if key not in in_dict:
|
||||
continue
|
||||
d[key] = copy.deepcopy(in_dict[key])
|
||||
return d
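
For illustration (not part of the vendored file), a minimal sketch of what this partial deep copy buys you; _deep_copy_some_keys below is a local stand-in that restates the same logic:

import copy

def _deep_copy_some_keys(in_dict, keys):
    # Same behavior as _DeepCopySomeKeys above: copy only the listed keys,
    # deep-copying their values so later mutation cannot leak back.
    out = {}
    for key in keys:
        if key in in_dict:
            out[key] = copy.deepcopy(in_dict[key])
    return out

target = {"configurations": {"Debug": {"defines": ["FOO"]}}, "type": "executable"}
partial = _deep_copy_some_keys(target, ["configurations", "missing_key"])
partial["configurations"]["Debug"]["defines"].append("BAR")
assert target["configurations"]["Debug"]["defines"] == ["FOO"]  # original is untouched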
|
||||
|
||||
|
||||
def _SuffixName(name, suffix):
|
||||
"""Add a suffix to the end of a target.
|
||||
|
||||
Arguments:
|
||||
name: name of the target (foo#target)
|
||||
suffix: the suffix to be added
|
||||
Returns:
|
||||
Target name with suffix added (foo_suffix#target)
|
||||
"""
|
||||
parts = name.rsplit("#", 1)
|
||||
parts[0] = f"{parts[0]}_{suffix}"
|
||||
return "#".join(parts)
|
||||
|
||||
|
||||
def _ShardName(name, number):
|
||||
"""Add a shard number to the end of a target.
|
||||
|
||||
Arguments:
|
||||
name: name of the target (foo#target)
|
||||
number: shard number
|
||||
Returns:
|
||||
Target name with shard added (foo_1#target)
|
||||
"""
|
||||
return _SuffixName(name, str(number))
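
As a quick illustration (not part of the vendored file), the suffix/shard renaming restated on qualified target names of the "name#toolset" form used throughout this module:

def _suffix_name(name, suffix):
    # Restates _SuffixName above: the suffix goes before the '#toolset' part.
    parts = name.rsplit("#", 1)
    parts[0] = f"{parts[0]}_{suffix}"
    return "#".join(parts)

print(_suffix_name("foo#target", "large_pdb_shim"))  # foo_large_pdb_shim#target
print(_suffix_name("foo#target", "1"))               # foo_1#target (what _ShardName yields)
print(_suffix_name("bar", "0"))                      # bar_0 (no toolset part, still valid)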
|
||||
|
||||
|
||||
def ShardTargets(target_list, target_dicts):
|
||||
"""Shard some targets apart to work around the linkers limits.
|
||||
|
||||
Arguments:
|
||||
target_list: List of target pairs: 'base/base.gyp:base'.
|
||||
target_dicts: Dict of target properties keyed on target pair.
|
||||
Returns:
|
||||
Tuple of the new sharded versions of the inputs.
|
||||
"""
|
||||
# Gather the targets to shard, and how many pieces.
|
||||
targets_to_shard = {}
|
||||
for t in target_dicts:
|
||||
shards = int(target_dicts[t].get("msvs_shard", 0))
|
||||
if shards:
|
||||
targets_to_shard[t] = shards
|
||||
# Shard target_list.
|
||||
new_target_list = []
|
||||
for t in target_list:
|
||||
if t in targets_to_shard:
|
||||
for i in range(targets_to_shard[t]):
|
||||
new_target_list.append(_ShardName(t, i))
|
||||
else:
|
||||
new_target_list.append(t)
|
||||
# Shard target_dict.
|
||||
new_target_dicts = {}
|
||||
for t in target_dicts:
|
||||
if t in targets_to_shard:
|
||||
for i in range(targets_to_shard[t]):
|
||||
name = _ShardName(t, i)
|
||||
new_target_dicts[name] = copy.copy(target_dicts[t])
|
||||
new_target_dicts[name]["target_name"] = _ShardName(
|
||||
new_target_dicts[name]["target_name"], i
|
||||
)
|
||||
sources = new_target_dicts[name].get("sources", [])
|
||||
new_sources = []
|
||||
for pos in range(i, len(sources), targets_to_shard[t]):
|
||||
new_sources.append(sources[pos])
|
||||
new_target_dicts[name]["sources"] = new_sources
|
||||
else:
|
||||
new_target_dicts[t] = target_dicts[t]
|
||||
# Shard dependencies.
|
||||
for t in sorted(new_target_dicts):
|
||||
for deptype in ("dependencies", "dependencies_original"):
|
||||
dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
|
||||
new_dependencies = []
|
||||
for d in dependencies:
|
||||
if d in targets_to_shard:
|
||||
for i in range(targets_to_shard[d]):
|
||||
new_dependencies.append(_ShardName(d, i))
|
||||
else:
|
||||
new_dependencies.append(d)
|
||||
new_target_dicts[t][deptype] = new_dependencies
|
||||
|
||||
return (new_target_list, new_target_dicts)
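
A small, self-contained illustration (not from the vendored file) of the round-robin source split that ShardTargets applies to a target with msvs_shard set; the file names are made up:

sources = ["a.cc", "b.cc", "c.cc", "d.cc", "e.cc"]
shards = 2  # what msvs_shard = 2 would request
for i in range(shards):
    # Mirrors the "range(i, len(sources), shards)" stride in ShardTargets.
    shard_sources = [sources[pos] for pos in range(i, len(sources), shards)]
    print(f"shard {i}: {shard_sources}")
# shard 0: ['a.cc', 'c.cc', 'e.cc']
# shard 1: ['b.cc', 'd.cc']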
|
||||
|
||||
|
||||
def _GetPdbPath(target_dict, config_name, vars):
|
||||
"""Returns the path to the PDB file that will be generated by a given
|
||||
configuration.
|
||||
|
||||
The lookup proceeds as follows:
|
||||
- Look for an explicit path in the VCLinkerTool configuration block.
|
||||
- Look for an 'msvs_large_pdb_path' variable.
|
||||
- Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
|
||||
specified.
|
||||
- Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
|
||||
|
||||
Arguments:
|
||||
target_dict: The target dictionary to be searched.
|
||||
config_name: The name of the configuration of interest.
|
||||
vars: A dictionary of common GYP variables with generator-specific values.
|
||||
Returns:
|
||||
The path of the corresponding PDB file.
|
||||
"""
|
||||
config = target_dict["configurations"][config_name]
|
||||
msvs = config.setdefault("msvs_settings", {})
|
||||
|
||||
linker = msvs.get("VCLinkerTool", {})
|
||||
|
||||
pdb_path = linker.get("ProgramDatabaseFile")
|
||||
if pdb_path:
|
||||
return pdb_path
|
||||
|
||||
variables = target_dict.get("variables", {})
|
||||
pdb_path = variables.get("msvs_large_pdb_path", None)
|
||||
if pdb_path:
|
||||
return pdb_path
|
||||
|
||||
pdb_base = target_dict.get("product_name", target_dict["target_name"])
|
||||
pdb_base = "{}.{}.pdb".format(pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
|
||||
pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base
|
||||
|
||||
return pdb_path
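
For clarity (not part of the vendored file), a hedged sketch of the lookup order above on a hypothetical target dict; pdb_path_for is a stand-in, not the module function:

def pdb_path_for(target_dict, config_name, gyp_vars, target_type_ext):
    config = target_dict["configurations"][config_name]
    linker = config.get("msvs_settings", {}).get("VCLinkerTool", {})
    if linker.get("ProgramDatabaseFile"):
        return linker["ProgramDatabaseFile"]              # 1. explicit linker setting
    large = target_dict.get("variables", {}).get("msvs_large_pdb_path")
    if large:
        return large                                      # 2. msvs_large_pdb_path variable
    base = target_dict.get("product_name", target_dict["target_name"])
    ext = target_type_ext[target_dict["type"]]
    return f"{gyp_vars['PRODUCT_DIR']}/{base}.{ext}.pdb"  # 3./4. derived default

target = {"target_name": "app", "type": "executable",
          "configurations": {"Release": {}}}
print(pdb_path_for(target, "Release", {"PRODUCT_DIR": "<(PRODUCT_DIR)"},
                   {"executable": "exe"}))  # <(PRODUCT_DIR)/app.exe.pdb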
|
||||
|
||||
|
||||
def InsertLargePdbShims(target_list, target_dicts, vars):
|
||||
"""Insert a shim target that forces the linker to use 4KB pagesize PDBs.
|
||||
|
||||
This is a workaround for targets with PDBs greater than 1GB in size, the
|
||||
limit for the 1KB pagesize PDBs created by the linker by default.
|
||||
|
||||
Arguments:
|
||||
target_list: List of target pairs: 'base/base.gyp:base'.
|
||||
target_dicts: Dict of target properties keyed on target pair.
|
||||
vars: A dictionary of common GYP variables with generator-specific values.
|
||||
Returns:
|
||||
Tuple of the shimmed version of the inputs.
|
||||
"""
|
||||
# Determine which targets need shimming.
|
||||
targets_to_shim = []
|
||||
for t in target_dicts:
|
||||
target_dict = target_dicts[t]
|
||||
|
||||
# We only want to shim targets that have msvs_large_pdb enabled.
|
||||
if not int(target_dict.get("msvs_large_pdb", 0)):
|
||||
continue
|
||||
# This is intended for executable, shared_library and loadable_module
|
||||
# targets where every configuration is set up to produce a PDB output.
|
||||
# If any of these conditions is not true then the shim logic will fail
|
||||
# below.
|
||||
targets_to_shim.append(t)
|
||||
|
||||
large_pdb_shim_cc = _GetLargePdbShimCcPath()
|
||||
|
||||
for t in targets_to_shim:
|
||||
target_dict = target_dicts[t]
|
||||
target_name = target_dict.get("target_name")
|
||||
|
||||
base_dict = _DeepCopySomeKeys(
|
||||
target_dict, ["configurations", "default_configuration", "toolset"]
|
||||
)
|
||||
|
||||
# This is the dict for copying the source file (part of the GYP tree)
|
||||
# to the intermediate directory of the project. This is necessary because
|
||||
# we can't always build a relative path to the shim source file (on Windows
|
||||
# GYP and the project may be on different drives), and Ninja hates absolute
|
||||
# paths (it ends up generating the .obj and .obj.d alongside the source
|
||||
# file, polluting GYP's tree).
|
||||
copy_suffix = "large_pdb_copy"
|
||||
copy_target_name = target_name + "_" + copy_suffix
|
||||
full_copy_target_name = _SuffixName(t, copy_suffix)
|
||||
shim_cc_basename = os.path.basename(large_pdb_shim_cc)
|
||||
shim_cc_dir = vars["SHARED_INTERMEDIATE_DIR"] + "/" + copy_target_name
|
||||
shim_cc_path = shim_cc_dir + "/" + shim_cc_basename
|
||||
copy_dict = copy.deepcopy(base_dict)
|
||||
copy_dict["target_name"] = copy_target_name
|
||||
copy_dict["type"] = "none"
|
||||
copy_dict["sources"] = [large_pdb_shim_cc]
|
||||
copy_dict["copies"] = [
|
||||
{"destination": shim_cc_dir, "files": [large_pdb_shim_cc]}
|
||||
]
|
||||
|
||||
# This is the dict for the PDB generating shim target. It depends on the
|
||||
# copy target.
|
||||
shim_suffix = "large_pdb_shim"
|
||||
shim_target_name = target_name + "_" + shim_suffix
|
||||
full_shim_target_name = _SuffixName(t, shim_suffix)
|
||||
shim_dict = copy.deepcopy(base_dict)
|
||||
shim_dict["target_name"] = shim_target_name
|
||||
shim_dict["type"] = "static_library"
|
||||
shim_dict["sources"] = [shim_cc_path]
|
||||
shim_dict["dependencies"] = [full_copy_target_name]
|
||||
|
||||
# Set up the shim to output its PDB to the same location as the final linker
|
||||
# target.
|
||||
for config_name, config in shim_dict.get("configurations").items():
|
||||
pdb_path = _GetPdbPath(target_dict, config_name, vars)
|
||||
|
||||
# A few keys that we don't want to propagate.
|
||||
for key in ["msvs_precompiled_header", "msvs_precompiled_source", "test"]:
|
||||
config.pop(key, None)
|
||||
|
||||
msvs = config.setdefault("msvs_settings", {})
|
||||
|
||||
# Update the compiler directives in the shim target.
|
||||
compiler = msvs.setdefault("VCCLCompilerTool", {})
|
||||
compiler["DebugInformationFormat"] = "3"
|
||||
compiler["ProgramDataBaseFileName"] = pdb_path
|
||||
|
||||
# Set the explicit PDB path in the appropriate configuration of the
|
||||
# original target.
|
||||
config = target_dict["configurations"][config_name]
|
||||
msvs = config.setdefault("msvs_settings", {})
|
||||
linker = msvs.setdefault("VCLinkerTool", {})
|
||||
linker["GenerateDebugInformation"] = "true"
|
||||
linker["ProgramDatabaseFile"] = pdb_path
|
||||
|
||||
# Add the new targets. They must go to the beginning of the list so that
|
||||
# the dependency generation works as expected in ninja.
|
||||
target_list.insert(0, full_copy_target_name)
|
||||
target_list.insert(0, full_shim_target_name)
|
||||
target_dicts[full_copy_target_name] = copy_dict
|
||||
target_dicts[full_shim_target_name] = shim_dict
|
||||
|
||||
# Update the original target to depend on the shim target.
|
||||
target_dict.setdefault("dependencies", []).append(full_shim_target_name)
|
||||
|
||||
return (target_list, target_dicts)
|
||||
574
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
generated
vendored
Normal file
@@ -0,0 +1,574 @@
|
||||
# Copyright (c) 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Handle version information related to Visual Stuio."""
|
||||
|
||||
import errno
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import glob
|
||||
|
||||
|
||||
def JoinPath(*args):
|
||||
return os.path.normpath(os.path.join(*args))
|
||||
|
||||
|
||||
class VisualStudioVersion:
|
||||
"""Information regarding a version of Visual Studio."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
short_name,
|
||||
description,
|
||||
solution_version,
|
||||
project_version,
|
||||
flat_sln,
|
||||
uses_vcxproj,
|
||||
path,
|
||||
sdk_based,
|
||||
default_toolset=None,
|
||||
compatible_sdks=None,
|
||||
):
|
||||
self.short_name = short_name
|
||||
self.description = description
|
||||
self.solution_version = solution_version
|
||||
self.project_version = project_version
|
||||
self.flat_sln = flat_sln
|
||||
self.uses_vcxproj = uses_vcxproj
|
||||
self.path = path
|
||||
self.sdk_based = sdk_based
|
||||
self.default_toolset = default_toolset
|
||||
compatible_sdks = compatible_sdks or []
|
||||
compatible_sdks.sort(key=lambda v: float(v.replace("v", "")), reverse=True)
|
||||
self.compatible_sdks = compatible_sdks
|
||||
|
||||
def ShortName(self):
|
||||
return self.short_name
|
||||
|
||||
def Description(self):
|
||||
"""Get the full description of the version."""
|
||||
return self.description
|
||||
|
||||
def SolutionVersion(self):
|
||||
"""Get the version number of the sln files."""
|
||||
return self.solution_version
|
||||
|
||||
def ProjectVersion(self):
|
||||
"""Get the version number of the vcproj or vcxproj files."""
|
||||
return self.project_version
|
||||
|
||||
def FlatSolution(self):
|
||||
return self.flat_sln
|
||||
|
||||
def UsesVcxproj(self):
|
||||
"""Returns true if this version uses a vcxproj file."""
|
||||
return self.uses_vcxproj
|
||||
|
||||
def ProjectExtension(self):
|
||||
"""Returns the file extension for the project."""
|
||||
return self.uses_vcxproj and ".vcxproj" or ".vcproj"
|
||||
|
||||
def Path(self):
|
||||
"""Returns the path to Visual Studio installation."""
|
||||
return self.path
|
||||
|
||||
def ToolPath(self, tool):
|
||||
"""Returns the path to a given compiler tool. """
|
||||
return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
|
||||
|
||||
def DefaultToolset(self):
|
||||
"""Returns the msbuild toolset version that will be used in the absence
|
||||
of a user override."""
|
||||
return self.default_toolset
|
||||
|
||||
def _SetupScriptInternal(self, target_arch):
|
||||
"""Returns a command (with arguments) to be used to set up the
|
||||
environment."""
|
||||
assert target_arch in ("x86", "x64"), "target_arch not supported"
|
||||
# If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
|
||||
# depot_tools build tools and should run SetEnv.Cmd to set up the
|
||||
# environment. The check for WindowsSDKDir alone is not sufficient because
|
||||
# this is set by running vcvarsall.bat.
|
||||
sdk_dir = os.environ.get("WindowsSDKDir", "")
|
||||
setup_path = JoinPath(sdk_dir, "Bin", "SetEnv.Cmd")
|
||||
if self.sdk_based and sdk_dir and os.path.exists(setup_path):
|
||||
return [setup_path, "/" + target_arch]
|
||||
|
||||
is_host_arch_x64 = (
|
||||
os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64"
|
||||
or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64"
|
||||
)
|
||||
|
||||
# For VS2017 (and newer) it's fairly easy
|
||||
if self.short_name >= "2017":
|
||||
script_path = JoinPath(
|
||||
self.path, "VC", "Auxiliary", "Build", "vcvarsall.bat"
|
||||
)
|
||||
|
||||
# Always use a native executable, cross-compiling if necessary.
|
||||
host_arch = "amd64" if is_host_arch_x64 else "x86"
|
||||
msvc_target_arch = "amd64" if target_arch == "x64" else "x86"
|
||||
arg = host_arch
|
||||
if host_arch != msvc_target_arch:
|
||||
arg += "_" + msvc_target_arch
|
||||
|
||||
return [script_path, arg]
|
||||
|
||||
# We try to find the best version of the env setup batch.
|
||||
vcvarsall = JoinPath(self.path, "VC", "vcvarsall.bat")
|
||||
if target_arch == "x86":
|
||||
if (
|
||||
self.short_name >= "2013"
|
||||
and self.short_name[-1] != "e"
|
||||
and is_host_arch_x64
|
||||
):
|
||||
# VS2013 and later, non-Express have a x64-x86 cross that we want
|
||||
# to prefer.
|
||||
return [vcvarsall, "amd64_x86"]
|
||||
else:
|
||||
# Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat
|
||||
# for x86 because vcvarsall calls vcvars32, which it can only find if
|
||||
# VS??COMNTOOLS is set, which isn't guaranteed.
|
||||
return [JoinPath(self.path, "Common7", "Tools", "vsvars32.bat")]
|
||||
elif target_arch == "x64":
|
||||
arg = "x86_amd64"
|
||||
# Use the 64-on-64 compiler if we're not using an express edition and
|
||||
# we're running on a 64bit OS.
|
||||
if self.short_name[-1] != "e" and is_host_arch_x64:
|
||||
arg = "amd64"
|
||||
return [vcvarsall, arg]
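
To make the VS2017+ branch above concrete (illustration only, not part of the vendored file), the vcvarsall.bat architecture argument is built like this:

def vcvars_arg(host_is_x64, target_arch):
    # A native host toolchain is always chosen; a "host_target" pair means
    # cross-compiling, matching the logic in _SetupScriptInternal above.
    host = "amd64" if host_is_x64 else "x86"
    target = "amd64" if target_arch == "x64" else "x86"
    return host if host == target else f"{host}_{target}"

print(vcvars_arg(True, "x64"))   # amd64
print(vcvars_arg(True, "x86"))   # amd64_x86
print(vcvars_arg(False, "x64"))  # x86_amd64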
|
||||
|
||||
def SetupScript(self, target_arch):
|
||||
script_data = self._SetupScriptInternal(target_arch)
|
||||
script_path = script_data[0]
|
||||
if not os.path.exists(script_path):
|
||||
raise Exception(
|
||||
"%s is missing - make sure VC++ tools are installed." % script_path
|
||||
)
|
||||
return script_data
|
||||
|
||||
|
||||
def _RegistryQueryBase(sysdir, key, value):
|
||||
"""Use reg.exe to read a particular key.
|
||||
|
||||
While ideally we might use the win32 module, we would like gyp to remain
|
||||
Python-neutral; cygwin Python, for instance, lacks this module.
|
||||
|
||||
Arguments:
|
||||
sysdir: The system subdirectory to attempt to launch reg.exe from.
|
||||
key: The registry key to read from.
|
||||
value: The particular value to read.
|
||||
Return:
|
||||
stdout from reg.exe, or None for failure.
|
||||
"""
|
||||
# Skip if not on Windows or Python Win32 setup issue
|
||||
if sys.platform not in ("win32", "cygwin"):
|
||||
return None
|
||||
# Setup params to pass to and attempt to launch reg.exe
|
||||
cmd = [os.path.join(os.environ.get("WINDIR", ""), sysdir, "reg.exe"), "query", key]
|
||||
if value:
|
||||
cmd.extend(["/v", value])
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
|
||||
# Note that the error text may be in [1] in some cases
|
||||
text = p.communicate()[0].decode("utf-8")
|
||||
# Check return code from reg.exe; officially 0==success and 1==error
|
||||
if p.returncode:
|
||||
return None
|
||||
return text
|
||||
|
||||
|
||||
def _RegistryQuery(key, value=None):
|
||||
r"""Use reg.exe to read a particular key through _RegistryQueryBase.
|
||||
|
||||
First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
|
||||
that fails, it falls back to System32. Sysnative is available on Vista and
|
||||
up and available on Windows Server 2003 and XP through KB patch 942589. Note
|
||||
that Sysnative will always fail if using 64-bit python due to it being a
|
||||
virtual directory and System32 will work correctly in the first place.
|
||||
|
||||
KB 942589 - http://support.microsoft.com/kb/942589/en-us.
|
||||
|
||||
Arguments:
|
||||
key: The registry key.
|
||||
value: The particular registry value to read (optional).
|
||||
Return:
|
||||
stdout from reg.exe, or None for failure.
|
||||
"""
|
||||
text = None
|
||||
try:
|
||||
text = _RegistryQueryBase("Sysnative", key, value)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
text = _RegistryQueryBase("System32", key, value)
|
||||
else:
|
||||
raise
|
||||
return text
|
||||
|
||||
|
||||
def _RegistryGetValueUsingWinReg(key, value):
|
||||
"""Use the _winreg module to obtain the value of a registry key.
|
||||
|
||||
Args:
|
||||
key: The registry key.
|
||||
value: The particular registry value to read.
|
||||
Return:
|
||||
contents of the registry key's value, or None on failure. Throws
|
||||
ImportError if winreg is unavailable.
|
||||
"""
|
||||
from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
|
||||
try:
|
||||
root, subkey = key.split("\\", 1)
|
||||
assert root == "HKLM" # Only need HKLM for now.
|
||||
with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
|
||||
return QueryValueEx(hkey, value)[0]
|
||||
except OSError:
|
||||
return None
|
||||
|
||||
|
||||
def _RegistryGetValue(key, value):
|
||||
"""Use _winreg or reg.exe to obtain the value of a registry key.
|
||||
|
||||
Using _winreg is preferable because it solves an issue on some corporate
|
||||
environments where access to reg.exe is locked down. However, we still need
|
||||
to fall back to reg.exe for the case where the _winreg module is not available
|
||||
(for example in cygwin python).
|
||||
|
||||
Args:
|
||||
key: The registry key.
|
||||
value: The particular registry value to read.
|
||||
Return:
|
||||
contents of the registry key's value, or None on failure.
|
||||
"""
|
||||
try:
|
||||
return _RegistryGetValueUsingWinReg(key, value)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Fallback to reg.exe if we fail to import _winreg.
|
||||
text = _RegistryQuery(key, value)
|
||||
if not text:
|
||||
return None
|
||||
# Extract value.
|
||||
match = re.search(r"REG_\w+\s+([^\r]+)\r\n", text)
|
||||
if not match:
|
||||
return None
|
||||
return match.group(1)
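
For illustration (not part of the vendored file), this is the kind of reg.exe output the fallback regex above picks apart; the sample text is made up but follows the usual "reg.exe query" layout:

import re

sample = (
    "HKEY_LOCAL_MACHINE\\Software\\Microsoft\\VisualStudio\\14.0\r\n"
    "    InstallDir    REG_SZ    C:\\Program Files (x86)\\"
    "Microsoft Visual Studio 14.0\\Common7\\IDE\\\r\n"
)
match = re.search(r"REG_\w+\s+([^\r]+)\r\n", sample)
print(match.group(1))
# C:\Program Files (x86)\Microsoft Visual Studio 14.0\Common7\IDE\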
|
||||
|
||||
|
||||
def _CreateVersion(name, path, sdk_based=False):
|
||||
"""Sets up MSVS project generation.
|
||||
|
||||
Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
|
||||
autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
|
||||
passed in that doesn't match a value in versions, Python will throw an error.
|
||||
"""
|
||||
if path:
|
||||
path = os.path.normpath(path)
|
||||
versions = {
|
||||
"2022": VisualStudioVersion(
|
||||
"2022",
|
||||
"Visual Studio 2022",
|
||||
solution_version="12.00",
|
||||
project_version="17.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v143",
|
||||
compatible_sdks=["v8.1", "v10.0"],
|
||||
),
|
||||
"2019": VisualStudioVersion(
|
||||
"2019",
|
||||
"Visual Studio 2019",
|
||||
solution_version="12.00",
|
||||
project_version="16.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v142",
|
||||
compatible_sdks=["v8.1", "v10.0"],
|
||||
),
|
||||
"2017": VisualStudioVersion(
|
||||
"2017",
|
||||
"Visual Studio 2017",
|
||||
solution_version="12.00",
|
||||
project_version="15.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v141",
|
||||
compatible_sdks=["v8.1", "v10.0"],
|
||||
),
|
||||
"2015": VisualStudioVersion(
|
||||
"2015",
|
||||
"Visual Studio 2015",
|
||||
solution_version="12.00",
|
||||
project_version="14.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v140",
|
||||
),
|
||||
"2013": VisualStudioVersion(
|
||||
"2013",
|
||||
"Visual Studio 2013",
|
||||
solution_version="13.00",
|
||||
project_version="12.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v120",
|
||||
),
|
||||
"2013e": VisualStudioVersion(
|
||||
"2013e",
|
||||
"Visual Studio 2013",
|
||||
solution_version="13.00",
|
||||
project_version="12.0",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v120",
|
||||
),
|
||||
"2012": VisualStudioVersion(
|
||||
"2012",
|
||||
"Visual Studio 2012",
|
||||
solution_version="12.00",
|
||||
project_version="4.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v110",
|
||||
),
|
||||
"2012e": VisualStudioVersion(
|
||||
"2012e",
|
||||
"Visual Studio 2012",
|
||||
solution_version="12.00",
|
||||
project_version="4.0",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v110",
|
||||
),
|
||||
"2010": VisualStudioVersion(
|
||||
"2010",
|
||||
"Visual Studio 2010",
|
||||
solution_version="11.00",
|
||||
project_version="4.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2010e": VisualStudioVersion(
|
||||
"2010e",
|
||||
"Visual C++ Express 2010",
|
||||
solution_version="11.00",
|
||||
project_version="4.0",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2008": VisualStudioVersion(
|
||||
"2008",
|
||||
"Visual Studio 2008",
|
||||
solution_version="10.00",
|
||||
project_version="9.00",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2008e": VisualStudioVersion(
|
||||
"2008e",
|
||||
"Visual Studio 2008",
|
||||
solution_version="10.00",
|
||||
project_version="9.00",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2005": VisualStudioVersion(
|
||||
"2005",
|
||||
"Visual Studio 2005",
|
||||
solution_version="9.00",
|
||||
project_version="8.00",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2005e": VisualStudioVersion(
|
||||
"2005e",
|
||||
"Visual Studio 2005",
|
||||
solution_version="9.00",
|
||||
project_version="8.00",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
}
|
||||
return versions[str(name)]
|
||||
|
||||
|
||||
def _ConvertToCygpath(path):
|
||||
"""Convert to cygwin path if we are using cygwin."""
|
||||
if sys.platform == "cygwin":
|
||||
p = subprocess.Popen(["cygpath", path], stdout=subprocess.PIPE)
|
||||
path = p.communicate()[0].decode("utf-8").strip()
|
||||
return path
|
||||
|
||||
|
||||
def _DetectVisualStudioVersions(versions_to_check, force_express):
|
||||
"""Collect the list of installed visual studio versions.
|
||||
|
||||
Returns:
|
||||
A list of visual studio versions installed in descending order of
|
||||
usage preference.
|
||||
Base this on the registry and a quick check if devenv.exe exists.
|
||||
Possibilities are:
|
||||
2005(e) - Visual Studio 2005 (8)
|
||||
2008(e) - Visual Studio 2008 (9)
|
||||
2010(e) - Visual Studio 2010 (10)
|
||||
2012(e) - Visual Studio 2012 (11)
|
||||
2013(e) - Visual Studio 2013 (12)
|
||||
2015 - Visual Studio 2015 (14)
|
||||
2017 - Visual Studio 2017 (15)
|
||||
2019 - Visual Studio 2019 (16)
|
||||
2022 - Visual Studio 2022 (17)
|
||||
Where (e) is e for express editions of MSVS and blank otherwise.
|
||||
"""
|
||||
version_to_year = {
|
||||
"8.0": "2005",
|
||||
"9.0": "2008",
|
||||
"10.0": "2010",
|
||||
"11.0": "2012",
|
||||
"12.0": "2013",
|
||||
"14.0": "2015",
|
||||
"15.0": "2017",
|
||||
"16.0": "2019",
|
||||
"17.0": "2022",
|
||||
}
|
||||
versions = []
|
||||
for version in versions_to_check:
|
||||
# Old method of searching for which VS version is installed
|
||||
# We don't use the 2010-encouraged way because we also want to get the
|
||||
# path to the binaries, which it doesn't offer.
|
||||
keys = [
|
||||
r"HKLM\Software\Microsoft\VisualStudio\%s" % version,
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s" % version,
|
||||
r"HKLM\Software\Microsoft\VCExpress\%s" % version,
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s" % version,
|
||||
]
|
||||
for index in range(len(keys)):
|
||||
path = _RegistryGetValue(keys[index], "InstallDir")
|
||||
if not path:
|
||||
continue
|
||||
path = _ConvertToCygpath(path)
|
||||
# Check for full.
|
||||
full_path = os.path.join(path, "devenv.exe")
|
||||
express_path = os.path.join(path, "*express.exe")
|
||||
if not force_express and os.path.exists(full_path):
|
||||
# Add this one.
|
||||
versions.append(
|
||||
_CreateVersion(
|
||||
version_to_year[version], os.path.join(path, "..", "..")
|
||||
)
|
||||
)
|
||||
# Check for express.
|
||||
elif glob.glob(express_path):
|
||||
# Add this one.
|
||||
versions.append(
|
||||
_CreateVersion(
|
||||
version_to_year[version] + "e", os.path.join(path, "..", "..")
|
||||
)
|
||||
)
|
||||
|
||||
# The old method above does not work when only SDK is installed.
|
||||
keys = [
|
||||
r"HKLM\Software\Microsoft\VisualStudio\SxS\VC7",
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7",
|
||||
r"HKLM\Software\Microsoft\VisualStudio\SxS\VS7",
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7",
|
||||
]
|
||||
for index in range(len(keys)):
|
||||
path = _RegistryGetValue(keys[index], version)
|
||||
if not path:
|
||||
continue
|
||||
path = _ConvertToCygpath(path)
|
||||
if version == "15.0":
|
||||
if os.path.exists(path):
|
||||
versions.append(_CreateVersion("2017", path))
|
||||
elif version != "14.0": # There is no Express edition for 2015.
|
||||
versions.append(
|
||||
_CreateVersion(
|
||||
version_to_year[version] + "e",
|
||||
os.path.join(path, ".."),
|
||||
sdk_based=True,
|
||||
)
|
||||
)
|
||||
|
||||
return versions
|
||||
|
||||
|
||||
def SelectVisualStudioVersion(version="auto", allow_fallback=True):
|
||||
"""Select which version of Visual Studio projects to generate.
|
||||
|
||||
Arguments:
|
||||
version: Hook to allow caller to force a particular version (vs auto).
|
||||
Returns:
|
||||
An object representing a visual studio project format version.
|
||||
"""
|
||||
# In auto mode, check environment variable for override.
|
||||
if version == "auto":
|
||||
version = os.environ.get("GYP_MSVS_VERSION", "auto")
|
||||
version_map = {
|
||||
"auto": ("17.0", "16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"),
|
||||
"2005": ("8.0",),
|
||||
"2005e": ("8.0",),
|
||||
"2008": ("9.0",),
|
||||
"2008e": ("9.0",),
|
||||
"2010": ("10.0",),
|
||||
"2010e": ("10.0",),
|
||||
"2012": ("11.0",),
|
||||
"2012e": ("11.0",),
|
||||
"2013": ("12.0",),
|
||||
"2013e": ("12.0",),
|
||||
"2015": ("14.0",),
|
||||
"2017": ("15.0",),
|
||||
"2019": ("16.0",),
|
||||
"2022": ("17.0",),
|
||||
}
|
||||
override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH")
|
||||
if override_path:
|
||||
msvs_version = os.environ.get("GYP_MSVS_VERSION")
|
||||
if not msvs_version:
|
||||
raise ValueError(
|
||||
"GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be "
|
||||
"set to a particular version (e.g. 2010e)."
|
||||
)
|
||||
return _CreateVersion(msvs_version, override_path, sdk_based=True)
|
||||
version = str(version)
|
||||
versions = _DetectVisualStudioVersions(version_map[version], "e" in version)
|
||||
if not versions:
|
||||
if not allow_fallback:
|
||||
raise ValueError("Could not locate Visual Studio installation.")
|
||||
if version == "auto":
|
||||
# Default to 2005 if we couldn't find anything
|
||||
return _CreateVersion("2005", None)
|
||||
else:
|
||||
return _CreateVersion(version, None)
|
||||
return versions[0]
|
||||
690
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
generated
vendored
Executable file
@@ -0,0 +1,690 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
import copy
|
||||
import gyp.input
|
||||
import argparse
|
||||
import os.path
|
||||
import re
|
||||
import shlex
|
||||
import sys
|
||||
import traceback
|
||||
from gyp.common import GypError
|
||||
|
||||
|
||||
# Default debug modes for GYP
|
||||
debug = {}
|
||||
|
||||
# List of "official" debug modes, but you can use anything you like.
|
||||
DEBUG_GENERAL = "general"
|
||||
DEBUG_VARIABLES = "variables"
|
||||
DEBUG_INCLUDES = "includes"
|
||||
|
||||
|
||||
def DebugOutput(mode, message, *args):
|
||||
if "all" in gyp.debug or mode in gyp.debug:
|
||||
ctx = ("unknown", 0, "unknown")
|
||||
try:
|
||||
f = traceback.extract_stack(limit=2)
|
||||
if f:
|
||||
ctx = f[0][:3]
|
||||
except Exception:
|
||||
pass
|
||||
if args:
|
||||
message %= args
|
||||
print(
|
||||
"%s:%s:%d:%s %s"
|
||||
% (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message)
|
||||
)
|
||||
|
||||
|
||||
def FindBuildFiles():
|
||||
extension = ".gyp"
|
||||
files = os.listdir(os.getcwd())
|
||||
build_files = []
|
||||
for file in files:
|
||||
if file.endswith(extension):
|
||||
build_files.append(file)
|
||||
return build_files
|
||||
|
||||
|
||||
def Load(
|
||||
build_files,
|
||||
format,
|
||||
default_variables={},
|
||||
includes=[],
|
||||
depth=".",
|
||||
params=None,
|
||||
check=False,
|
||||
circular_check=True,
|
||||
):
|
||||
"""
|
||||
Loads one or more specified build files.
|
||||
default_variables and includes will be copied before use.
|
||||
Returns the generator for the specified format and the
|
||||
data returned by loading the specified build files.
|
||||
"""
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
if "-" in format:
|
||||
format, params["flavor"] = format.split("-", 1)
|
||||
|
||||
default_variables = copy.copy(default_variables)
|
||||
|
||||
# Default variables provided by this program and its modules should be
|
||||
# named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
|
||||
# avoiding collisions with user and automatic variables.
|
||||
default_variables["GENERATOR"] = format
|
||||
default_variables["GENERATOR_FLAVOR"] = params.get("flavor", "")
|
||||
|
||||
# Format can be a custom python file, or by default the name of a module
|
||||
# within gyp.generator.
|
||||
if format.endswith(".py"):
|
||||
generator_name = os.path.splitext(format)[0]
|
||||
path, generator_name = os.path.split(generator_name)
|
||||
|
||||
# Make sure the path to the custom generator is in sys.path
|
||||
# Don't worry about removing it once we are done. Keeping the path
|
||||
# to each generator that is used in sys.path is likely harmless and
|
||||
# arguably a good idea.
|
||||
path = os.path.abspath(path)
|
||||
if path not in sys.path:
|
||||
sys.path.insert(0, path)
|
||||
else:
|
||||
generator_name = "gyp.generator." + format
|
||||
|
||||
# These parameters are passed in order (as opposed to by key)
|
||||
# because ActivePython cannot handle key parameters to __import__.
|
||||
generator = __import__(generator_name, globals(), locals(), generator_name)
|
||||
for (key, val) in generator.generator_default_variables.items():
|
||||
default_variables.setdefault(key, val)
|
||||
|
||||
output_dir = params["options"].generator_output or params["options"].toplevel_dir
|
||||
if default_variables["GENERATOR"] == "ninja":
|
||||
default_variables.setdefault(
|
||||
"PRODUCT_DIR_ABS",
|
||||
os.path.join(output_dir, "out", default_variables["build_type"]),
|
||||
)
|
||||
else:
|
||||
default_variables.setdefault(
|
||||
"PRODUCT_DIR_ABS",
|
||||
os.path.join(output_dir, default_variables["CONFIGURATION_NAME"]),
|
||||
)
|
||||
|
||||
# Give the generator the opportunity to set additional variables based on
|
||||
# the params it will receive in the output phase.
|
||||
if getattr(generator, "CalculateVariables", None):
|
||||
generator.CalculateVariables(default_variables, params)
|
||||
|
||||
# Give the generator the opportunity to set generator_input_info based on
|
||||
# the params it will receive in the output phase.
|
||||
if getattr(generator, "CalculateGeneratorInputInfo", None):
|
||||
generator.CalculateGeneratorInputInfo(params)
|
||||
|
||||
# Fetch the generator specific info that gets fed to input, we use getattr
|
||||
# so we can default things and the generators only have to provide what
|
||||
# they need.
|
||||
generator_input_info = {
|
||||
"non_configuration_keys": getattr(
|
||||
generator, "generator_additional_non_configuration_keys", []
|
||||
),
|
||||
"path_sections": getattr(generator, "generator_additional_path_sections", []),
|
||||
"extra_sources_for_rules": getattr(
|
||||
generator, "generator_extra_sources_for_rules", []
|
||||
),
|
||||
"generator_supports_multiple_toolsets": getattr(
|
||||
generator, "generator_supports_multiple_toolsets", False
|
||||
),
|
||||
"generator_wants_static_library_dependencies_adjusted": getattr(
|
||||
generator, "generator_wants_static_library_dependencies_adjusted", True
|
||||
),
|
||||
"generator_wants_sorted_dependencies": getattr(
|
||||
generator, "generator_wants_sorted_dependencies", False
|
||||
),
|
||||
"generator_filelist_paths": getattr(
|
||||
generator, "generator_filelist_paths", None
|
||||
),
|
||||
}
|
||||
|
||||
# Process the input specific to this generator.
|
||||
result = gyp.input.Load(
|
||||
build_files,
|
||||
default_variables,
|
||||
includes[:],
|
||||
depth,
|
||||
generator_input_info,
|
||||
check,
|
||||
circular_check,
|
||||
params["parallel"],
|
||||
params["root_targets"],
|
||||
)
|
||||
return [generator] + result
|
||||
|
||||
|
||||
def NameValueListToDict(name_value_list):
|
||||
"""
|
||||
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
|
||||
of the pairs. If a string is simply NAME, then the value in the dictionary
|
||||
is set to True. If VALUE can be converted to an integer, it is.
|
||||
"""
|
||||
result = {}
|
||||
for item in name_value_list:
|
||||
tokens = item.split("=", 1)
|
||||
if len(tokens) == 2:
|
||||
# If we can make it an int, use that, otherwise, use the string.
|
||||
try:
|
||||
token_value = int(tokens[1])
|
||||
except ValueError:
|
||||
token_value = tokens[1]
|
||||
# Set the variable to the supplied value.
|
||||
result[tokens[0]] = token_value
|
||||
else:
|
||||
# No value supplied, treat it as a boolean and set it.
|
||||
result[tokens[0]] = True
|
||||
return result
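
An illustrative call (not part of the vendored file), assuming the gyp-next package is importable; this mirrors how -D/GYP_DEFINES values are folded into cmdline_default_variables later in gyp_main():

from gyp import NameValueListToDict

print(NameValueListToDict(["OS=win", "component=shared_library",
                           "werror=", "chromeos", "buildtype=1"]))
# {'OS': 'win', 'component': 'shared_library', 'werror': '', 'chromeos': True, 'buildtype': 1}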
|
||||
|
||||
|
||||
def ShlexEnv(env_name):
|
||||
flags = os.environ.get(env_name, [])
|
||||
if flags:
|
||||
flags = shlex.split(flags)
|
||||
return flags
|
||||
|
||||
|
||||
def FormatOpt(opt, value):
|
||||
if opt.startswith("--"):
|
||||
return f"{opt}={value}"
|
||||
return opt + value
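
Two illustrative calls (not part of the vendored file), assuming gyp is importable; they show the long-option versus short-option formatting used during regeneration:

from gyp import FormatOpt

print(FormatOpt("--generator-output", "out"))  # --generator-output=out
print(FormatOpt("-G", "msvs_version=2022"))    # -Gmsvs_version=2022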
|
||||
|
||||
|
||||
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
|
||||
"""Regenerate a list of command line flags, for an option of action='append'.
|
||||
|
||||
The |env_name|, if given, is checked in the environment and used to generate
|
||||
an initial list of options, then the options that were specified on the
|
||||
command line (given in |values|) are appended. This matches the handling of
|
||||
environment variables and command line flags where command line flags override
|
||||
the environment, while not requiring the environment to be set when the flags
|
||||
are used again.
|
||||
"""
|
||||
flags = []
|
||||
if options.use_environment and env_name:
|
||||
for flag_value in ShlexEnv(env_name):
|
||||
value = FormatOpt(flag, predicate(flag_value))
|
||||
if value in flags:
|
||||
flags.remove(value)
|
||||
flags.append(value)
|
||||
if values:
|
||||
for flag_value in values:
|
||||
flags.append(FormatOpt(flag, predicate(flag_value)))
|
||||
return flags
|
||||
|
||||
|
||||
def RegenerateFlags(options):
|
||||
"""Given a parsed options object, and taking the environment variables into
|
||||
account, returns a list of flags that should regenerate an equivalent options
|
||||
object (even in the absence of the environment variables.)
|
||||
|
||||
Any path options will be normalized relative to depth.
|
||||
|
||||
The format flag is not included, as it is assumed the calling generator will
|
||||
set that as appropriate.
|
||||
"""
|
||||
|
||||
def FixPath(path):
|
||||
path = gyp.common.FixIfRelativePath(path, options.depth)
|
||||
if not path:
|
||||
return os.path.curdir
|
||||
return path
|
||||
|
||||
def Noop(value):
|
||||
return value
|
||||
|
||||
# We always want to ignore the environment when regenerating, to avoid
|
||||
# duplicate or changed flags in the environment at the time of regeneration.
|
||||
flags = ["--ignore-environment"]
|
||||
for name, metadata in options._regeneration_metadata.items():
|
||||
opt = metadata["opt"]
|
||||
value = getattr(options, name)
|
||||
value_predicate = metadata["type"] == "path" and FixPath or Noop
|
||||
action = metadata["action"]
|
||||
env_name = metadata["env_name"]
|
||||
if action == "append":
|
||||
flags.extend(
|
||||
RegenerateAppendFlag(opt, value, value_predicate, env_name, options)
|
||||
)
|
||||
elif action in ("store", None): # None is a synonym for 'store'.
|
||||
if value:
|
||||
flags.append(FormatOpt(opt, value_predicate(value)))
|
||||
elif options.use_environment and env_name and os.environ.get(env_name):
|
||||
flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
|
||||
elif action in ("store_true", "store_false"):
|
||||
if (action == "store_true" and value) or (
|
||||
action == "store_false" and not value
|
||||
):
|
||||
flags.append(opt)
|
||||
elif options.use_environment and env_name:
|
||||
print(
|
||||
"Warning: environment regeneration unimplemented "
|
||||
"for %s flag %r env_name %r" % (action, opt, env_name),
|
||||
file=sys.stderr,
|
||||
)
|
||||
else:
|
||||
print(
|
||||
"Warning: regeneration unimplemented for action %r "
|
||||
"flag %r" % (action, opt),
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
return flags
|
||||
|
||||
|
||||
class RegeneratableOptionParser(argparse.ArgumentParser):
|
||||
def __init__(self, usage):
|
||||
self.__regeneratable_options = {}
|
||||
argparse.ArgumentParser.__init__(self, usage=usage)
|
||||
|
||||
def add_argument(self, *args, **kw):
|
||||
"""Add an option to the parser.
|
||||
|
||||
This accepts the same arguments as ArgumentParser.add_argument, plus the
|
||||
following:
|
||||
regenerate: can be set to False to prevent this option from being included
|
||||
in regeneration.
|
||||
env_name: name of environment variable that additional values for this
|
||||
option come from.
|
||||
type: adds type='path', to tell the regenerator that the values of
|
||||
this option need to be made relative to options.depth
|
||||
"""
|
||||
env_name = kw.pop("env_name", None)
|
||||
if "dest" in kw and kw.pop("regenerate", True):
|
||||
dest = kw["dest"]
|
||||
|
||||
# The path type is needed for regenerating; for argparse we can just treat
|
||||
# it as a string.
|
||||
type = kw.get("type")
|
||||
if type == "path":
|
||||
kw["type"] = str
|
||||
|
||||
self.__regeneratable_options[dest] = {
|
||||
"action": kw.get("action"),
|
||||
"type": type,
|
||||
"env_name": env_name,
|
||||
"opt": args[0],
|
||||
}
|
||||
|
||||
argparse.ArgumentParser.add_argument(self, *args, **kw)
|
||||
|
||||
def parse_args(self, *args):
|
||||
values, args = argparse.ArgumentParser.parse_known_args(self, *args)
|
||||
values._regeneration_metadata = self.__regeneratable_options
|
||||
return values, args
|
||||
|
||||
|
||||
def gyp_main(args):
|
||||
my_name = os.path.basename(sys.argv[0])
|
||||
usage = "usage: %(prog)s [options ...] [build_file ...]"
|
||||
|
||||
parser = RegeneratableOptionParser(usage=usage.replace("%s", "%(prog)s"))
|
||||
parser.add_argument(
|
||||
"--build",
|
||||
dest="configs",
|
||||
action="append",
|
||||
help="configuration for build after project generation",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--check", dest="check", action="store_true", help="check format of gyp files"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--config-dir",
|
||||
dest="config_dir",
|
||||
action="store",
|
||||
env_name="GYP_CONFIG_DIR",
|
||||
default=None,
|
||||
help="The location for configuration files like " "include.gypi.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--debug",
|
||||
dest="debug",
|
||||
metavar="DEBUGMODE",
|
||||
action="append",
|
||||
default=[],
|
||||
help="turn on a debugging "
|
||||
'mode for debugging GYP. Supported modes are "variables", '
|
||||
'"includes" and "general" or "all" for all of them.',
|
||||
)
|
||||
parser.add_argument(
|
||||
"-D",
|
||||
dest="defines",
|
||||
action="append",
|
||||
metavar="VAR=VAL",
|
||||
env_name="GYP_DEFINES",
|
||||
help="sets variable VAR to value VAL",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--depth",
|
||||
dest="depth",
|
||||
metavar="PATH",
|
||||
type="path",
|
||||
help="set DEPTH gyp variable to a relative path to PATH",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-f",
|
||||
"--format",
|
||||
dest="formats",
|
||||
action="append",
|
||||
env_name="GYP_GENERATORS",
|
||||
regenerate=False,
|
||||
help="output formats to generate",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-G",
|
||||
dest="generator_flags",
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="FLAG=VAL",
|
||||
env_name="GYP_GENERATOR_FLAGS",
|
||||
help="sets generator flag FLAG to VAL",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--generator-output",
|
||||
dest="generator_output",
|
||||
action="store",
|
||||
default=None,
|
||||
metavar="DIR",
|
||||
type="path",
|
||||
env_name="GYP_GENERATOR_OUTPUT",
|
||||
help="puts generated build files under DIR",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--ignore-environment",
|
||||
dest="use_environment",
|
||||
action="store_false",
|
||||
default=True,
|
||||
regenerate=False,
|
||||
help="do not read options from environment variables",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-I",
|
||||
"--include",
|
||||
dest="includes",
|
||||
action="append",
|
||||
metavar="INCLUDE",
|
||||
type="path",
|
||||
help="files to include in all loaded .gyp files",
|
||||
)
|
||||
# --no-circular-check disables the check for circular relationships between
|
||||
# .gyp files. These relationships should not exist, but they've only been
|
||||
# observed to be harmful with the Xcode generator. Chromium's .gyp files
|
||||
# currently have some circular relationships on non-Mac platforms, so this
|
||||
# option allows the strict behavior to be used on Macs and the lenient
|
||||
# behavior to be used elsewhere.
|
||||
# TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
|
||||
parser.add_argument(
|
||||
"--no-circular-check",
|
||||
dest="circular_check",
|
||||
action="store_false",
|
||||
default=True,
|
||||
regenerate=False,
|
||||
help="don't check for circular relationships between files",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-parallel",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Disable multiprocessing",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-S",
|
||||
"--suffix",
|
||||
dest="suffix",
|
||||
default="",
|
||||
help="suffix to add to generated files",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--toplevel-dir",
|
||||
dest="toplevel_dir",
|
||||
action="store",
|
||||
default=None,
|
||||
metavar="DIR",
|
||||
type="path",
|
||||
help="directory to use as the root of the source tree",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-R",
|
||||
"--root-target",
|
||||
dest="root_targets",
|
||||
action="append",
|
||||
metavar="TARGET",
|
||||
help="include only TARGET and its deep dependencies",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-V",
|
||||
"--version",
|
||||
dest="version",
|
||||
action="store_true",
|
||||
help="Show the version and exit.",
|
||||
)
|
||||
|
||||
options, build_files_arg = parser.parse_args(args)
|
||||
if options.version:
|
||||
import pkg_resources
|
||||
print(f"v{pkg_resources.get_distribution('gyp-next').version}")
|
||||
return 0
|
||||
build_files = build_files_arg
|
||||
|
||||
# Set up the configuration directory (defaults to ~/.gyp)
|
||||
if not options.config_dir:
|
||||
home = None
|
||||
home_dot_gyp = None
|
||||
if options.use_environment:
|
||||
home_dot_gyp = os.environ.get("GYP_CONFIG_DIR", None)
|
||||
if home_dot_gyp:
|
||||
home_dot_gyp = os.path.expanduser(home_dot_gyp)
|
||||
|
||||
if not home_dot_gyp:
|
||||
home_vars = ["HOME"]
|
||||
if sys.platform in ("cygwin", "win32"):
|
||||
home_vars.append("USERPROFILE")
|
||||
for home_var in home_vars:
|
||||
home = os.getenv(home_var)
|
||||
if home:
|
||||
home_dot_gyp = os.path.join(home, ".gyp")
|
||||
if not os.path.exists(home_dot_gyp):
|
||||
home_dot_gyp = None
|
||||
else:
|
||||
break
|
||||
else:
|
||||
home_dot_gyp = os.path.expanduser(options.config_dir)
|
||||
|
||||
if home_dot_gyp and not os.path.exists(home_dot_gyp):
|
||||
home_dot_gyp = None
|
||||
|
||||
if not options.formats:
|
||||
# If no format was given on the command line, then check the env variable.
|
||||
generate_formats = []
|
||||
if options.use_environment:
|
||||
generate_formats = os.environ.get("GYP_GENERATORS", [])
|
||||
if generate_formats:
|
||||
generate_formats = re.split(r"[\s,]", generate_formats)
|
||||
if generate_formats:
|
||||
options.formats = generate_formats
|
||||
else:
|
||||
# Nothing in the variable, default based on platform.
|
||||
if sys.platform == "darwin":
|
||||
options.formats = ["xcode"]
|
||||
elif sys.platform in ("win32", "cygwin"):
|
||||
options.formats = ["msvs"]
|
||||
else:
|
||||
options.formats = ["make"]
|
||||
|
||||
if not options.generator_output and options.use_environment:
|
||||
g_o = os.environ.get("GYP_GENERATOR_OUTPUT")
|
||||
if g_o:
|
||||
options.generator_output = g_o
|
||||
|
||||
options.parallel = not options.no_parallel
|
||||
|
||||
for mode in options.debug:
|
||||
gyp.debug[mode] = 1
|
||||
|
||||
# Do an extra check to avoid work when we're not debugging.
|
||||
if DEBUG_GENERAL in gyp.debug:
|
||||
DebugOutput(DEBUG_GENERAL, "running with these options:")
|
||||
for option, value in sorted(options.__dict__.items()):
|
||||
if option[0] == "_":
|
||||
continue
|
||||
if isinstance(value, str):
|
||||
DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
|
||||
else:
|
||||
DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
|
||||
|
||||
if not build_files:
|
||||
build_files = FindBuildFiles()
|
||||
if not build_files:
|
||||
raise GypError((usage + "\n\n%s: error: no build_file") % (my_name, my_name))
|
||||
|
||||
# TODO(mark): Chromium-specific hack!
|
||||
# For Chromium, the gyp "depth" variable should always be a relative path
|
||||
# to Chromium's top-level "src" directory. If no depth variable was set
|
||||
# on the command line, try to find a "src" directory by looking at the
|
||||
# absolute path to each build file's directory. The first "src" component
|
||||
# found will be treated as though it were the path used for --depth.
|
||||
if not options.depth:
|
||||
for build_file in build_files:
|
||||
build_file_dir = os.path.abspath(os.path.dirname(build_file))
|
||||
build_file_dir_components = build_file_dir.split(os.path.sep)
|
||||
components_len = len(build_file_dir_components)
|
||||
for index in range(components_len - 1, -1, -1):
|
||||
if build_file_dir_components[index] == "src":
|
||||
options.depth = os.path.sep.join(build_file_dir_components)
|
||||
break
|
||||
del build_file_dir_components[index]
|
||||
|
||||
# If the inner loop found something, break without advancing to another
|
||||
# build file.
|
||||
if options.depth:
|
||||
break
|
||||
|
||||
if not options.depth:
|
||||
raise GypError(
|
||||
"Could not automatically locate src directory. This is"
|
||||
"a temporary Chromium feature that will be removed. Use"
|
||||
"--depth as a workaround."
|
||||
)
|
||||
|
||||
# If toplevel-dir is not set, we assume that depth is the root of our source
|
||||
# tree.
|
||||
if not options.toplevel_dir:
|
||||
options.toplevel_dir = options.depth
|
||||
|
||||
# -D on the command line sets variable defaults - D isn't just for define,
|
||||
# it's for default. Perhaps there should be a way to force (-F?) a
|
||||
# variable's value so that it can't be overridden by anything else.
|
||||
cmdline_default_variables = {}
|
||||
defines = []
|
||||
if options.use_environment:
|
||||
defines += ShlexEnv("GYP_DEFINES")
|
||||
if options.defines:
|
||||
defines += options.defines
|
||||
cmdline_default_variables = NameValueListToDict(defines)
|
||||
if DEBUG_GENERAL in gyp.debug:
|
||||
DebugOutput(
|
||||
DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables
|
||||
)
|
||||
|
||||
# Set up includes.
|
||||
includes = []
|
||||
|
||||
# If ~/.gyp/include.gypi exists, it'll be forcibly included into every
|
||||
# .gyp file that's loaded, before anything else is included.
|
||||
if home_dot_gyp:
|
||||
default_include = os.path.join(home_dot_gyp, "include.gypi")
|
||||
if os.path.exists(default_include):
|
||||
print("Using overrides found in " + default_include)
|
||||
includes.append(default_include)
|
||||
|
||||
# Command-line --include files come after the default include.
|
||||
if options.includes:
|
||||
includes.extend(options.includes)
|
||||
|
||||
# Generator flags should be prefixed with the target generator since they
|
||||
# are global across all generator runs.
|
||||
gen_flags = []
|
||||
if options.use_environment:
|
||||
gen_flags += ShlexEnv("GYP_GENERATOR_FLAGS")
|
||||
if options.generator_flags:
|
||||
gen_flags += options.generator_flags
|
||||
generator_flags = NameValueListToDict(gen_flags)
|
||||
if DEBUG_GENERAL in gyp.debug.keys():
|
||||
DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
|
||||
|
||||
# Generate all requested formats (use a set in case we got one format request
|
||||
# twice)
|
||||
for format in set(options.formats):
|
||||
params = {
|
||||
"options": options,
|
||||
"build_files": build_files,
|
||||
"generator_flags": generator_flags,
|
||||
"cwd": os.getcwd(),
|
||||
"build_files_arg": build_files_arg,
|
||||
"gyp_binary": sys.argv[0],
|
||||
"home_dot_gyp": home_dot_gyp,
|
||||
"parallel": options.parallel,
|
||||
"root_targets": options.root_targets,
|
||||
"target_arch": cmdline_default_variables.get("target_arch", ""),
|
||||
}
|
||||
|
||||
# Start with the default variables from the command line.
|
||||
[generator, flat_list, targets, data] = Load(
|
||||
build_files,
|
||||
format,
|
||||
cmdline_default_variables,
|
||||
includes,
|
||||
options.depth,
|
||||
params,
|
||||
options.check,
|
||||
options.circular_check,
|
||||
)
|
||||
|
||||
# TODO(mark): Pass |data| for now because the generator needs a list of
|
||||
# build files that came in. In the future, maybe it should just accept
|
||||
# a list, and not the whole data dict.
|
||||
# NOTE: flat_list is the flattened dependency graph specifying the order
|
||||
# that targets may be built. Build systems that operate serially or that
|
||||
# need to have dependencies defined before dependents reference them should
|
||||
# generate targets in the order specified in flat_list.
|
||||
generator.GenerateOutput(flat_list, targets, data, params)
|
||||
|
||||
if options.configs:
|
||||
valid_configs = targets[flat_list[0]]["configurations"]
|
||||
for conf in options.configs:
|
||||
if conf not in valid_configs:
|
||||
raise GypError("Invalid config specified via --build: %s" % conf)
|
||||
generator.PerformBuild(data, options.configs, params)
|
||||
|
||||
# Done
|
||||
return 0
|
||||
|
||||
|
||||
def main(args):
|
||||
try:
|
||||
return gyp_main(args)
|
||||
except GypError as e:
|
||||
sys.stderr.write("gyp: %s\n" % e)
|
||||
return 1
|
||||
|
||||
|
||||
# NOTE: setuptools generated console_scripts calls function with no arguments
|
||||
def script_main():
|
||||
return main(sys.argv[1:])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(script_main())
|
||||
661
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/common.py
generated
vendored
Normal file
@@ -0,0 +1,661 @@
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import errno
|
||||
import filecmp
|
||||
import os.path
|
||||
import re
|
||||
import tempfile
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
from collections.abc import MutableSet
|
||||
|
||||
|
||||
# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
|
||||
# among other "problems".
|
||||
class memoize:
|
||||
def __init__(self, func):
|
||||
self.func = func
|
||||
self.cache = {}
|
||||
|
||||
def __call__(self, *args):
|
||||
try:
|
||||
return self.cache[args]
|
||||
except KeyError:
|
||||
result = self.func(*args)
|
||||
self.cache[args] = result
|
||||
return result
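
A minimal usage sketch (not part of the vendored file), assuming gyp.common is importable; note the decorator only works for hashable positional arguments:

from gyp.common import memoize

@memoize
def slow_square(n):
    print(f"computing {n}")
    return n * n

slow_square(4)  # prints "computing 4" and caches 16
slow_square(4)  # served from the cache, nothing printed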
|
||||
|
||||
|
||||
class GypError(Exception):
|
||||
"""Error class representing an error, which is to be presented
|
||||
to the user. The main entry point will catch and display this.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def ExceptionAppend(e, msg):
|
||||
"""Append a message to the given exception's message."""
|
||||
if not e.args:
|
||||
e.args = (msg,)
|
||||
elif len(e.args) == 1:
|
||||
e.args = (str(e.args[0]) + " " + msg,)
|
||||
else:
|
||||
e.args = (str(e.args[0]) + " " + msg,) + e.args[1:]
|
||||
|
||||
|
||||
def FindQualifiedTargets(target, qualified_list):
|
||||
"""
|
||||
Given a list of qualified targets, return the qualified targets for the
|
||||
specified |target|.
|
||||
"""
|
||||
return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
|
||||
|
||||
|
||||
def ParseQualifiedTarget(target):
|
||||
# Splits a qualified target into a build file, target name and toolset.
|
||||
|
||||
# NOTE: rsplit is used to disambiguate the Windows drive letter separator.
|
||||
target_split = target.rsplit(":", 1)
|
||||
if len(target_split) == 2:
|
||||
[build_file, target] = target_split
|
||||
else:
|
||||
build_file = None
|
||||
|
||||
target_split = target.rsplit("#", 1)
|
||||
if len(target_split) == 2:
|
||||
[target, toolset] = target_split
|
||||
else:
|
||||
toolset = None
|
||||
|
||||
return [build_file, target, toolset]
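
Illustrative parses (not part of the vendored file), assuming gyp.common is importable; the inputs are made-up qualified target strings in the format this module uses:

from gyp.common import ParseQualifiedTarget

print(ParseQualifiedTarget("base/base.gyp:base#host"))  # ['base/base.gyp', 'base', 'host']
print(ParseQualifiedTarget("base/base.gyp:base"))       # ['base/base.gyp', 'base', None]
print(ParseQualifiedTarget("base"))                     # [None, 'base', None]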
|
||||
|
||||
|
||||
def ResolveTarget(build_file, target, toolset):
|
||||
# This function resolves a target into a canonical form:
|
||||
# - a fully defined build file, either absolute or relative to the current
|
||||
# directory
|
||||
# - a target name
|
||||
# - a toolset
|
||||
#
|
||||
# build_file is the file relative to which 'target' is defined.
|
||||
# target is the qualified target.
|
||||
# toolset is the default toolset for that target.
|
||||
[parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
|
||||
|
||||
if parsed_build_file:
|
||||
if build_file:
|
||||
# If a relative path, parsed_build_file is relative to the directory
|
||||
# containing build_file. If build_file is not in the current directory,
|
||||
# parsed_build_file is not a usable path as-is. Resolve it by
|
||||
# interpreting it as relative to build_file. If parsed_build_file is
|
||||
# absolute, it is usable as a path regardless of the current directory,
|
||||
# and os.path.join will return it as-is.
|
||||
build_file = os.path.normpath(
|
||||
os.path.join(os.path.dirname(build_file), parsed_build_file)
|
||||
)
|
||||
# Further (to handle cases like ../cwd), make it relative to cwd)
|
||||
if not os.path.isabs(build_file):
|
||||
build_file = RelativePath(build_file, ".")
|
||||
else:
|
||||
build_file = parsed_build_file
|
||||
|
||||
if parsed_toolset:
|
||||
toolset = parsed_toolset
|
||||
|
||||
return [build_file, target, toolset]
|
||||
|
||||
|
||||
def BuildFile(fully_qualified_target):
|
||||
# Extracts the build file from the fully qualified target.
|
||||
return ParseQualifiedTarget(fully_qualified_target)[0]
|
||||
|
||||
|
||||
def GetEnvironFallback(var_list, default):
|
||||
"""Look up a key in the environment, with fallback to secondary keys
|
||||
and finally falling back to a default value."""
|
||||
for var in var_list:
|
||||
if var in os.environ:
|
||||
return os.environ[var]
|
||||
return default
|
||||
|
||||
|
||||
def QualifiedTarget(build_file, target, toolset):
|
||||
# "Qualified" means the file that a target was defined in and the target
|
||||
# name, separated by a colon, suffixed by a # and the toolset name:
|
||||
# /path/to/file.gyp:target_name#toolset
|
||||
fully_qualified = build_file + ":" + target
|
||||
if toolset:
|
||||
fully_qualified = fully_qualified + "#" + toolset
|
||||
return fully_qualified
|
||||
|
||||
|
||||
@memoize
|
||||
def RelativePath(path, relative_to, follow_path_symlink=True):
|
||||
# Assuming both |path| and |relative_to| are relative to the current
|
||||
# directory, returns a relative path that identifies path relative to
|
||||
# relative_to.
|
||||
# If |follow_path_symlink| is true (default) and |path| is a symlink, then
|
||||
# this method returns a path to the real file represented by |path|. If it is
|
||||
# false, this method returns a path to the symlink. If |path| is not a
|
||||
# symlink, this option has no effect.
|
||||
|
||||
# Convert to normalized (and therefore absolute paths).
|
||||
if follow_path_symlink:
|
||||
path = os.path.realpath(path)
|
||||
else:
|
||||
path = os.path.abspath(path)
|
||||
relative_to = os.path.realpath(relative_to)
|
||||
|
||||
# On Windows, we can't create a relative path to a different drive, so just
|
||||
# use the absolute path.
|
||||
if sys.platform == "win32":
|
||||
if (
|
||||
os.path.splitdrive(path)[0].lower()
|
||||
!= os.path.splitdrive(relative_to)[0].lower()
|
||||
):
|
||||
return path
|
||||
|
||||
# Split the paths into components.
|
||||
path_split = path.split(os.path.sep)
|
||||
relative_to_split = relative_to.split(os.path.sep)
|
||||
|
||||
# Determine how much of the prefix the two paths share.
|
||||
prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
|
||||
|
||||
# Put enough ".." components to back up out of relative_to to the common
|
||||
# prefix, and then append the part of path_split after the common prefix.
|
||||
relative_split = [os.path.pardir] * (
|
||||
len(relative_to_split) - prefix_len
|
||||
) + path_split[prefix_len:]
|
||||
|
||||
if len(relative_split) == 0:
|
||||
# The paths were the same.
|
||||
return ""
|
||||
|
||||
# Turn it back into a string and we're done.
|
||||
return os.path.join(*relative_split)
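# Illustrative behaviour (not part of the original gyp source), assuming a
# POSIX layout where neither path is a symlink; the directory names are
# hypothetical:
#
#   RelativePath("out/Debug/obj", "out")    => "Debug/obj"
#   RelativePath("out", "out/Debug/obj")    => "../.."
#   RelativePath("out/Debug", "out/Debug")  => ""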
|
||||
|
||||
|
||||
@memoize
|
||||
def InvertRelativePath(path, toplevel_dir=None):
|
||||
"""Given a path like foo/bar that is relative to toplevel_dir, return
|
||||
the inverse relative path back to the toplevel_dir.
|
||||
|
||||
E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
|
||||
should always produce the empty string, unless the path contains symlinks.
|
||||
"""
|
||||
if not path:
|
||||
return path
|
||||
toplevel_dir = "." if toplevel_dir is None else toplevel_dir
|
||||
return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
|
||||
|
||||
|
||||
def FixIfRelativePath(path, relative_to):
|
||||
# Like RelativePath but returns |path| unchanged if it is absolute.
|
||||
if os.path.isabs(path):
|
||||
return path
|
||||
return RelativePath(path, relative_to)
|
||||
|
||||
|
||||
def UnrelativePath(path, relative_to):
|
||||
# Assuming that |relative_to| is relative to the current directory, and |path|
|
||||
# is a path relative to the dirname of |relative_to|, returns a path that
|
||||
# identifies |path| relative to the current directory.
|
||||
rel_dir = os.path.dirname(relative_to)
|
||||
return os.path.normpath(os.path.join(rel_dir, path))
|
||||
|
||||
|
||||
# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
|
||||
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
|
||||
# and the documentation for various shells.
|
||||
|
||||
# _quote is a pattern that should match any argument that needs to be quoted
|
||||
# with double-quotes by EncodePOSIXShellArgument. It matches the following
|
||||
# characters appearing anywhere in an argument:
|
||||
# \t, \n, space parameter separators
|
||||
# # comments
|
||||
# $ expansions (quoted to always expand within one argument)
|
||||
# % called out by IEEE 1003.1 XCU.2.2
|
||||
# & job control
|
||||
# ' quoting
|
||||
# (, ) subshell execution
|
||||
# *, ?, [ pathname expansion
|
||||
# ; command delimiter
|
||||
# <, >, | redirection
|
||||
# = assignment
|
||||
# {, } brace expansion (bash)
|
||||
# ~ tilde expansion
|
||||
# It also matches the empty string, because "" (or '') is the only way to
|
||||
# represent an empty string literal argument to a POSIX shell.
|
||||
#
|
||||
# This does not match the characters in _escape, because those need to be
|
||||
# backslash-escaped regardless of whether they appear in a double-quoted
|
||||
# string.
|
||||
_quote = re.compile("[\t\n #$%&'()*;<=>?[{|}~]|^$")
|
||||
|
||||
# _escape is a pattern that should match any character that needs to be
|
||||
# escaped with a backslash, whether or not the argument matched the _quote
|
||||
# pattern. _escape is used with re.sub to backslash anything in _escape's
|
||||
# first match group, hence the (parentheses) in the regular expression.
|
||||
#
|
||||
# _escape matches the following characters appearing anywhere in an argument:
|
||||
# " to prevent POSIX shells from interpreting this character for quoting
|
||||
# \ to prevent POSIX shells from interpreting this character for escaping
|
||||
# ` to prevent POSIX shells from interpreting this character for command
|
||||
# substitution
|
||||
# Missing from this list is $, because the desired behavior of
|
||||
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
|
||||
#
|
||||
# Also missing from this list is !, which bash will interpret as the history
|
||||
# expansion character when history is enabled. bash does not enable history
|
||||
# by default in non-interactive shells, so this is not thought to be a problem.
|
||||
# ! was omitted from this list because bash interprets "\!" as a literal string
|
||||
# including the backslash character (avoiding history expansion but retaining
|
||||
# the backslash), which would not be correct for argument encoding. Handling
|
||||
# this case properly would also be problematic because bash allows the history
|
||||
# character to be changed with the histchars shell variable. Fortunately,
|
||||
# as history is not enabled in non-interactive shells and
|
||||
# EncodePOSIXShellArgument is only expected to encode for non-interactive
|
||||
# shells, there is no room for error here by ignoring !.
|
||||
_escape = re.compile(r'(["\\`])')
|
||||
|
||||
|
||||
def EncodePOSIXShellArgument(argument):
|
||||
"""Encodes |argument| suitably for consumption by POSIX shells.
|
||||
|
||||
argument may be quoted and escaped as necessary to ensure that POSIX shells
|
||||
treat the returned value as a literal representing the argument passed to
|
||||
this function. Parameter (variable) expansions beginning with $ are allowed
|
||||
to remain intact without escaping the $, to allow the argument to contain
|
||||
references to variables to be expanded by the shell.
|
||||
"""
|
||||
|
||||
if not isinstance(argument, str):
|
||||
argument = str(argument)
|
||||
|
||||
if _quote.search(argument):
|
||||
quote = '"'
|
||||
else:
|
||||
quote = ""
|
||||
|
||||
encoded = quote + re.sub(_escape, r"\\\1", argument) + quote
|
||||
|
||||
return encoded
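# Illustrative examples (not part of the original gyp source) of the encoding
# performed by EncodePOSIXShellArgument; the shown results are the literal
# strings handed to the shell:
#
#   EncodePOSIXShellArgument("simple")     ->  simple
#   EncodePOSIXShellArgument("two words")  ->  "two words"
#   EncodePOSIXShellArgument('say "hi"')   ->  "say \"hi\""
#   EncodePOSIXShellArgument("$HOME/bin")  ->  "$HOME/bin"   ($ kept for expansion)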
|
||||
|
||||
|
||||
def EncodePOSIXShellList(list):
|
||||
"""Encodes |list| suitably for consumption by POSIX shells.
|
||||
|
||||
Returns EncodePOSIXShellArgument for each item in list, and joins them
|
||||
together using the space character as an argument separator.
|
||||
"""
|
||||
|
||||
encoded_arguments = []
|
||||
for argument in list:
|
||||
encoded_arguments.append(EncodePOSIXShellArgument(argument))
|
||||
return " ".join(encoded_arguments)
|
||||
|
||||
|
||||
def DeepDependencyTargets(target_dicts, roots):
|
||||
"""Returns the recursive list of target dependencies."""
|
||||
dependencies = set()
|
||||
pending = set(roots)
|
||||
while pending:
|
||||
# Pluck out one.
|
||||
r = pending.pop()
|
||||
# Skip if visited already.
|
||||
if r in dependencies:
|
||||
continue
|
||||
# Add it.
|
||||
dependencies.add(r)
|
||||
# Add its children.
|
||||
spec = target_dicts[r]
|
||||
pending.update(set(spec.get("dependencies", [])))
|
||||
pending.update(set(spec.get("dependencies_original", [])))
|
||||
return list(dependencies - set(roots))
|
||||
|
||||
|
||||
def BuildFileTargets(target_list, build_file):
|
||||
"""From a target_list, returns the subset from the specified build_file.
|
||||
"""
|
||||
return [p for p in target_list if BuildFile(p) == build_file]
|
||||
|
||||
|
||||
def AllTargets(target_list, target_dicts, build_file):
|
||||
"""Returns all targets (direct and dependencies) for the specified build_file.
|
||||
"""
|
||||
bftargets = BuildFileTargets(target_list, build_file)
|
||||
deptargets = DeepDependencyTargets(target_dicts, bftargets)
|
||||
return bftargets + deptargets
|
||||
|
||||
|
||||
def WriteOnDiff(filename):
|
||||
"""Write to a file only if the new contents differ.
|
||||
|
||||
Arguments:
|
||||
filename: name of the file to potentially write to.
|
||||
Returns:
|
||||
A file like object which will write to a temporary file and only overwrite
|
||||
the target if it differs (on close).
|
||||
"""
|
||||
|
||||
class Writer:
|
||||
"""Wrapper around file which only covers the target if it differs."""
|
||||
|
||||
def __init__(self):
|
||||
# On Cygwin remove the "dir" argument
|
||||
# `C:` prefixed paths are treated as relative,
|
||||
# consequently ending up with current dir "/cygdrive/c/..."
|
||||
# being prefixed to those, which was
|
||||
# obviously a non-existent path,
|
||||
# for example: "/cygdrive/c/<some folder>/C:\<my win style abs path>".
|
||||
# For more details see:
|
||||
# https://docs.python.org/2/library/tempfile.html#tempfile.mkstemp
|
||||
base_temp_dir = "" if IsCygwin() else os.path.dirname(filename)
|
||||
# Pick temporary file.
|
||||
tmp_fd, self.tmp_path = tempfile.mkstemp(
|
||||
suffix=".tmp",
|
||||
prefix=os.path.split(filename)[1] + ".gyp.",
|
||||
dir=base_temp_dir,
|
||||
)
|
||||
try:
|
||||
self.tmp_file = os.fdopen(tmp_fd, "wb")
|
||||
except Exception:
|
||||
# Don't leave turds behind.
|
||||
os.unlink(self.tmp_path)
|
||||
raise
|
||||
|
||||
def __getattr__(self, attrname):
|
||||
# Delegate everything else to self.tmp_file
|
||||
return getattr(self.tmp_file, attrname)
|
||||
|
||||
def close(self):
|
||||
try:
|
||||
# Close tmp file.
|
||||
self.tmp_file.close()
|
||||
# Determine if different.
|
||||
same = False
|
||||
try:
|
||||
same = filecmp.cmp(self.tmp_path, filename, False)
|
||||
except OSError as e:
|
||||
if e.errno != errno.ENOENT:
|
||||
raise
|
||||
|
||||
if same:
|
||||
# The new file is identical to the old one, just get rid of the new
|
||||
# one.
|
||||
os.unlink(self.tmp_path)
|
||||
else:
|
||||
# The new file is different from the old one,
|
||||
# or there is no old one.
|
||||
# Rename the new file to the permanent name.
|
||||
#
|
||||
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
|
||||
# file that can only be read by the owner, regardless of the umask.
|
||||
# There's no reason to not respect the umask here,
|
||||
# which means that an extra hoop is required
|
||||
# to fetch it and reset the new file's mode.
|
||||
#
|
||||
# No way to get the umask without setting a new one? Set a safe one
|
||||
# and then set it back to the old value.
|
||||
umask = os.umask(0o77)
|
||||
os.umask(umask)
|
||||
os.chmod(self.tmp_path, 0o666 & ~umask)
|
||||
if sys.platform == "win32" and os.path.exists(filename):
|
||||
# NOTE: on windows (but not cygwin) rename will not replace an
|
||||
# existing file, so it must be preceded with a remove.
|
||||
# Sadly there is no way to make the switch atomic.
|
||||
os.remove(filename)
|
||||
os.rename(self.tmp_path, filename)
|
||||
except Exception:
|
||||
# Don't leave turds behind.
|
||||
os.unlink(self.tmp_path)
|
||||
raise
|
||||
|
||||
def write(self, s):
|
||||
self.tmp_file.write(s.encode("utf-8"))
|
||||
|
||||
return Writer()
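# Illustrative usage (not part of the original gyp source): a minimal sketch of
# how a generator might use WriteOnDiff so that unchanged output files keep
# their timestamps; the file name is hypothetical:
#
#   out = WriteOnDiff("build.ninja")
#   out.write("rule cc\n")
#   out.close()  # only replaces build.ninja if the contents actually changed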
|
||||
|
||||
|
||||
def EnsureDirExists(path):
|
||||
"""Make sure the directory for |path| exists."""
|
||||
try:
|
||||
os.makedirs(os.path.dirname(path))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
def GetFlavor(params):
|
||||
"""Returns |params.flavor| if it's set, the system's default flavor else."""
|
||||
flavors = {
|
||||
"cygwin": "win",
|
||||
"win32": "win",
|
||||
"darwin": "mac",
|
||||
}
|
||||
|
||||
if "flavor" in params:
|
||||
return params["flavor"]
|
||||
if sys.platform in flavors:
|
||||
return flavors[sys.platform]
|
||||
if sys.platform.startswith("sunos"):
|
||||
return "solaris"
|
||||
if sys.platform.startswith(("dragonfly", "freebsd")):
|
||||
return "freebsd"
|
||||
if sys.platform.startswith("openbsd"):
|
||||
return "openbsd"
|
||||
if sys.platform.startswith("netbsd"):
|
||||
return "netbsd"
|
||||
if sys.platform.startswith("aix"):
|
||||
return "aix"
|
||||
if sys.platform.startswith(("os390", "zos")):
|
||||
return "zos"
|
||||
if sys.platform == "os400":
|
||||
return "os400"
|
||||
|
||||
return "linux"
|
||||
|
||||
|
||||
def CopyTool(flavor, out_path, generator_flags={}):
|
||||
"""Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
|
||||
to |out_path|."""
|
||||
# aix and solaris just need flock emulation. mac and win use more complicated
|
||||
# support scripts.
|
||||
prefix = {
|
||||
"aix": "flock",
|
||||
"os400": "flock",
|
||||
"solaris": "flock",
|
||||
"mac": "mac",
|
||||
"ios": "mac",
|
||||
"win": "win",
|
||||
}.get(flavor, None)
|
||||
if not prefix:
|
||||
return
|
||||
|
||||
# Slurp input file.
|
||||
source_path = os.path.join(
|
||||
os.path.dirname(os.path.abspath(__file__)), "%s_tool.py" % prefix
|
||||
)
|
||||
with open(source_path) as source_file:
|
||||
source = source_file.readlines()
|
||||
|
||||
# Set custom header flags.
|
||||
header = "# Generated by gyp. Do not edit.\n"
|
||||
mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
|
||||
if flavor == "mac" and mac_toolchain_dir:
|
||||
header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" % mac_toolchain_dir
|
||||
|
||||
# Add header and write it out.
|
||||
tool_path = os.path.join(out_path, "gyp-%s-tool" % prefix)
|
||||
with open(tool_path, "w") as tool_file:
|
||||
tool_file.write("".join([source[0], header] + source[1:]))
|
||||
|
||||
# Make file executable.
|
||||
os.chmod(tool_path, 0o755)
|
||||
|
||||
|
||||
# From Alex Martelli,
|
||||
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
|
||||
# ASPN: Python Cookbook: Remove duplicates from a sequence
|
||||
# First comment, dated 2001/10/13.
|
||||
# (Also in the printed Python Cookbook.)
|
||||
|
||||
|
||||
def uniquer(seq, idfun=lambda x: x):
|
||||
seen = {}
|
||||
result = []
|
||||
for item in seq:
|
||||
marker = idfun(item)
|
||||
if marker in seen:
|
||||
continue
|
||||
seen[marker] = 1
|
||||
result.append(item)
|
||||
return result
|
||||
|
||||
|
||||
# Based on http://code.activestate.com/recipes/576694/.
|
||||
class OrderedSet(MutableSet):
|
||||
def __init__(self, iterable=None):
|
||||
self.end = end = []
|
||||
end += [None, end, end] # sentinel node for doubly linked list
|
||||
self.map = {} # key --> [key, prev, next]
|
||||
if iterable is not None:
|
||||
self |= iterable
|
||||
|
||||
def __len__(self):
|
||||
return len(self.map)
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self.map
|
||||
|
||||
def add(self, key):
|
||||
if key not in self.map:
|
||||
end = self.end
|
||||
curr = end[1]
|
||||
curr[2] = end[1] = self.map[key] = [key, curr, end]
|
||||
|
||||
def discard(self, key):
|
||||
if key in self.map:
|
||||
key, prev_item, next_item = self.map.pop(key)
|
||||
prev_item[2] = next_item
|
||||
next_item[1] = prev_item
|
||||
|
||||
def __iter__(self):
|
||||
end = self.end
|
||||
curr = end[2]
|
||||
while curr is not end:
|
||||
yield curr[0]
|
||||
curr = curr[2]
|
||||
|
||||
def __reversed__(self):
|
||||
end = self.end
|
||||
curr = end[1]
|
||||
while curr is not end:
|
||||
yield curr[0]
|
||||
curr = curr[1]
|
||||
|
||||
# The second argument is an addition that causes a pylint warning.
|
||||
def pop(self, last=True): # pylint: disable=W0221
|
||||
if not self:
|
||||
raise KeyError("set is empty")
|
||||
key = self.end[1][0] if last else self.end[2][0]
|
||||
self.discard(key)
|
||||
return key
|
||||
|
||||
def __repr__(self):
|
||||
if not self:
|
||||
return f"{self.__class__.__name__}()"
|
||||
return f"{self.__class__.__name__}({list(self)!r})"
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, OrderedSet):
|
||||
return len(self) == len(other) and list(self) == list(other)
|
||||
return set(self) == set(other)
|
||||
|
||||
# Extensions to the recipe.
|
||||
def update(self, iterable):
|
||||
for i in iterable:
|
||||
if i not in self:
|
||||
self.add(i)
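# Illustrative behaviour (not part of the original gyp source): OrderedSet
# keeps insertion order while rejecting duplicates.
#
#   s = OrderedSet(["b", "a", "b", "c"])
#   list(s)         => ["b", "a", "c"]
#   s.add("a")      # already present, order unchanged
#   s.discard("b")
#   list(s)         => ["a", "c"]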
|
||||
|
||||
|
||||
class CycleError(Exception):
|
||||
"""An exception raised when an unexpected cycle is detected."""
|
||||
|
||||
def __init__(self, nodes):
|
||||
self.nodes = nodes
|
||||
|
||||
def __str__(self):
|
||||
return "CycleError: cycle involving: " + str(self.nodes)
|
||||
|
||||
|
||||
def TopologicallySorted(graph, get_edges):
|
||||
r"""Topologically sort based on a user provided edge definition.
|
||||
|
||||
Args:
|
||||
graph: A list of node names.
|
||||
get_edges: A function mapping from node name to a hashable collection
|
||||
of node names which this node has outgoing edges to.
|
||||
Returns:
|
||||
A list containing all of the nodes in graph in topological order.
|
||||
It is assumed that calling get_edges once for each node and caching is
|
||||
cheaper than repeatedly calling get_edges.
|
||||
Raises:
|
||||
CycleError in the event of a cycle.
|
||||
Example:
|
||||
graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
|
||||
def GetEdges(node):
|
||||
return re.findall(r'\$\(([^)]*)\)', graph[node])
print(TopologicallySorted(graph.keys(), GetEdges))
==>
['a', 'c', 'b']
|
||||
"""
|
||||
get_edges = memoize(get_edges)
|
||||
visited = set()
|
||||
visiting = set()
|
||||
ordered_nodes = []
|
||||
|
||||
def Visit(node):
|
||||
if node in visiting:
|
||||
raise CycleError(visiting)
|
||||
if node in visited:
|
||||
return
|
||||
visited.add(node)
|
||||
visiting.add(node)
|
||||
for neighbor in get_edges(node):
|
||||
Visit(neighbor)
|
||||
visiting.remove(node)
|
||||
ordered_nodes.insert(0, node)
|
||||
|
||||
for node in sorted(graph):
|
||||
Visit(node)
|
||||
return ordered_nodes
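# Illustrative, runnable version (not part of the original gyp source) of the
# docstring example above:
#
#   graph = {"a": "$(b) $(c)", "b": "hi", "c": "$(b)"}
#   def GetEdges(node):
#       return re.findall(r"\$\(([^)]*)\)", graph[node])
#   TopologicallySorted(sorted(graph), GetEdges)  => ["a", "c", "b"]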
|
||||
|
||||
|
||||
def CrossCompileRequested():
|
||||
# TODO: figure out how to not build extra host objects in the
|
||||
# non-cross-compile case when this is enabled, and enable unconditionally.
|
||||
return (
|
||||
os.environ.get("GYP_CROSSCOMPILE")
|
||||
or os.environ.get("AR_host")
|
||||
or os.environ.get("CC_host")
|
||||
or os.environ.get("CXX_host")
|
||||
or os.environ.get("AR_target")
|
||||
or os.environ.get("CC_target")
|
||||
or os.environ.get("CXX_target")
|
||||
)
|
||||
|
||||
|
||||
def IsCygwin():
|
||||
try:
|
||||
out = subprocess.Popen(
|
||||
"uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
stdout = out.communicate()[0].decode("utf-8")
|
||||
return "CYGWIN" in str(stdout)
|
||||
except Exception:
|
||||
return False
|
||||
78
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
generated
vendored
Executable file
78
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
generated
vendored
Executable file
@@ -0,0 +1,78 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Unit tests for the common.py file."""
|
||||
|
||||
import gyp.common
|
||||
import unittest
|
||||
import sys
|
||||
|
||||
|
||||
class TestTopologicallySorted(unittest.TestCase):
|
||||
def test_Valid(self):
|
||||
"""Test that sorting works on a valid graph with one possible order."""
|
||||
graph = {
|
||||
"a": ["b", "c"],
|
||||
"b": [],
|
||||
"c": ["d"],
|
||||
"d": ["b"],
|
||||
}
|
||||
|
||||
def GetEdge(node):
|
||||
return tuple(graph[node])
|
||||
|
||||
self.assertEqual(
|
||||
gyp.common.TopologicallySorted(graph.keys(), GetEdge), ["a", "c", "d", "b"]
|
||||
)
|
||||
|
||||
def test_Cycle(self):
|
||||
"""Test that an exception is thrown on a cyclic graph."""
|
||||
graph = {
|
||||
"a": ["b"],
|
||||
"b": ["c"],
|
||||
"c": ["d"],
|
||||
"d": ["a"],
|
||||
}
|
||||
|
||||
def GetEdge(node):
|
||||
return tuple(graph[node])
|
||||
|
||||
self.assertRaises(
|
||||
gyp.common.CycleError, gyp.common.TopologicallySorted, graph.keys(), GetEdge
|
||||
)
|
||||
|
||||
|
||||
class TestGetFlavor(unittest.TestCase):
|
||||
"""Test that gyp.common.GetFlavor works as intended"""
|
||||
|
||||
original_platform = ""
|
||||
|
||||
def setUp(self):
|
||||
self.original_platform = sys.platform
|
||||
|
||||
def tearDown(self):
|
||||
sys.platform = self.original_platform
|
||||
|
||||
def assertFlavor(self, expected, argument, param):
|
||||
sys.platform = argument
|
||||
self.assertEqual(expected, gyp.common.GetFlavor(param))
|
||||
|
||||
def test_platform_default(self):
|
||||
self.assertFlavor("freebsd", "freebsd9", {})
|
||||
self.assertFlavor("freebsd", "freebsd10", {})
|
||||
self.assertFlavor("openbsd", "openbsd5", {})
|
||||
self.assertFlavor("solaris", "sunos5", {})
|
||||
self.assertFlavor("solaris", "sunos", {})
|
||||
self.assertFlavor("linux", "linux2", {})
|
||||
self.assertFlavor("linux", "linux3", {})
|
||||
self.assertFlavor("linux", "linux", {})
|
||||
|
||||
def test_param(self):
|
||||
self.assertFlavor("foobar", "linux2", {"flavor": "foobar"})
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
165
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
generated
vendored
Normal file
165
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
generated
vendored
Normal file
@@ -0,0 +1,165 @@
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import sys
|
||||
import re
|
||||
import os
|
||||
import locale
|
||||
from functools import reduce
|
||||
|
||||
|
||||
def XmlToString(content, encoding="utf-8", pretty=False):
|
||||
""" Writes the XML content to disk, touching the file only if it has changed.
|
||||
|
||||
Visual Studio files have a lot of pre-defined structures. This function makes
|
||||
it easy to represent these structures as Python data structures, instead of
|
||||
having to create a lot of function calls.
|
||||
|
||||
Each XML element of the content is represented as a list composed of:
|
||||
1. The name of the element, a string,
|
||||
2. The attributes of the element, a dictionary (optional), and
|
||||
3+. The content of the element, if any. Strings are simple text nodes and
|
||||
lists are child elements.
|
||||
|
||||
Example 1:
|
||||
<test/>
|
||||
becomes
|
||||
['test']
|
||||
|
||||
Example 2:
|
||||
<myelement a='value1' b='value2'>
|
||||
<childtype>This is</childtype>
|
||||
<childtype>it!</childtype>
|
||||
</myelement>
|
||||
|
||||
becomes
|
||||
['myelement', {'a':'value1', 'b':'value2'},
|
||||
['childtype', 'This is'],
|
||||
['childtype', 'it!'],
|
||||
]
|
||||
|
||||
Args:
|
||||
content: The structured content to be converted.
|
||||
encoding: The encoding to report on the first XML line.
|
||||
pretty: True if we want pretty printing with indents and new lines.
|
||||
|
||||
Returns:
|
||||
The XML content as a string.
|
||||
"""
|
||||
# We create a huge list of all the elements of the file.
|
||||
xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
|
||||
if pretty:
|
||||
xml_parts.append("\n")
|
||||
_ConstructContentList(xml_parts, content, pretty)
|
||||
|
||||
# Convert it to a string
|
||||
return "".join(xml_parts)
|
||||
|
||||
|
||||
def _ConstructContentList(xml_parts, specification, pretty, level=0):
|
||||
""" Appends the XML parts corresponding to the specification.
|
||||
|
||||
Args:
|
||||
xml_parts: A list of XML parts to be appended to.
|
||||
specification: The specification of the element. See EasyXml docs.
|
||||
pretty: True if we want pretty printing with indents and new lines.
|
||||
level: Indentation level.
|
||||
"""
|
||||
# The first item in a specification is the name of the element.
|
||||
if pretty:
|
||||
indentation = " " * level
|
||||
new_line = "\n"
|
||||
else:
|
||||
indentation = ""
|
||||
new_line = ""
|
||||
name = specification[0]
|
||||
if not isinstance(name, str):
|
||||
raise Exception(
|
||||
"The first item of an EasyXml specification should be "
|
||||
"a string. Specification was " + str(specification)
|
||||
)
|
||||
xml_parts.append(indentation + "<" + name)
|
||||
|
||||
# Optionally in second position is a dictionary of the attributes.
|
||||
rest = specification[1:]
|
||||
if rest and isinstance(rest[0], dict):
|
||||
for at, val in sorted(rest[0].items()):
|
||||
xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"')
|
||||
rest = rest[1:]
|
||||
if rest:
|
||||
xml_parts.append(">")
|
||||
all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
|
||||
multi_line = not all_strings
|
||||
if multi_line and new_line:
|
||||
xml_parts.append(new_line)
|
||||
for child_spec in rest:
|
||||
# If it's a string, append a text node.
|
||||
# Otherwise recurse over that child definition
|
||||
if isinstance(child_spec, str):
|
||||
xml_parts.append(_XmlEscape(child_spec))
|
||||
else:
|
||||
_ConstructContentList(xml_parts, child_spec, pretty, level + 1)
|
||||
if multi_line and indentation:
|
||||
xml_parts.append(indentation)
|
||||
xml_parts.append(f"</{name}>{new_line}")
|
||||
else:
|
||||
xml_parts.append("/>%s" % new_line)
|
||||
|
||||
|
||||
def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
|
||||
win32=(sys.platform == "win32")):
|
||||
""" Writes the XML content to disk, touching the file only if it has changed.
|
||||
|
||||
Args:
|
||||
content: The structured content to be written.
|
||||
path: Location of the file.
|
||||
encoding: The encoding to report on the first line of the XML file.
|
||||
pretty: True if we want pretty printing with indents and new lines.
|
||||
"""
|
||||
xml_string = XmlToString(content, encoding, pretty)
|
||||
if win32 and os.linesep != "\r\n":
|
||||
xml_string = xml_string.replace("\n", "\r\n")
|
||||
|
||||
default_encoding = locale.getdefaultlocale()[1]
|
||||
if default_encoding and default_encoding.upper() != encoding.upper():
|
||||
xml_string = xml_string.encode(encoding)
|
||||
|
||||
# Get the old content
|
||||
try:
|
||||
with open(path) as file:
|
||||
existing = file.read()
|
||||
except OSError:
|
||||
existing = None
|
||||
|
||||
# It has changed, write it
|
||||
if existing != xml_string:
|
||||
with open(path, "wb") as file:
|
||||
file.write(xml_string)
|
||||
|
||||
|
||||
_xml_escape_map = {
    '"': "&quot;",
    "'": "&apos;",
    "<": "&lt;",
    ">": "&gt;",
    "&": "&amp;",
    "\n": "&#xA;",
    "\r": "&#xD;",
}
|
||||
|
||||
|
||||
_xml_escape_re = re.compile("(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
|
||||
|
||||
|
||||
def _XmlEscape(value, attr=False):
|
||||
""" Escape a string for inclusion in XML."""
|
||||
|
||||
def replace(match):
|
||||
m = match.string[match.start() : match.end()]
|
||||
# don't replace single quotes in attrs
|
||||
if attr and m == "'":
|
||||
return m
|
||||
return _xml_escape_map[m]
|
||||
|
||||
return _xml_escape_re.sub(replace, value)
|
||||
109
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
generated
vendored
Executable file
109
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
generated
vendored
Executable file
@@ -0,0 +1,109 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the easy_xml.py file. """
|
||||
|
||||
import gyp.easy_xml as easy_xml
|
||||
import unittest
|
||||
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class TestSequenceFunctions(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.stderr = StringIO()
|
||||
|
||||
def test_EasyXml_simple(self):
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test"]),
|
||||
'<?xml version="1.0" encoding="utf-8"?><test/>',
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test"], encoding="Windows-1252"),
|
||||
'<?xml version="1.0" encoding="Windows-1252"?><test/>',
|
||||
)
|
||||
|
||||
def test_EasyXml_simple_with_attributes(self):
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test2", {"a": "value1", "b": "value2"}]),
|
||||
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>',
|
||||
)
|
||||
|
||||
def test_EasyXml_escaping(self):
|
||||
original = "<test>'\"\r&\nfoo"
|
||||
converted = "<test>'"
&
foo"
|
||||
converted_apos = converted.replace("'", "'")
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test3", {"a": original}, original]),
|
||||
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>'
|
||||
% (converted, converted_apos),
|
||||
)
|
||||
|
||||
def test_EasyXml_pretty(self):
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(
|
||||
["test3", ["GrandParent", ["Parent1", ["Child"]], ["Parent2"]]],
|
||||
pretty=True,
|
||||
),
|
||||
'<?xml version="1.0" encoding="utf-8"?>\n'
|
||||
"<test3>\n"
|
||||
" <GrandParent>\n"
|
||||
" <Parent1>\n"
|
||||
" <Child/>\n"
|
||||
" </Parent1>\n"
|
||||
" <Parent2/>\n"
|
||||
" </GrandParent>\n"
|
||||
"</test3>\n",
|
||||
)
|
||||
|
||||
def test_EasyXml_complex(self):
|
||||
# We want to create:
|
||||
target = (
|
||||
'<?xml version="1.0" encoding="utf-8"?>'
|
||||
"<Project>"
|
||||
'<PropertyGroup Label="Globals">'
|
||||
"<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>"
|
||||
"<Keyword>Win32Proj</Keyword>"
|
||||
"<RootNamespace>automated_ui_tests</RootNamespace>"
|
||||
"</PropertyGroup>"
|
||||
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
|
||||
"<PropertyGroup "
|
||||
"Condition=\"'$(Configuration)|$(Platform)'=="
|
||||
'\'Debug|Win32\'" Label="Configuration">'
|
||||
"<ConfigurationType>Application</ConfigurationType>"
|
||||
"<CharacterSet>Unicode</CharacterSet>"
|
||||
"</PropertyGroup>"
|
||||
"</Project>"
|
||||
)
|
||||
|
||||
xml = easy_xml.XmlToString(
|
||||
[
|
||||
"Project",
|
||||
[
|
||||
"PropertyGroup",
|
||||
{"Label": "Globals"},
|
||||
["ProjectGuid", "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}"],
|
||||
["Keyword", "Win32Proj"],
|
||||
["RootNamespace", "automated_ui_tests"],
|
||||
],
|
||||
["Import", {"Project": "$(VCTargetsPath)\\Microsoft.Cpp.props"}],
|
||||
[
|
||||
"PropertyGroup",
|
||||
{
|
||||
"Condition": "'$(Configuration)|$(Platform)'=='Debug|Win32'",
|
||||
"Label": "Configuration",
|
||||
},
|
||||
["ConfigurationType", "Application"],
|
||||
["CharacterSet", "Unicode"],
|
||||
],
|
||||
]
|
||||
)
|
||||
self.assertEqual(xml, target)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
55
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
generated
vendored
Executable file
55
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
generated
vendored
Executable file
@@ -0,0 +1,55 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""These functions are executed via gyp-flock-tool when using the Makefile
|
||||
generator. Used on systems that don't have a built-in flock."""
|
||||
|
||||
import fcntl
|
||||
import os
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def main(args):
|
||||
executor = FlockTool()
|
||||
executor.Dispatch(args)
|
||||
|
||||
|
||||
class FlockTool:
|
||||
"""This class emulates the 'flock' command."""
|
||||
|
||||
def Dispatch(self, args):
|
||||
"""Dispatches a string command to a method."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
method = "Exec%s" % self._CommandifyName(args[0])
|
||||
getattr(self, method)(*args[1:])
|
||||
|
||||
def _CommandifyName(self, name_string):
|
||||
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
|
||||
return name_string.title().replace("-", "")
|
||||
|
||||
def ExecFlock(self, lockfile, *cmd_list):
|
||||
"""Emulates the most basic behavior of Linux's flock(1)."""
|
||||
# Rely on exception handling to report errors.
|
||||
# Note that the stock python on SunOS has a bug
|
||||
# where fcntl.flock(fd, LOCK_EX) always fails
|
||||
# with EBADF, that's why we use this F_SETLK
|
||||
# hack instead.
|
||||
fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
|
||||
if sys.platform.startswith("aix") or sys.platform == "os400":
|
||||
# Python on AIX is compiled with LARGEFILE support, which changes the
|
||||
# struct size.
|
||||
op = struct.pack("hhIllqq", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
|
||||
else:
|
||||
op = struct.pack("hhllhhl", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
|
||||
fcntl.fcntl(fd, fcntl.F_SETLK, op)
|
||||
return subprocess.call(cmd_list)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
0
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
generated
vendored
Normal file
0
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
generated
vendored
Normal file
808
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
generated
vendored
Normal file
808
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
generated
vendored
Normal file
@@ -0,0 +1,808 @@
|
||||
# Copyright (c) 2014 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""
|
||||
This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
|
||||
the generator flag config_path) the path of a json file that dictates the files
|
||||
and targets to search for. The following keys are supported:
|
||||
files: list of paths (relative) of the files to search for.
|
||||
test_targets: unqualified target names to search for. Any target in this list
|
||||
that depends upon a file in |files| is output regardless of the type of target
|
||||
or chain of dependencies.
|
||||
additional_compile_targets: Unqualified targets to search for in addition to
|
||||
test_targets. Targets in the combined list that depend upon a file in |files|
|
||||
are not necessarily output. For example, if the target is of type none then the
|
||||
target is not output (but one of the descendants of the target will be).
|
||||
|
||||
The following is output:
|
||||
error: only supplied if there is an error.
|
||||
compile_targets: minimal set of targets that directly or indirectly (for
|
||||
targets of type none) depend on the files in |files| and is one of the
|
||||
supplied targets or a target that one of the supplied targets depends on.
|
||||
The expectation is this set of targets is passed into a build step. This list
|
||||
always contains the output of test_targets as well.
|
||||
test_targets: set of targets from the supplied |test_targets| that either
|
||||
directly or indirectly depend upon a file in |files|. This list is useful
|
||||
if additional processing needs to be done for certain targets after the
|
||||
build, such as running tests.
|
||||
status: outputs one of three values: none of the supplied files were found,
|
||||
one of the include files changed so that it should be assumed everything
|
||||
changed (in this case test_targets and compile_targets are not output) or at
|
||||
least one file was found.
|
||||
invalid_targets: list of supplied targets that were not found.
|
||||
|
||||
Example:
|
||||
Consider a graph like the following:
|
||||
A D
|
||||
/ \
|
||||
B C
|
||||
A depends upon both B and C, A is of type none and B and C are executables.
|
||||
D is an executable, has no dependencies and nothing depends on it.
|
||||
If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
|
||||
files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
|
||||
the following is output:
|
||||
|compile_targets| = ["B"] B must built as it depends upon the changed file b.cc
|
||||
and the supplied target A depends upon it. A is not output as a build_target
|
||||
as it is of type none with no rules and actions.
|
||||
|test_targets| = ["B"] B directly depends upon the change file b.cc.
|
||||
|
||||
Even though the file d.cc, which D depends upon, has changed, D is not output
|
||||
as it was not supplied by way of |additional_compile_targets| or |test_targets|.
|
||||
|
||||
If the generator flag analyzer_output_path is specified, output is written
|
||||
there. Otherwise output is written to stdout.
|
||||
|
||||
In Gyp the "all" target is shorthand for the root targets in the files passed
|
||||
to gyp. For example, if file "a.gyp" contains targets "a1" and
|
||||
"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
|
||||
on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
|
||||
Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
|
||||
directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
|
||||
then the "all" target includes "b1" and "b2".
|
||||
"""
|
||||
|
||||
|
||||
import gyp.common
|
||||
import json
|
||||
import os
|
||||
import posixpath
|
||||
|
||||
debug = False
|
||||
|
||||
found_dependency_string = "Found dependency"
|
||||
no_dependency_string = "No dependencies"
|
||||
# Status when it should be assumed that everything has changed.
|
||||
all_changed_string = "Found dependency (all)"
|
||||
|
||||
# MatchStatus is used to indicate if and how a target depends upon the supplied
|
||||
# sources.
|
||||
# The target's sources contain one of the supplied paths.
|
||||
MATCH_STATUS_MATCHES = 1
|
||||
# The target has a dependency on another target that contains one of the
|
||||
# supplied paths.
|
||||
MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
|
||||
# The target's sources weren't in the supplied paths and none of the target's
|
||||
# dependencies depend upon a target that matched.
|
||||
MATCH_STATUS_DOESNT_MATCH = 3
|
||||
# The target doesn't contain the source, but the dependent targets have not yet
# been visited to determine a more specific status.
|
||||
MATCH_STATUS_TBD = 4
|
||||
|
||||
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
|
||||
|
||||
generator_wants_static_library_dependencies_adjusted = False
|
||||
|
||||
generator_default_variables = {}
|
||||
for dirname in [
|
||||
"INTERMEDIATE_DIR",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"LIB_DIR",
|
||||
"SHARED_LIB_DIR",
|
||||
]:
|
||||
generator_default_variables[dirname] = "!!!"
|
||||
|
||||
for unused in [
|
||||
"RULE_INPUT_PATH",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"CONFIGURATION_NAME",
|
||||
]:
|
||||
generator_default_variables[unused] = ""
|
||||
|
||||
|
||||
def _ToGypPath(path):
|
||||
"""Converts a path to the format used by gyp."""
|
||||
if os.sep == "\\" and os.altsep == "/":
|
||||
return path.replace("\\", "/")
|
||||
return path
|
||||
|
||||
|
||||
def _ResolveParent(path, base_path_components):
|
||||
"""Resolves |path|, which starts with at least one '../'. Returns an empty
|
||||
string if the path shouldn't be considered. See _AddSources() for a
|
||||
description of |base_path_components|."""
|
||||
depth = 0
|
||||
while path.startswith("../"):
|
||||
depth += 1
|
||||
path = path[3:]
|
||||
# Relative includes may go outside the source tree. For example, an action may
|
||||
# have inputs in /usr/include, which are not in the source tree.
|
||||
if depth > len(base_path_components):
|
||||
return ""
|
||||
if depth == len(base_path_components):
|
||||
return path
|
||||
return (
|
||||
"/".join(base_path_components[0 : len(base_path_components) - depth])
|
||||
+ "/"
|
||||
+ path
|
||||
)
|
||||
|
||||
|
||||
def _AddSources(sources, base_path, base_path_components, result):
|
||||
"""Extracts valid sources from |sources| and adds them to |result|. Each
|
||||
source file is relative to |base_path|, but may contain '..'. To make
|
||||
resolving '..' easier |base_path_components| contains each of the
|
||||
directories in |base_path|. Additionally each source may contain variables.
|
||||
Such sources are ignored as it is assumed dependencies on them are expressed
|
||||
and tracked in some other means."""
|
||||
# NOTE: gyp paths are always posix style.
|
||||
for source in sources:
|
||||
if not len(source) or source.startswith("!!!") or source.startswith("$"):
|
||||
continue
|
||||
# variable expansion may lead to //.
|
||||
org_source = source
|
||||
source = source[0] + source[1:].replace("//", "/")
|
||||
if source.startswith("../"):
|
||||
source = _ResolveParent(source, base_path_components)
|
||||
if len(source):
|
||||
result.append(source)
|
||||
continue
|
||||
result.append(base_path + source)
|
||||
if debug:
|
||||
print("AddSource", org_source, result[len(result) - 1])
|
||||
|
||||
|
||||
def _ExtractSourcesFromAction(action, base_path, base_path_components, results):
|
||||
if "inputs" in action:
|
||||
_AddSources(action["inputs"], base_path, base_path_components, results)
|
||||
|
||||
|
||||
def _ToLocalPath(toplevel_dir, path):
|
||||
"""Converts |path| to a path relative to |toplevel_dir|."""
|
||||
if path == toplevel_dir:
|
||||
return ""
|
||||
if path.startswith(toplevel_dir + "/"):
|
||||
return path[len(toplevel_dir) + len("/") :]
|
||||
return path
|
||||
|
||||
|
||||
def _ExtractSources(target, target_dict, toplevel_dir):
|
||||
# |target| is either absolute or relative and in the format of the OS. Gyp
|
||||
# source paths are always posix. Convert |target| to a posix path relative to
|
||||
# |toplevel_dir_|. This is done to make it easy to build source paths.
|
||||
base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
|
||||
base_path_components = base_path.split("/")
|
||||
|
||||
# Add a trailing '/' so that _AddSources() can easily build paths.
|
||||
if len(base_path):
|
||||
base_path += "/"
|
||||
|
||||
if debug:
|
||||
print("ExtractSources", target, base_path)
|
||||
|
||||
results = []
|
||||
if "sources" in target_dict:
|
||||
_AddSources(target_dict["sources"], base_path, base_path_components, results)
|
||||
# Include the inputs from any actions. Any changes to these affect the
|
||||
# resulting output.
|
||||
if "actions" in target_dict:
|
||||
for action in target_dict["actions"]:
|
||||
_ExtractSourcesFromAction(action, base_path, base_path_components, results)
|
||||
if "rules" in target_dict:
|
||||
for rule in target_dict["rules"]:
|
||||
_ExtractSourcesFromAction(rule, base_path, base_path_components, results)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
class Target:
|
||||
"""Holds information about a particular target:
|
||||
deps: set of Targets this Target depends upon. This is not recursive, only the
|
||||
direct dependent Targets.
|
||||
match_status: one of the MatchStatus values.
|
||||
back_deps: set of Targets that have a dependency on this Target.
|
||||
visited: used during iteration to indicate whether we've visited this target.
|
||||
This is used for two iterations, once in building the set of Targets and
|
||||
again in _GetBuildTargets().
|
||||
name: fully qualified name of the target.
|
||||
requires_build: True if the target type is such that it needs to be built.
|
||||
See _DoesTargetTypeRequireBuild for details.
|
||||
added_to_compile_targets: used when determining if the target was added to the
|
||||
set of targets that needs to be built.
|
||||
in_roots: true if this target is a descendant of one of the root nodes.
|
||||
is_executable: true if the type of target is executable.
|
||||
is_static_library: true if the type of target is static_library.
|
||||
is_or_has_linked_ancestor: true if the target does a link (eg executable), or
|
||||
if there is a target in back_deps that does a link."""
|
||||
|
||||
def __init__(self, name):
|
||||
self.deps = set()
|
||||
self.match_status = MATCH_STATUS_TBD
|
||||
self.back_deps = set()
|
||||
self.name = name
|
||||
# TODO(sky): I don't like hanging this off Target. This state is specific
|
||||
# to certain functions and should be isolated there.
|
||||
self.visited = False
|
||||
self.requires_build = False
|
||||
self.added_to_compile_targets = False
|
||||
self.in_roots = False
|
||||
self.is_executable = False
|
||||
self.is_static_library = False
|
||||
self.is_or_has_linked_ancestor = False
|
||||
|
||||
|
||||
class Config:
|
||||
"""Details what we're looking for
|
||||
files: set of files to search for
|
||||
targets: see file description for details."""
|
||||
|
||||
def __init__(self):
|
||||
self.files = []
|
||||
self.targets = set()
|
||||
self.additional_compile_target_names = set()
|
||||
self.test_target_names = set()
|
||||
|
||||
def Init(self, params):
|
||||
"""Initializes Config. This is a separate method as it raises an exception
|
||||
if there is a parse error."""
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
config_path = generator_flags.get("config_path", None)
|
||||
if not config_path:
|
||||
return
|
||||
try:
|
||||
f = open(config_path)
|
||||
config = json.load(f)
|
||||
f.close()
|
||||
except OSError:
|
||||
raise Exception("Unable to open file " + config_path)
|
||||
except ValueError as e:
|
||||
raise Exception("Unable to parse config file " + config_path + str(e))
|
||||
if not isinstance(config, dict):
|
||||
raise Exception("config_path must be a JSON file containing a dictionary")
|
||||
self.files = config.get("files", [])
|
||||
self.additional_compile_target_names = set(
|
||||
config.get("additional_compile_targets", [])
|
||||
)
|
||||
self.test_target_names = set(config.get("test_targets", []))
|
||||
|
||||
|
||||
def _WasBuildFileModified(build_file, data, files, toplevel_dir):
|
||||
"""Returns true if the build file |build_file| is either in |files| or
|
||||
one of the files included by |build_file| is in |files|. |toplevel_dir| is
|
||||
the root of the source tree."""
|
||||
if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
|
||||
if debug:
|
||||
print("gyp file modified", build_file)
|
||||
return True
|
||||
|
||||
# First element of included_files is the file itself.
|
||||
if len(data[build_file]["included_files"]) <= 1:
|
||||
return False
|
||||
|
||||
for include_file in data[build_file]["included_files"][1:]:
|
||||
# |included_files| are relative to the directory of the |build_file|.
|
||||
rel_include_file = _ToGypPath(
|
||||
gyp.common.UnrelativePath(include_file, build_file)
|
||||
)
|
||||
if _ToLocalPath(toplevel_dir, rel_include_file) in files:
|
||||
if debug:
|
||||
print(
|
||||
"included gyp file modified, gyp_file=",
|
||||
build_file,
|
||||
"included file=",
|
||||
rel_include_file,
|
||||
)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _GetOrCreateTargetByName(targets, target_name):
|
||||
"""Creates or returns the Target at targets[target_name]. If there is no
|
||||
Target for |target_name| one is created. Returns a tuple of whether a new
|
||||
Target was created and the Target."""
|
||||
if target_name in targets:
|
||||
return False, targets[target_name]
|
||||
target = Target(target_name)
|
||||
targets[target_name] = target
|
||||
return True, target
|
||||
|
||||
|
||||
def _DoesTargetTypeRequireBuild(target_dict):
|
||||
"""Returns true if the target type is such that it needs to be built."""
|
||||
# If a 'none' target has rules or actions we assume it requires a build.
|
||||
return bool(
|
||||
target_dict["type"] != "none"
|
||||
or target_dict.get("actions")
|
||||
or target_dict.get("rules")
|
||||
)
|
||||
|
||||
|
||||
def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files):
|
||||
"""Returns a tuple of the following:
|
||||
. A dictionary mapping from fully qualified name to Target.
|
||||
. A list of the targets that have a source file in |files|.
|
||||
. Targets that constitute the 'all' target. See description at top of file
|
||||
for details on the 'all' target.
|
||||
This sets the |match_status| of the targets that contain any of the source
|
||||
files in |files| to MATCH_STATUS_MATCHES.
|
||||
|toplevel_dir| is the root of the source tree."""
|
||||
# Maps from target name to Target.
|
||||
name_to_target = {}
|
||||
|
||||
# Targets that matched.
|
||||
matching_targets = []
|
||||
|
||||
# Queue of targets to visit.
|
||||
targets_to_visit = target_list[:]
|
||||
|
||||
# Maps from build file to a boolean indicating whether the build file is in
|
||||
# |files|.
|
||||
build_file_in_files = {}
|
||||
|
||||
# Root targets across all files.
|
||||
roots = set()
|
||||
|
||||
# Set of Targets in |build_files|.
|
||||
build_file_targets = set()
|
||||
|
||||
while len(targets_to_visit) > 0:
|
||||
target_name = targets_to_visit.pop()
|
||||
created_target, target = _GetOrCreateTargetByName(name_to_target, target_name)
|
||||
if created_target:
|
||||
roots.add(target)
|
||||
elif target.visited:
|
||||
continue
|
||||
|
||||
target.visited = True
|
||||
target.requires_build = _DoesTargetTypeRequireBuild(target_dicts[target_name])
|
||||
target_type = target_dicts[target_name]["type"]
|
||||
target.is_executable = target_type == "executable"
|
||||
target.is_static_library = target_type == "static_library"
|
||||
target.is_or_has_linked_ancestor = (
|
||||
target_type == "executable" or target_type == "shared_library"
|
||||
)
|
||||
|
||||
build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
|
||||
if build_file not in build_file_in_files:
|
||||
build_file_in_files[build_file] = _WasBuildFileModified(
|
||||
build_file, data, files, toplevel_dir
|
||||
)
|
||||
|
||||
if build_file in build_files:
|
||||
build_file_targets.add(target)
|
||||
|
||||
# If a build file (or any of its included files) is modified we assume all
|
||||
# targets in the file are modified.
|
||||
if build_file_in_files[build_file]:
|
||||
print("matching target from modified build file", target_name)
|
||||
target.match_status = MATCH_STATUS_MATCHES
|
||||
matching_targets.append(target)
|
||||
else:
|
||||
sources = _ExtractSources(
|
||||
target_name, target_dicts[target_name], toplevel_dir
|
||||
)
|
||||
for source in sources:
|
||||
if _ToGypPath(os.path.normpath(source)) in files:
|
||||
print("target", target_name, "matches", source)
|
||||
target.match_status = MATCH_STATUS_MATCHES
|
||||
matching_targets.append(target)
|
||||
break
|
||||
|
||||
# Add dependencies to visit as well as updating back pointers for deps.
|
||||
for dep in target_dicts[target_name].get("dependencies", []):
|
||||
targets_to_visit.append(dep)
|
||||
|
||||
created_dep_target, dep_target = _GetOrCreateTargetByName(
|
||||
name_to_target, dep
|
||||
)
|
||||
if not created_dep_target:
|
||||
roots.discard(dep_target)
|
||||
|
||||
target.deps.add(dep_target)
|
||||
dep_target.back_deps.add(target)
|
||||
|
||||
return name_to_target, matching_targets, roots & build_file_targets
|
||||
|
||||
|
||||
def _GetUnqualifiedToTargetMapping(all_targets, to_find):
|
||||
"""Returns a tuple of the following:
|
||||
. mapping (dictionary) from unqualified name to Target for all the
|
||||
Targets in |to_find|.
|
||||
. any target names not found. If this is empty all targets were found."""
|
||||
result = {}
|
||||
if not to_find:
|
||||
return {}, []
|
||||
to_find = set(to_find)
|
||||
for target_name in all_targets.keys():
|
||||
extracted = gyp.common.ParseQualifiedTarget(target_name)
|
||||
if len(extracted) > 1 and extracted[1] in to_find:
|
||||
to_find.remove(extracted[1])
|
||||
result[extracted[1]] = all_targets[target_name]
|
||||
if not to_find:
|
||||
return result, []
|
||||
return result, [x for x in to_find]
|
||||
|
||||
|
||||
def _DoesTargetDependOnMatchingTargets(target):
|
||||
"""Returns true if |target| or any of its dependencies is one of the
|
||||
targets containing the files supplied as input to analyzer. This updates
|
||||
|matches| of the Targets as it recurses.
|
||||
target: the Target to look for."""
|
||||
if target.match_status == MATCH_STATUS_DOESNT_MATCH:
|
||||
return False
|
||||
if (
|
||||
target.match_status == MATCH_STATUS_MATCHES
|
||||
or target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY
|
||||
):
|
||||
return True
|
||||
for dep in target.deps:
|
||||
if _DoesTargetDependOnMatchingTargets(dep):
|
||||
target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
|
||||
print("\t", target.name, "matches by dep", dep.name)
|
||||
return True
|
||||
target.match_status = MATCH_STATUS_DOESNT_MATCH
|
||||
return False
|
||||
|
||||
|
||||
def _GetTargetsDependingOnMatchingTargets(possible_targets):
|
||||
"""Returns the list of Targets in |possible_targets| that depend (either
|
||||
directly or indirectly) on at least one of the targets containing the files
|
||||
supplied as input to analyzer.
|
||||
possible_targets: targets to search from."""
|
||||
found = []
|
||||
print("Targets that matched by dependency:")
|
||||
for target in possible_targets:
|
||||
if _DoesTargetDependOnMatchingTargets(target):
|
||||
found.append(target)
|
||||
return found
|
||||
|
||||
|
||||
def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
|
||||
"""Recurses through all targets that depend on |target|, adding all targets
|
||||
that need to be built (and are in |roots|) to |result|.
|
||||
roots: set of root targets.
|
||||
add_if_no_ancestor: If true and there are no ancestors of |target| then add
|
||||
|target| to |result|. |target| must still be in |roots|.
|
||||
result: targets that need to be built are added here."""
|
||||
if target.visited:
|
||||
return
|
||||
|
||||
target.visited = True
|
||||
target.in_roots = target in roots
|
||||
|
||||
for back_dep_target in target.back_deps:
|
||||
_AddCompileTargets(back_dep_target, roots, False, result)
|
||||
target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
|
||||
target.in_roots |= back_dep_target.in_roots
|
||||
target.is_or_has_linked_ancestor |= back_dep_target.is_or_has_linked_ancestor
|
||||
|
||||
# Always add 'executable' targets. Even though they may be built by other
|
||||
# targets that depend upon them, it makes detection of what is going to be
|
||||
# built easier.
|
||||
# And always add static_libraries that have no dependencies on them from
|
||||
# linkables. This is necessary as the other dependencies on them may be
|
||||
# static libraries themselves, which are not compile time dependencies.
|
||||
if target.in_roots and (
|
||||
target.is_executable
|
||||
or (
|
||||
not target.added_to_compile_targets
|
||||
and (add_if_no_ancestor or target.requires_build)
|
||||
)
|
||||
or (
|
||||
target.is_static_library
|
||||
and add_if_no_ancestor
|
||||
and not target.is_or_has_linked_ancestor
|
||||
)
|
||||
):
|
||||
print(
|
||||
"\t\tadding to compile targets",
|
||||
target.name,
|
||||
"executable",
|
||||
target.is_executable,
|
||||
"added_to_compile_targets",
|
||||
target.added_to_compile_targets,
|
||||
"add_if_no_ancestor",
|
||||
add_if_no_ancestor,
|
||||
"requires_build",
|
||||
target.requires_build,
|
||||
"is_static_library",
|
||||
target.is_static_library,
|
||||
"is_or_has_linked_ancestor",
|
||||
target.is_or_has_linked_ancestor,
|
||||
)
|
||||
result.add(target)
|
||||
target.added_to_compile_targets = True
|
||||
|
||||
|
||||
def _GetCompileTargets(matching_targets, supplied_targets):
|
||||
"""Returns the set of Targets that require a build.
|
||||
matching_targets: targets that changed and need to be built.
|
||||
supplied_targets: set of targets supplied to analyzer to search from."""
|
||||
result = set()
|
||||
for target in matching_targets:
|
||||
print("finding compile targets for match", target.name)
|
||||
_AddCompileTargets(target, supplied_targets, True, result)
|
||||
return result
|
||||
|
||||
|
||||
def _WriteOutput(params, **values):
|
||||
"""Writes the output, either to stdout or a file is specified."""
|
||||
if "error" in values:
|
||||
print("Error:", values["error"])
|
||||
if "status" in values:
|
||||
print(values["status"])
|
||||
if "targets" in values:
|
||||
values["targets"].sort()
|
||||
print("Supplied targets that depend on changed files:")
|
||||
for target in values["targets"]:
|
||||
print("\t", target)
|
||||
if "invalid_targets" in values:
|
||||
values["invalid_targets"].sort()
|
||||
print("The following targets were not found:")
|
||||
for target in values["invalid_targets"]:
|
||||
print("\t", target)
|
||||
if "build_targets" in values:
|
||||
values["build_targets"].sort()
|
||||
print("Targets that require a build:")
|
||||
for target in values["build_targets"]:
|
||||
print("\t", target)
|
||||
if "compile_targets" in values:
|
||||
values["compile_targets"].sort()
|
||||
print("Targets that need to be built:")
|
||||
for target in values["compile_targets"]:
|
||||
print("\t", target)
|
||||
if "test_targets" in values:
|
||||
values["test_targets"].sort()
|
||||
print("Test targets:")
|
||||
for target in values["test_targets"]:
|
||||
print("\t", target)
|
||||
|
||||
output_path = params.get("generator_flags", {}).get("analyzer_output_path", None)
|
||||
if not output_path:
|
||||
print(json.dumps(values))
|
||||
return
|
||||
try:
|
||||
f = open(output_path, "w")
|
||||
f.write(json.dumps(values) + "\n")
|
||||
f.close()
|
||||
except OSError as e:
|
||||
print("Error writing to output file", output_path, str(e))
|
||||
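# Editor's note: illustrative sketch, not part of gyp. It only shows the shape of
# the JSON object _WriteOutput emits; the key names come from the calls above,
# while the status value and target names are assumed examples. (json is already
# imported by this module.)
_example_output = {
    "status": "Found dependency",  # value of found_dependency_string (assumed)
    "test_targets": ["foo_tests"],
    "compile_targets": ["foo_tests", "foo"],
}
print(json.dumps(_example_output))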
|
||||
|
||||
def _WasGypIncludeFileModified(params, files):
|
||||
"""Returns true if one of the files in |files| is in the set of included
|
||||
files."""
|
||||
if params["options"].includes:
|
||||
for include in params["options"].includes:
|
||||
if _ToGypPath(os.path.normpath(include)) in files:
|
||||
print("Include file modified, assuming all changed", include)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _NamesNotIn(names, mapping):
|
||||
"""Returns a list of the values in |names| that are not in |mapping|."""
|
||||
return [name for name in names if name not in mapping]
|
||||
|
||||
|
||||
def _LookupTargets(names, mapping):
|
||||
"""Returns a list of the mapping[name] for each value in |names| that is in
|
||||
|mapping|."""
|
||||
return [mapping[name] for name in names if name in mapping]
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
"""Calculate additional variables for use in the build (called by gyp)."""
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "mac":
|
||||
default_variables.setdefault("OS", "mac")
|
||||
elif flavor == "win":
|
||||
default_variables.setdefault("OS", "win")
|
||||
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
|
||||
else:
|
||||
operating_system = flavor
|
||||
if flavor == "android":
|
||||
operating_system = "linux" # Keep this legacy behavior for now.
|
||||
default_variables.setdefault("OS", operating_system)
|
||||
|
||||
|
||||
class TargetCalculator:
|
||||
"""Calculates the matching test_targets and matching compile_targets."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
files,
|
||||
additional_compile_target_names,
|
||||
test_target_names,
|
||||
data,
|
||||
target_list,
|
||||
target_dicts,
|
||||
toplevel_dir,
|
||||
build_files,
|
||||
):
|
||||
self._additional_compile_target_names = set(additional_compile_target_names)
|
||||
self._test_target_names = set(test_target_names)
|
||||
(
|
||||
self._name_to_target,
|
||||
self._changed_targets,
|
||||
self._root_targets,
|
||||
) = _GenerateTargets(
|
||||
data, target_list, target_dicts, toplevel_dir, frozenset(files), build_files
|
||||
)
|
||||
(
|
||||
self._unqualified_mapping,
|
||||
self.invalid_targets,
|
||||
) = _GetUnqualifiedToTargetMapping(
|
||||
self._name_to_target, self._supplied_target_names_no_all()
|
||||
)
|
||||
|
||||
def _supplied_target_names(self):
|
||||
return self._additional_compile_target_names | self._test_target_names
|
||||
|
||||
def _supplied_target_names_no_all(self):
|
||||
"""Returns the supplied test targets without 'all'."""
|
||||
result = self._supplied_target_names()
|
||||
result.discard("all")
|
||||
return result
|
||||
|
||||
def is_build_impacted(self):
|
||||
"""Returns true if the supplied files impact the build at all."""
|
||||
return self._changed_targets
|
||||
|
||||
def find_matching_test_target_names(self):
|
||||
"""Returns the set of output test targets."""
|
||||
assert self.is_build_impacted()
|
||||
# Find the test targets first. 'all' is special cased to mean all the
|
||||
# root targets. To deal with 'all', the supplied |test_targets| are expanded
|
||||
# to include the root targets during lookup. If any of the root targets
|
||||
# match, we remove them and replace them with 'all'.
|
||||
test_target_names_no_all = set(self._test_target_names)
|
||||
test_target_names_no_all.discard("all")
|
||||
test_targets_no_all = _LookupTargets(
|
||||
test_target_names_no_all, self._unqualified_mapping
|
||||
)
|
||||
test_target_names_contains_all = "all" in self._test_target_names
|
||||
if test_target_names_contains_all:
|
||||
test_targets = [
|
||||
x for x in (set(test_targets_no_all) | set(self._root_targets))
|
||||
]
|
||||
else:
|
||||
test_targets = [x for x in test_targets_no_all]
|
||||
print("supplied test_targets")
|
||||
for target_name in self._test_target_names:
|
||||
print("\t", target_name)
|
||||
print("found test_targets")
|
||||
for target in test_targets:
|
||||
print("\t", target.name)
|
||||
print("searching for matching test targets")
|
||||
matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
|
||||
matching_test_targets_contains_all = test_target_names_contains_all and set(
|
||||
matching_test_targets
|
||||
) & set(self._root_targets)
|
||||
if matching_test_targets_contains_all:
|
||||
# Remove any of the targets for 'all' that were not explicitly supplied;
|
||||
# 'all' is subsequently added to the matching names below.
|
||||
matching_test_targets = [
|
||||
x for x in (set(matching_test_targets) & set(test_targets_no_all))
|
||||
]
|
||||
print("matched test_targets")
|
||||
for target in matching_test_targets:
|
||||
print("\t", target.name)
|
||||
matching_target_names = [
|
||||
gyp.common.ParseQualifiedTarget(target.name)[1]
|
||||
for target in matching_test_targets
|
||||
]
|
||||
if matching_test_targets_contains_all:
|
||||
matching_target_names.append("all")
|
||||
print("\tall")
|
||||
return matching_target_names
|
||||
|
||||
def find_matching_compile_target_names(self):
|
||||
"""Returns the set of output compile targets."""
|
||||
assert self.is_build_impacted()
|
||||
# Compile targets are found by searching up from changed targets.
|
||||
# Reset the visited status for _GetCompileTargets.
|
||||
for target in self._name_to_target.values():
|
||||
target.visited = False
|
||||
|
||||
supplied_targets = _LookupTargets(
|
||||
self._supplied_target_names_no_all(), self._unqualified_mapping
|
||||
)
|
||||
if "all" in self._supplied_target_names():
|
||||
supplied_targets = [
|
||||
x for x in (set(supplied_targets) | set(self._root_targets))
|
||||
]
|
||||
print("Supplied test_targets & compile_targets")
|
||||
for target in supplied_targets:
|
||||
print("\t", target.name)
|
||||
print("Finding compile targets")
|
||||
compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets)
|
||||
return [
|
||||
gyp.common.ParseQualifiedTarget(target.name)[1]
|
||||
for target in compile_targets
|
||||
]
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
"""Called by gyp as the final stage. Outputs results."""
|
||||
config = Config()
|
||||
try:
|
||||
config.Init(params)
|
||||
|
||||
if not config.files:
|
||||
raise Exception(
|
||||
"Must specify files to analyze via config_path generator " "flag"
|
||||
)
|
||||
|
||||
toplevel_dir = _ToGypPath(os.path.abspath(params["options"].toplevel_dir))
|
||||
if debug:
|
||||
print("toplevel_dir", toplevel_dir)
|
||||
|
||||
if _WasGypIncludeFileModified(params, config.files):
|
||||
result_dict = {
|
||||
"status": all_changed_string,
|
||||
"test_targets": list(config.test_target_names),
|
||||
"compile_targets": list(
|
||||
config.additional_compile_target_names | config.test_target_names
|
||||
),
|
||||
}
|
||||
_WriteOutput(params, **result_dict)
|
||||
return
|
||||
|
||||
calculator = TargetCalculator(
|
||||
config.files,
|
||||
config.additional_compile_target_names,
|
||||
config.test_target_names,
|
||||
data,
|
||||
target_list,
|
||||
target_dicts,
|
||||
toplevel_dir,
|
||||
params["build_files"],
|
||||
)
|
||||
if not calculator.is_build_impacted():
|
||||
result_dict = {
|
||||
"status": no_dependency_string,
|
||||
"test_targets": [],
|
||||
"compile_targets": [],
|
||||
}
|
||||
if calculator.invalid_targets:
|
||||
result_dict["invalid_targets"] = calculator.invalid_targets
|
||||
_WriteOutput(params, **result_dict)
|
||||
return
|
||||
|
||||
test_target_names = calculator.find_matching_test_target_names()
|
||||
compile_target_names = calculator.find_matching_compile_target_names()
|
||||
found_at_least_one_target = compile_target_names or test_target_names
|
||||
result_dict = {
|
||||
"test_targets": test_target_names,
|
||||
"status": found_dependency_string
|
||||
if found_at_least_one_target
|
||||
else no_dependency_string,
|
||||
"compile_targets": list(set(compile_target_names) | set(test_target_names)),
|
||||
}
|
||||
if calculator.invalid_targets:
|
||||
result_dict["invalid_targets"] = calculator.invalid_targets
|
||||
_WriteOutput(params, **result_dict)
|
||||
|
||||
except Exception as e:
|
||||
_WriteOutput(params, error=str(e))
|
||||
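# Editor's note: illustrative sketch, not part of gyp, showing one plausible way
# to drive this generator. The flag spellings (-f analyzer, -G config_path=...,
# -G analyzer_output_path=...) and the config keys below are assumptions based on
# the Config handling and error message above, not verified documentation.
#
#   analyzer_config.json:
#     {"files": ["foo/foo.cc"],
#      "test_targets": ["foo_tests"],
#      "additional_compile_targets": ["all"]}
#
#   gyp -f analyzer -G config_path=analyzer_config.json \
#       -G analyzer_output_path=analyzer_output.json foo/foo.gyp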
1173
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
generated
vendored
Normal file
File diff suppressed because it is too large
1321
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
generated
vendored
Normal file
File diff suppressed because it is too large
120
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
generated
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
# Copyright (c) 2016 Ben Noordhuis <info@bnoordhuis.nl>. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import gyp.common
|
||||
import gyp.xcode_emulation
|
||||
import json
|
||||
import os
|
||||
|
||||
generator_additional_non_configuration_keys = []
|
||||
generator_additional_path_sections = []
|
||||
generator_extra_sources_for_rules = []
|
||||
generator_filelist_paths = None
|
||||
generator_supports_multiple_toolsets = True
|
||||
generator_wants_sorted_dependencies = False
|
||||
|
||||
# Lifted from make.py. The actual values don't matter much.
|
||||
generator_default_variables = {
|
||||
"CONFIGURATION_NAME": "$(BUILDTYPE)",
|
||||
"EXECUTABLE_PREFIX": "",
|
||||
"EXECUTABLE_SUFFIX": "",
|
||||
"INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni",
|
||||
"PRODUCT_DIR": "$(builddir)",
|
||||
"RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s",
|
||||
"RULE_INPUT_EXT": "$(suffix $<)",
|
||||
"RULE_INPUT_NAME": "$(notdir $<)",
|
||||
"RULE_INPUT_PATH": "$(abspath $<)",
|
||||
"RULE_INPUT_ROOT": "%(INPUT_ROOT)s",
|
||||
"SHARED_INTERMEDIATE_DIR": "$(obj)/gen",
|
||||
"SHARED_LIB_PREFIX": "lib",
|
||||
"STATIC_LIB_PREFIX": "lib",
|
||||
"STATIC_LIB_SUFFIX": ".a",
|
||||
}
|
||||
|
||||
|
||||
def IsMac(params):
|
||||
return "mac" == gyp.common.GetFlavor(params)
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
default_variables.setdefault("OS", gyp.common.GetFlavor(params))
|
||||
|
||||
|
||||
def AddCommandsForTarget(cwd, target, params, per_config_commands):
|
||||
output_dir = params["generator_flags"].get("output_dir", "out")
|
||||
for configuration_name, configuration in target["configurations"].items():
|
||||
if IsMac(params):
|
||||
xcode_settings = gyp.xcode_emulation.XcodeSettings(target)
|
||||
cflags = xcode_settings.GetCflags(configuration_name)
|
||||
cflags_c = xcode_settings.GetCflagsC(configuration_name)
|
||||
cflags_cc = xcode_settings.GetCflagsCC(configuration_name)
|
||||
else:
|
||||
cflags = configuration.get("cflags", [])
|
||||
cflags_c = configuration.get("cflags_c", [])
|
||||
cflags_cc = configuration.get("cflags_cc", [])
|
||||
|
||||
cflags_c = cflags + cflags_c
|
||||
cflags_cc = cflags + cflags_cc
|
||||
|
||||
defines = configuration.get("defines", [])
|
||||
defines = ["-D" + s for s in defines]
|
||||
|
||||
# TODO(bnoordhuis) Handle generated source files.
|
||||
extensions = (".c", ".cc", ".cpp", ".cxx")
|
||||
sources = [s for s in target.get("sources", []) if s.endswith(extensions)]
|
||||
|
||||
def resolve(filename):
|
||||
return os.path.abspath(os.path.join(cwd, filename))
|
||||
|
||||
# TODO(bnoordhuis) Handle generated header files.
|
||||
include_dirs = configuration.get("include_dirs", [])
|
||||
include_dirs = [s for s in include_dirs if not s.startswith("$(obj)")]
|
||||
includes = ["-I" + resolve(s) for s in include_dirs]
|
||||
|
||||
defines = gyp.common.EncodePOSIXShellList(defines)
|
||||
includes = gyp.common.EncodePOSIXShellList(includes)
|
||||
cflags_c = gyp.common.EncodePOSIXShellList(cflags_c)
|
||||
cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc)
|
||||
|
||||
commands = per_config_commands.setdefault(configuration_name, [])
|
||||
for source in sources:
|
||||
file = resolve(source)
|
||||
isc = source.endswith(".c")
|
||||
cc = "cc" if isc else "c++"
|
||||
cflags = cflags_c if isc else cflags_cc
|
||||
command = " ".join(
|
||||
(
|
||||
cc,
|
||||
defines,
|
||||
includes,
|
||||
cflags,
|
||||
"-c",
|
||||
gyp.common.EncodePOSIXShellArgument(file),
|
||||
)
|
||||
)
|
||||
commands.append(dict(command=command, directory=output_dir, file=file))
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
per_config_commands = {}
|
||||
for qualified_target, target in target_dicts.items():
|
||||
build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
|
||||
qualified_target
|
||||
)
|
||||
if IsMac(params):
|
||||
settings = data[build_file]
|
||||
gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target)
|
||||
cwd = os.path.dirname(build_file)
|
||||
AddCommandsForTarget(cwd, target, params, per_config_commands)
|
||||
|
||||
output_dir = params["generator_flags"].get("output_dir", "out")
|
||||
for configuration_name, commands in per_config_commands.items():
|
||||
filename = os.path.join(output_dir, configuration_name, "compile_commands.json")
|
||||
gyp.common.EnsureDirExists(filename)
|
||||
fp = open(filename, "w")
|
||||
json.dump(commands, fp=fp, indent=0, check_circular=False)
|
||||
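# Editor's note: illustrative sketch, not part of gyp. Each entry appended in
# AddCommandsForTarget is a {"command", "directory", "file"} object, the shape
# tools such as clangd expect in a compile_commands.json; values are made up.
#
#   [
#     {"command": "c++ -DFOO -I/abs/include -c /abs/src/bar.cc",
#      "directory": "out",
#      "file": "/abs/src/bar.cc"}
#   ]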
|
||||
|
||||
def PerformBuild(data, configurations, params):
|
||||
pass
|
||||
103
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
generated
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
import os
|
||||
import gyp
|
||||
import gyp.common
|
||||
import gyp.msvs_emulation
|
||||
import json
|
||||
|
||||
generator_supports_multiple_toolsets = True
|
||||
|
||||
generator_wants_static_library_dependencies_adjusted = False
|
||||
|
||||
generator_filelist_paths = {}
|
||||
|
||||
generator_default_variables = {}
|
||||
for dirname in [
|
||||
"INTERMEDIATE_DIR",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"LIB_DIR",
|
||||
"SHARED_LIB_DIR",
|
||||
]:
|
||||
# Some gyp steps fail if these are empty(!).
|
||||
generator_default_variables[dirname] = "dir"
|
||||
for unused in [
|
||||
"RULE_INPUT_PATH",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"CONFIGURATION_NAME",
|
||||
]:
|
||||
generator_default_variables[unused] = ""
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for key, val in generator_flags.items():
|
||||
default_variables.setdefault(key, val)
|
||||
default_variables.setdefault("OS", gyp.common.GetFlavor(params))
|
||||
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "win":
|
||||
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
|
||||
|
||||
|
||||
def CalculateGeneratorInputInfo(params):
|
||||
"""Calculate the generator specific info that gets fed to input (called by
|
||||
gyp)."""
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
if generator_flags.get("adjust_static_libraries", False):
|
||||
global generator_wants_static_library_dependencies_adjusted
|
||||
generator_wants_static_library_dependencies_adjusted = True
|
||||
|
||||
toplevel = params["options"].toplevel_dir
|
||||
generator_dir = os.path.relpath(params["options"].generator_output or ".")
|
||||
# output_dir: relative path from generator_dir to the build directory.
|
||||
output_dir = generator_flags.get("output_dir", "out")
|
||||
qualified_out_dir = os.path.normpath(
|
||||
os.path.join(toplevel, generator_dir, output_dir, "gypfiles")
|
||||
)
|
||||
global generator_filelist_paths
|
||||
generator_filelist_paths = {
|
||||
"toplevel": toplevel,
|
||||
"qualified_out_dir": qualified_out_dir,
|
||||
}
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
# Map of target -> list of targets it depends on.
|
||||
edges = {}
|
||||
|
||||
# Queue of targets to visit.
|
||||
targets_to_visit = target_list[:]
|
||||
|
||||
while len(targets_to_visit) > 0:
|
||||
target = targets_to_visit.pop()
|
||||
if target in edges:
|
||||
continue
|
||||
edges[target] = []
|
||||
|
||||
for dep in target_dicts[target].get("dependencies", []):
|
||||
edges[target].append(dep)
|
||||
targets_to_visit.append(dep)
|
||||
|
||||
try:
|
||||
filepath = params["generator_flags"]["output_dir"]
|
||||
except KeyError:
|
||||
filepath = "."
|
||||
filename = os.path.join(filepath, "dump.json")
|
||||
f = open(filename, "w")
|
||||
json.dump(edges, f)
|
||||
f.close()
|
||||
print("Wrote json to %s." % filename)
|
||||
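# Editor's note: illustrative sketch, not part of gyp. dump.json maps each fully
# qualified target to the list of targets it depends on; it can be read back
# with json.load. The target names below are made-up examples.
#
#   with open("dump.json") as fh:
#       edges = json.load(fh)
#   # edges["foo/foo.gyp:foo#target"] == ["foo/foo.gyp:foo_lib#target"]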
464
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
generated
vendored
Normal file
@@ -0,0 +1,464 @@
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""GYP backend that generates Eclipse CDT settings files.
|
||||
|
||||
This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
|
||||
files that can be imported into an Eclipse CDT project. The XML file contains a
|
||||
list of include paths and symbols (i.e. defines).
|
||||
|
||||
Because a full .cproject definition is not created by this generator, it's not
|
||||
possible to properly define the include dirs and symbols for each file
|
||||
individually. Instead, one set of includes/symbols is generated for the entire
|
||||
project. This works fairly well (and is a vast improvement in general), but may
|
||||
still result in a few indexer issues here and there.
|
||||
|
||||
This generator has no automated tests, so expect it to be broken.
|
||||
"""
|
||||
|
||||
from xml.sax.saxutils import escape
|
||||
import os.path
|
||||
import subprocess
|
||||
import gyp
|
||||
import gyp.common
|
||||
import gyp.msvs_emulation
|
||||
import shlex
|
||||
import xml.etree.ElementTree as ET  # cElementTree was removed in Python 3.9
|
||||
|
||||
generator_wants_static_library_dependencies_adjusted = False
|
||||
|
||||
generator_default_variables = {}
|
||||
|
||||
for dirname in ["INTERMEDIATE_DIR", "PRODUCT_DIR", "LIB_DIR", "SHARED_LIB_DIR"]:
|
||||
# Some gyp steps fail if these are empty(!), so we convert them to variables
|
||||
generator_default_variables[dirname] = "$" + dirname
|
||||
|
||||
for unused in [
|
||||
"RULE_INPUT_PATH",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"CONFIGURATION_NAME",
|
||||
]:
|
||||
generator_default_variables[unused] = ""
|
||||
|
||||
# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
|
||||
# part of the path when dealing with generated headers. This value will be
|
||||
# replaced dynamically for each configuration.
|
||||
generator_default_variables["SHARED_INTERMEDIATE_DIR"] = "$SHARED_INTERMEDIATE_DIR"
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for key, val in generator_flags.items():
|
||||
default_variables.setdefault(key, val)
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
default_variables.setdefault("OS", flavor)
|
||||
if flavor == "win":
|
||||
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
|
||||
|
||||
|
||||
def CalculateGeneratorInputInfo(params):
|
||||
"""Calculate the generator specific info that gets fed to input (called by
|
||||
gyp)."""
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
if generator_flags.get("adjust_static_libraries", False):
|
||||
global generator_wants_static_library_dependencies_adjusted
|
||||
generator_wants_static_library_dependencies_adjusted = True
|
||||
|
||||
|
||||
def GetAllIncludeDirectories(
|
||||
target_list,
|
||||
target_dicts,
|
||||
shared_intermediate_dirs,
|
||||
config_name,
|
||||
params,
|
||||
compiler_path,
|
||||
):
|
||||
"""Calculate the set of include directories to be used.
|
||||
|
||||
Returns:
|
||||
A list including all the include_dir's specified for every target followed
|
||||
by any include directories that were added as cflag compiler options.
|
||||
"""
|
||||
|
||||
gyp_includes_set = set()
|
||||
compiler_includes_list = []
|
||||
|
||||
# Find compiler's default include dirs.
|
||||
if compiler_path:
|
||||
command = shlex.split(compiler_path)
|
||||
command.extend(["-E", "-xc++", "-v", "-"])
|
||||
proc = subprocess.Popen(
|
||||
args=command,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
output = proc.communicate()[1].decode("utf-8")
|
||||
# Extract the list of include dirs from the output, which has this format:
|
||||
# ...
|
||||
# #include "..." search starts here:
|
||||
# #include <...> search starts here:
|
||||
# /usr/include/c++/4.6
|
||||
# /usr/local/include
|
||||
# End of search list.
|
||||
# ...
|
||||
in_include_list = False
|
||||
for line in output.splitlines():
|
||||
if line.startswith("#include"):
|
||||
in_include_list = True
|
||||
continue
|
||||
if line.startswith("End of search list."):
|
||||
break
|
||||
if in_include_list:
|
||||
include_dir = line.strip()
|
||||
if include_dir not in compiler_includes_list:
|
||||
compiler_includes_list.append(include_dir)
|
||||
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "win":
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
if config_name in target["configurations"]:
|
||||
config = target["configurations"][config_name]
|
||||
|
||||
# Look for any include dirs that were explicitly added via cflags. This
|
||||
# may be done in gyp files to force certain includes to come at the end.
|
||||
# TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
|
||||
# remove this.
|
||||
if flavor == "win":
|
||||
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
|
||||
cflags = msvs_settings.GetCflags(config_name)
|
||||
else:
|
||||
cflags = config["cflags"]
|
||||
for cflag in cflags:
|
||||
if cflag.startswith("-I"):
|
||||
include_dir = cflag[2:]
|
||||
if include_dir not in compiler_includes_list:
|
||||
compiler_includes_list.append(include_dir)
|
||||
|
||||
# Find standard gyp include dirs.
|
||||
if "include_dirs" in config:
|
||||
include_dirs = config["include_dirs"]
|
||||
for shared_intermediate_dir in shared_intermediate_dirs:
|
||||
for include_dir in include_dirs:
|
||||
include_dir = include_dir.replace(
|
||||
"$SHARED_INTERMEDIATE_DIR", shared_intermediate_dir
|
||||
)
|
||||
if not os.path.isabs(include_dir):
|
||||
base_dir = os.path.dirname(target_name)
|
||||
|
||||
include_dir = base_dir + "/" + include_dir
|
||||
include_dir = os.path.abspath(include_dir)
|
||||
|
||||
gyp_includes_set.add(include_dir)
|
||||
|
||||
# Generate a list that has all the include dirs.
|
||||
all_includes_list = list(gyp_includes_set)
|
||||
all_includes_list.sort()
|
||||
for compiler_include in compiler_includes_list:
|
||||
if compiler_include not in gyp_includes_set:
|
||||
all_includes_list.append(compiler_include)
|
||||
|
||||
# All done.
|
||||
return all_includes_list
|
||||
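# Editor's note: illustrative sketch, not part of gyp. It runs the same
# line-based scan as GetAllIncludeDirectories above over a captured fragment of
# "cc -E -xc++ -v -" stderr output; the directories are made-up examples.
_sample_stderr = """\
#include "..." search starts here:
#include <...> search starts here:
 /usr/include/c++/11
 /usr/local/include
End of search list.
"""
_found = []
_in_list = False
for _line in _sample_stderr.splitlines():
    if _line.startswith("#include"):
        _in_list = True
        continue
    if _line.startswith("End of search list."):
        break
    if _in_list:
        _found.append(_line.strip())
print(_found)  # ['/usr/include/c++/11', '/usr/local/include']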
|
||||
|
||||
def GetCompilerPath(target_list, data, options):
|
||||
"""Determine a command that can be used to invoke the compiler.
|
||||
|
||||
Returns:
|
||||
If this is a gyp project that has explicit make settings, try to determine
|
||||
the compiler from that. Otherwise, see if a compiler was specified via the
|
||||
CC_target environment variable.
|
||||
"""
|
||||
# First, see if the compiler is configured in make's settings.
|
||||
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
|
||||
make_global_settings_dict = data[build_file].get("make_global_settings", {})
|
||||
for key, value in make_global_settings_dict:
|
||||
if key in ["CC", "CXX"]:
|
||||
return os.path.join(options.toplevel_dir, value)
|
||||
|
||||
# Check to see if the compiler was specified as an environment variable.
|
||||
for key in ["CC_target", "CC", "CXX"]:
|
||||
compiler = os.environ.get(key)
|
||||
if compiler:
|
||||
return compiler
|
||||
|
||||
return "gcc"
|
||||
|
||||
|
||||
def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path):
|
||||
"""Calculate the defines for a project.
|
||||
|
||||
Returns:
|
||||
A dict that includes explicit defines declared in gyp files along with all
|
||||
of the default defines that the compiler uses.
|
||||
"""
|
||||
|
||||
# Get defines declared in the gyp files.
|
||||
all_defines = {}
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "win":
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
|
||||
if flavor == "win":
|
||||
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
|
||||
extra_defines = msvs_settings.GetComputedDefines(config_name)
|
||||
else:
|
||||
extra_defines = []
|
||||
if config_name in target["configurations"]:
|
||||
config = target["configurations"][config_name]
|
||||
target_defines = config["defines"]
|
||||
else:
|
||||
target_defines = []
|
||||
for define in target_defines + extra_defines:
|
||||
split_define = define.split("=", 1)
|
||||
if len(split_define) == 1:
|
||||
split_define.append("1")
|
||||
if split_define[0].strip() in all_defines:
|
||||
# Already defined
|
||||
continue
|
||||
all_defines[split_define[0].strip()] = split_define[1].strip()
|
||||
# Get default compiler defines (if possible).
|
||||
if flavor == "win":
|
||||
return all_defines # Default defines already processed in the loop above.
|
||||
if compiler_path:
|
||||
command = shlex.split(compiler_path)
|
||||
command.extend(["-E", "-dM", "-"])
|
||||
cpp_proc = subprocess.Popen(
|
||||
args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE
|
||||
)
|
||||
cpp_output = cpp_proc.communicate()[0].decode("utf-8")
|
||||
cpp_lines = cpp_output.split("\n")
|
||||
for cpp_line in cpp_lines:
|
||||
if not cpp_line.strip():
|
||||
continue
|
||||
cpp_line_parts = cpp_line.split(" ", 2)
|
||||
key = cpp_line_parts[1]
|
||||
if len(cpp_line_parts) >= 3:
|
||||
val = cpp_line_parts[2]
|
||||
else:
|
||||
val = "1"
|
||||
all_defines[key] = val
|
||||
|
||||
return all_defines
|
||||
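# Editor's note: illustrative sketch, not part of gyp. It mimics the key/value
# split GetAllDefines applies to each line of "cc -E -dM -" output; the define
# shown is just an example.
_cpp_line = "#define __STDC_VERSION__ 201112L"
_parts = _cpp_line.split(" ", 2)  # ['#define', '__STDC_VERSION__', '201112L']
_key = _parts[1]
_val = _parts[2] if len(_parts) >= 3 else "1"  # flag-only defines default to "1"
print({_key: _val})  # {'__STDC_VERSION__': '201112L'}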
|
||||
|
||||
def WriteIncludePaths(out, eclipse_langs, include_dirs):
|
||||
"""Write the includes section of a CDT settings export file."""
|
||||
|
||||
out.write(
|
||||
' <section name="org.eclipse.cdt.internal.ui.wizards.'
|
||||
'settingswizards.IncludePaths">\n'
|
||||
)
|
||||
out.write(' <language name="holder for library settings"></language>\n')
|
||||
for lang in eclipse_langs:
|
||||
out.write(' <language name="%s">\n' % lang)
|
||||
for include_dir in include_dirs:
|
||||
out.write(
|
||||
' <includepath workspace_path="false">%s</includepath>\n'
|
||||
% include_dir
|
||||
)
|
||||
out.write(" </language>\n")
|
||||
out.write(" </section>\n")
|
||||
|
||||
|
||||
def WriteMacros(out, eclipse_langs, defines):
|
||||
"""Write the macros section of a CDT settings export file."""
|
||||
|
||||
out.write(
|
||||
' <section name="org.eclipse.cdt.internal.ui.wizards.'
|
||||
'settingswizards.Macros">\n'
|
||||
)
|
||||
out.write(' <language name="holder for library settings"></language>\n')
|
||||
for lang in eclipse_langs:
|
||||
out.write(' <language name="%s">\n' % lang)
|
||||
for key in sorted(defines):
|
||||
out.write(
|
||||
" <macro><name>%s</name><value>%s</value></macro>\n"
|
||||
% (escape(key), escape(defines[key]))
|
||||
)
|
||||
out.write(" </language>\n")
|
||||
out.write(" </section>\n")
|
||||
|
||||
|
||||
def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name):
|
||||
options = params["options"]
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
|
||||
# build_dir: relative path from source root to our output files.
|
||||
# e.g. "out/Debug"
|
||||
build_dir = os.path.join(generator_flags.get("output_dir", "out"), config_name)
|
||||
|
||||
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
|
||||
# Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
|
||||
# SHARED_INTERMEDIATE_DIR. Include both possible locations.
|
||||
shared_intermediate_dirs = [
|
||||
os.path.join(toplevel_build, "obj", "gen"),
|
||||
os.path.join(toplevel_build, "gen"),
|
||||
]
|
||||
|
||||
GenerateCdtSettingsFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
data,
|
||||
params,
|
||||
config_name,
|
||||
os.path.join(toplevel_build, "eclipse-cdt-settings.xml"),
|
||||
options,
|
||||
shared_intermediate_dirs,
|
||||
)
|
||||
GenerateClasspathFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
options.toplevel_dir,
|
||||
toplevel_build,
|
||||
os.path.join(toplevel_build, "eclipse-classpath.xml"),
|
||||
)
|
||||
|
||||
|
||||
def GenerateCdtSettingsFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
data,
|
||||
params,
|
||||
config_name,
|
||||
out_name,
|
||||
options,
|
||||
shared_intermediate_dirs,
|
||||
):
|
||||
gyp.common.EnsureDirExists(out_name)
|
||||
with open(out_name, "w") as out:
|
||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
out.write("<cdtprojectproperties>\n")
|
||||
|
||||
eclipse_langs = [
|
||||
"C++ Source File",
|
||||
"C Source File",
|
||||
"Assembly Source File",
|
||||
"GNU C++",
|
||||
"GNU C",
|
||||
"Assembly",
|
||||
]
|
||||
compiler_path = GetCompilerPath(target_list, data, options)
|
||||
include_dirs = GetAllIncludeDirectories(
|
||||
target_list,
|
||||
target_dicts,
|
||||
shared_intermediate_dirs,
|
||||
config_name,
|
||||
params,
|
||||
compiler_path,
|
||||
)
|
||||
WriteIncludePaths(out, eclipse_langs, include_dirs)
|
||||
defines = GetAllDefines(
|
||||
target_list, target_dicts, data, config_name, params, compiler_path
|
||||
)
|
||||
WriteMacros(out, eclipse_langs, defines)
|
||||
|
||||
out.write("</cdtprojectproperties>\n")
|
||||
|
||||
|
||||
def GenerateClasspathFile(
|
||||
target_list, target_dicts, toplevel_dir, toplevel_build, out_name
|
||||
):
|
||||
"""Generates a classpath file suitable for symbol navigation and code
|
||||
completion of Java code (such as in Android projects) by finding all
|
||||
.java and .jar files used as action inputs."""
|
||||
gyp.common.EnsureDirExists(out_name)
|
||||
result = ET.Element("classpath")
|
||||
|
||||
def AddElements(kind, paths):
|
||||
# First, we need to normalize the paths so they are all relative to the
|
||||
# toplevel dir.
|
||||
rel_paths = set()
|
||||
for path in paths:
|
||||
if os.path.isabs(path):
|
||||
rel_paths.add(os.path.relpath(path, toplevel_dir))
|
||||
else:
|
||||
rel_paths.add(path)
|
||||
|
||||
for path in sorted(rel_paths):
|
||||
entry_element = ET.SubElement(result, "classpathentry")
|
||||
entry_element.set("kind", kind)
|
||||
entry_element.set("path", path)
|
||||
|
||||
AddElements("lib", GetJavaJars(target_list, target_dicts, toplevel_dir))
|
||||
AddElements("src", GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
|
||||
# Include the standard JRE container and a dummy out folder
|
||||
AddElements("con", ["org.eclipse.jdt.launching.JRE_CONTAINER"])
|
||||
# Include a dummy out folder so that Eclipse doesn't use the default /bin
|
||||
# folder in the root of the project.
|
||||
AddElements("output", [os.path.join(toplevel_build, ".eclipse-java-build")])
|
||||
|
||||
ET.ElementTree(result).write(out_name)
|
||||
|
||||
|
||||
def GetJavaJars(target_list, target_dicts, toplevel_dir):
|
||||
"""Generates a sequence of all .jars used as inputs."""
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
for action in target.get("actions", []):
|
||||
for input_ in action["inputs"]:
|
||||
if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"):
|
||||
if os.path.isabs(input_):
|
||||
yield input_
|
||||
else:
|
||||
yield os.path.join(os.path.dirname(target_name), input_)
|
||||
|
||||
|
||||
def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
|
||||
"""Generates a sequence of all likely java package root directories."""
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
for action in target.get("actions", []):
|
||||
for input_ in action["inputs"]:
|
||||
if os.path.splitext(input_)[1] == ".java" and not input_.startswith(
|
||||
"$"
|
||||
):
|
||||
dir_ = os.path.dirname(
|
||||
os.path.join(os.path.dirname(target_name), input_)
|
||||
)
|
||||
# If there is a parent 'src' or 'java' folder, navigate up to it -
|
||||
# these are canonical package root names in Chromium. This will
|
||||
# break if 'src' or 'java' exists in the package structure. This
|
||||
# could be further improved by inspecting the java file for the
|
||||
# package name if this proves to be too fragile in practice.
|
||||
parent_search = dir_
|
||||
while os.path.basename(parent_search) not in ["src", "java"]:
|
||||
parent_search, _ = os.path.split(parent_search)
|
||||
if not parent_search or parent_search == toplevel_dir:
|
||||
# Didn't find a known root, just return the original path
|
||||
yield dir_
|
||||
break
|
||||
else:
|
||||
yield parent_search
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
"""Generate an XML settings file that can be imported into a CDT project."""
|
||||
|
||||
if params["options"].generator_output:
|
||||
raise NotImplementedError("--generator_output not implemented for eclipse")
|
||||
|
||||
user_config = params.get("generator_flags", {}).get("config", None)
|
||||
if user_config:
|
||||
GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
|
||||
else:
|
||||
config_names = target_dicts[target_list[0]]["configurations"]
|
||||
for config_name in config_names:
|
||||
GenerateOutputForConfig(
|
||||
target_list, target_dicts, data, params, config_name
|
||||
)
|
||||
89
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
generated
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""gypd output module
|
||||
|
||||
This module produces gyp input as its output. Output files are given the
|
||||
.gypd extension to avoid overwriting the .gyp files that they are generated
|
||||
from. Internal references to .gyp files (such as those found in
|
||||
"dependencies" sections) are not adjusted to point to .gypd files instead;
|
||||
unlike other paths, which are relative to the .gyp or .gypd file, such paths
|
||||
are relative to the directory from which gyp was run to create the .gypd file.
|
||||
|
||||
This generator module is intended to be a sample and a debugging aid, hence
|
||||
the "d" for "debug" in .gypd. It is useful to inspect the results of the
|
||||
various merges, expansions, and conditional evaluations performed by gyp
|
||||
and to see a representation of what would be fed to a generator module.
|
||||
|
||||
It's not advisable to rename .gypd files produced by this module to .gyp,
|
||||
because they will have all merges, expansions, and evaluations already
|
||||
performed and the relevant constructs not present in the output; paths to
|
||||
dependencies may be wrong; and various sections that do not belong in .gyp
|
||||
files such as such as "included_files" and "*_excluded" will be present.
|
||||
Output will also be stripped of comments. This is not intended to be a
|
||||
general-purpose gyp pretty-printer; for that, you probably just want to
|
||||
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
|
||||
comments but won't do all of the other things done to this module's output.
|
||||
|
||||
The specific formatting of the output generated by this module is subject
|
||||
to change.
|
||||
"""
|
||||
|
||||
|
||||
import gyp.common
|
||||
import pprint
|
||||
|
||||
|
||||
# These variables should just be spit back out as variable references.
|
||||
_generator_identity_variables = [
|
||||
"CONFIGURATION_NAME",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"INTERMEDIATE_DIR",
|
||||
"LIB_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_PATH",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
"SHARED_LIB_DIR",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
]
|
||||
|
||||
# gypd doesn't define a default value for OS like many other generator
|
||||
# modules. Specify "-D OS=whatever" on the command line to provide a value.
|
||||
generator_default_variables = {}
|
||||
|
||||
# gypd supports multiple toolsets
|
||||
generator_supports_multiple_toolsets = True
|
||||
|
||||
# TODO(mark): This always uses <, which isn't right. The input module should
|
||||
# notify the generator to tell it which phase it is operating in, and this
|
||||
# module should use < for the early phase and then switch to > for the late
|
||||
# phase. Bonus points for carrying @ back into the output too.
|
||||
for v in _generator_identity_variables:
|
||||
generator_default_variables[v] = "<(%s)" % v
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
output_files = {}
|
||||
for qualified_target in target_list:
|
||||
[input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
|
||||
|
||||
if input_file[-4:] != ".gyp":
|
||||
continue
|
||||
input_file_stem = input_file[:-4]
|
||||
output_file = input_file_stem + params["options"].suffix + ".gypd"
|
||||
|
||||
output_files[output_file] = output_files.get(output_file, input_file)
|
||||
|
||||
for output_file, input_file in output_files.items():
|
||||
output = open(output_file, "w")
|
||||
pprint.pprint(data[input_file], output)
|
||||
output.close()
|
||||
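# Editor's note: illustrative sketch, not part of gyp. As the module docstring
# explains, a .gypd file is just a pretty-printed Python literal, so it can
# usually be read back with ast.literal_eval (the file name is an assumed
# example):
#
#   import ast
#   with open("foo.gypd") as fh:
#       gyp_data = ast.literal_eval(fh.read())
#   print(sorted(gyp_data.keys()))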
58
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""gypsh output module
|
||||
|
||||
gypsh is a GYP shell. It's not really a generator per se. All it does is
|
||||
fire up an interactive Python session with a few local variables set to the
|
||||
variables passed to the generator. Like gypd, it's intended as a debugging
|
||||
aid, to facilitate the exploration of .gyp structures after being processed
|
||||
by the input module.
|
||||
|
||||
The expected usage is "gyp -f gypsh -D OS=desired_os".
|
||||
"""
|
||||
|
||||
|
||||
import code
|
||||
import sys
|
||||
|
||||
|
||||
# All of this stuff about generator variables was lovingly ripped from gypd.py.
|
||||
# That module has a much better description of what's going on and why.
|
||||
_generator_identity_variables = [
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"INTERMEDIATE_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_PATH",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
]
|
||||
|
||||
generator_default_variables = {}
|
||||
|
||||
for v in _generator_identity_variables:
|
||||
generator_default_variables[v] = "<(%s)" % v
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
locals = {
|
||||
"target_list": target_list,
|
||||
"target_dicts": target_dicts,
|
||||
"data": data,
|
||||
}
|
||||
|
||||
# Use a banner that looks like the stock Python one and like what
|
||||
# code.interact uses by default, but tack on something to indicate what
|
||||
# locals are available, and identify gypsh.
|
||||
banner = "Python {} on {}\nlocals.keys() = {}\ngypsh".format(
|
||||
sys.version,
|
||||
sys.platform,
|
||||
repr(sorted(locals.keys())),
|
||||
)
|
||||
|
||||
code.interact(banner, local=locals)
|
||||
2717
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
generated
vendored
Normal file
File diff suppressed because it is too large
3981
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
generated
vendored
Normal file
File diff suppressed because it is too large
44
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
generated
vendored
Executable file
@@ -0,0 +1,44 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the msvs.py file. """
|
||||
|
||||
import gyp.generator.msvs as msvs
|
||||
import unittest
|
||||
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class TestSequenceFunctions(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.stderr = StringIO()
|
||||
|
||||
def test_GetLibraries(self):
|
||||
self.assertEqual(msvs._GetLibraries({}), [])
|
||||
self.assertEqual(msvs._GetLibraries({"libraries": []}), [])
|
||||
self.assertEqual(
|
||||
msvs._GetLibraries({"other": "foo", "libraries": ["a.lib"]}), ["a.lib"]
|
||||
)
|
||||
self.assertEqual(msvs._GetLibraries({"libraries": ["-la"]}), ["a.lib"])
|
||||
self.assertEqual(
|
||||
msvs._GetLibraries(
|
||||
{
|
||||
"libraries": [
|
||||
"a.lib",
|
||||
"b.lib",
|
||||
"c.lib",
|
||||
"-lb.lib",
|
||||
"-lb.lib",
|
||||
"d.lib",
|
||||
"a.lib",
|
||||
]
|
||||
}
|
||||
),
|
||||
["c.lib", "b.lib", "d.lib", "a.lib"],
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
2936
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
generated
vendored
Normal file
File diff suppressed because it is too large
55
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the ninja.py file. """
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
import gyp.generator.ninja as ninja
|
||||
|
||||
|
||||
class TestPrefixesAndSuffixes(unittest.TestCase):
|
||||
def test_BinaryNamesWindows(self):
|
||||
# These cannot run on non-Windows as they require a VS installation to
|
||||
# correctly handle variable expansion.
|
||||
if sys.platform.startswith("win"):
|
||||
writer = ninja.NinjaWriter(
|
||||
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win"
|
||||
)
|
||||
spec = {"target_name": "wee"}
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "executable").endswith(".exe")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").endswith(".lib")
|
||||
)
|
||||
|
||||
def test_BinaryNamesLinux(self):
|
||||
writer = ninja.NinjaWriter(
|
||||
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux"
|
||||
)
|
||||
spec = {"target_name": "wee"}
|
||||
self.assertTrue("." not in writer.ComputeOutputFileName(spec, "executable"))
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").startswith("lib")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").startswith("lib")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").endswith(".so")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").endswith(".a")
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
1394
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
generated
vendored
Normal file
File diff suppressed because it is too large
25
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the xcode.py file. """
|
||||
|
||||
import gyp.generator.xcode as xcode
|
||||
import unittest
|
||||
import sys
|
||||
|
||||
|
||||
class TestEscapeXcodeDefine(unittest.TestCase):
|
||||
if sys.platform == "darwin":
|
||||
|
||||
def test_InheritedRemainsUnescaped(self):
|
||||
self.assertEqual(xcode.EscapeXcodeDefine("$(inherited)"), "$(inherited)")
|
||||
|
||||
def test_Escaping(self):
|
||||
self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
3130
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/input.py
generated
vendored
Normal file
File diff suppressed because it is too large
98
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
generated
vendored
Executable file
@@ -0,0 +1,98 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Unit tests for the input.py file."""
|
||||
|
||||
import gyp.input
|
||||
import unittest
|
||||
|
||||
|
||||
class TestFindCycles(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.nodes = {}
|
||||
for x in ("a", "b", "c", "d", "e"):
|
||||
self.nodes[x] = gyp.input.DependencyGraphNode(x)
|
||||
|
||||
def _create_dependency(self, dependent, dependency):
|
||||
dependent.dependencies.append(dependency)
|
||||
dependency.dependents.append(dependent)
|
||||
|
||||
def test_no_cycle_empty_graph(self):
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_no_cycle_line(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["d"])
|
||||
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_no_cycle_dag(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["a"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_cycle_self_reference(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[[self.nodes["a"], self.nodes["a"]]], self.nodes["a"].FindCycles()
|
||||
)
|
||||
|
||||
def test_cycle_two_nodes(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[[self.nodes["a"], self.nodes["b"], self.nodes["a"]]],
|
||||
self.nodes["a"].FindCycles(),
|
||||
)
|
||||
self.assertEqual(
|
||||
[[self.nodes["b"], self.nodes["a"], self.nodes["b"]]],
|
||||
self.nodes["b"].FindCycles(),
|
||||
)
|
||||
|
||||
def test_two_cycles(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["a"])
|
||||
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["b"])
|
||||
|
||||
cycles = self.nodes["a"].FindCycles()
|
||||
self.assertTrue([self.nodes["a"], self.nodes["b"], self.nodes["a"]] in cycles)
|
||||
self.assertTrue([self.nodes["b"], self.nodes["c"], self.nodes["b"]] in cycles)
|
||||
self.assertEqual(2, len(cycles))
|
||||
|
||||
def test_big_cycle(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["d"])
|
||||
self._create_dependency(self.nodes["d"], self.nodes["e"])
|
||||
self._create_dependency(self.nodes["e"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
self.nodes["a"],
|
||||
self.nodes["b"],
|
||||
self.nodes["c"],
|
||||
self.nodes["d"],
|
||||
self.nodes["e"],
|
||||
self.nodes["a"],
|
||||
]
|
||||
],
|
||||
self.nodes["a"].FindCycles(),
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
771
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
generated
vendored
Executable file
@@ -0,0 +1,771 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Utility functions to perform Xcode-style build steps.
|
||||
|
||||
These functions are executed via gyp-mac-tool when using the Makefile generator.
|
||||
"""
|
||||
|
||||
|
||||
import fcntl
|
||||
import fnmatch
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import plistlib
|
||||
import re
|
||||
import shutil
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
|
||||
def main(args):
|
||||
executor = MacTool()
|
||||
exit_code = executor.Dispatch(args)
|
||||
if exit_code is not None:
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
class MacTool:
|
||||
"""This class performs all the Mac tooling steps. The methods can either be
|
||||
executed directly, or dispatched from an argument list."""
|
||||
|
||||
def Dispatch(self, args):
|
||||
"""Dispatches a string command to a method."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
method = "Exec%s" % self._CommandifyName(args[0])
|
||||
return getattr(self, method)(*args[1:])
|
||||
|
||||
def _CommandifyName(self, name_string):
|
||||
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
|
||||
return name_string.title().replace("-", "")
|
||||
|
||||
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
|
||||
"""Copies a resource file to the bundle/Resources directory, performing any
|
||||
necessary compilation on each resource."""
|
||||
convert_to_binary = convert_to_binary == "True"
|
||||
extension = os.path.splitext(source)[1].lower()
|
||||
if os.path.isdir(source):
|
||||
# Copy tree.
|
||||
# TODO(thakis): This copies file attributes like mtime, while the
|
||||
# single-file branch below doesn't. This should probably be changed to
|
||||
# be consistent with the single-file branch.
|
||||
if os.path.exists(dest):
|
||||
shutil.rmtree(dest)
|
||||
shutil.copytree(source, dest)
|
||||
elif extension == ".xib":
|
||||
return self._CopyXIBFile(source, dest)
|
||||
elif extension == ".storyboard":
|
||||
return self._CopyXIBFile(source, dest)
|
||||
elif extension == ".strings" and not convert_to_binary:
|
||||
self._CopyStringsFile(source, dest)
|
||||
else:
|
||||
if os.path.exists(dest):
|
||||
os.unlink(dest)
|
||||
shutil.copy(source, dest)
|
||||
|
||||
if convert_to_binary and extension in (".plist", ".strings"):
|
||||
self._ConvertToBinary(dest)
|
||||
|
||||
def _CopyXIBFile(self, source, dest):
|
||||
"""Compiles a XIB file with ibtool into a binary plist in the bundle."""
|
||||
|
||||
# ibtool sometimes crashes with relative paths. See crbug.com/314728.
|
||||
base = os.path.dirname(os.path.realpath(__file__))
|
||||
if os.path.relpath(source):
|
||||
source = os.path.join(base, source)
|
||||
if os.path.relpath(dest):
|
||||
dest = os.path.join(base, dest)
|
||||
|
||||
args = ["xcrun", "ibtool", "--errors", "--warnings", "--notices"]
|
||||
|
||||
if os.environ["XCODE_VERSION_ACTUAL"] > "0700":
|
||||
args.extend(["--auto-activate-custom-fonts"])
|
||||
if "IPHONEOS_DEPLOYMENT_TARGET" in os.environ:
|
||||
args.extend(
|
||||
[
|
||||
"--target-device",
|
||||
"iphone",
|
||||
"--target-device",
|
||||
"ipad",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
|
||||
]
|
||||
)
|
||||
else:
|
||||
args.extend(
|
||||
[
|
||||
"--target-device",
|
||||
"mac",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["MACOSX_DEPLOYMENT_TARGET"],
|
||||
]
|
||||
)
|
||||
|
||||
args.extend(
|
||||
["--output-format", "human-readable-text", "--compile", dest, source]
|
||||
)
|
||||
|
||||
ibtool_section_re = re.compile(r"/\*.*\*/")
|
||||
ibtool_re = re.compile(r".*note:.*is clipping its content")
|
||||
try:
|
||||
stdout = subprocess.check_output(args).decode("utf-8")
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(e.output)
|
||||
raise
|
||||
current_section_header = None
|
||||
for line in stdout.splitlines():
|
||||
if ibtool_section_re.match(line):
|
||||
current_section_header = line
|
||||
elif not ibtool_re.match(line):
|
||||
if current_section_header:
|
||||
print(current_section_header)
|
||||
current_section_header = None
|
||||
print(line)
|
||||
return 0
|
||||
|
||||
def _ConvertToBinary(self, dest):
|
||||
subprocess.check_call(
|
||||
["xcrun", "plutil", "-convert", "binary1", "-o", dest, dest]
|
||||
)
|
||||
|
||||
def _CopyStringsFile(self, source, dest):
|
||||
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
|
||||
input_code = self._DetectInputEncoding(source) or "UTF-8"
|
||||
|
||||
# Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
|
||||
# CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
|
||||
# CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
|
||||
# semicolon in dictionary.
|
||||
# on invalid files. Do the same kind of validation.
|
||||
import CoreFoundation
|
||||
|
||||
with open(source, "rb") as in_file:
|
||||
s = in_file.read()
|
||||
d = CoreFoundation.CFDataCreate(None, s, len(s))
|
||||
_, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
|
||||
if error:
|
||||
return
|
||||
|
||||
with open(dest, "wb") as fp:
|
||||
fp.write(s.decode(input_code).encode("UTF-16"))
|
||||
|
||||
def _DetectInputEncoding(self, file_name):
|
||||
"""Reads the first few bytes from file_name and tries to guess the text
|
||||
encoding. Returns None as a guess if it can't detect it."""
|
||||
with open(file_name, "rb") as fp:
|
||||
try:
|
||||
header = fp.read(3)
|
||||
except Exception:
|
||||
return None
|
||||
if header.startswith(b"\xFE\xFF"):
|
||||
return "UTF-16"
|
||||
elif header.startswith(b"\xFF\xFE"):
|
||||
return "UTF-16"
|
||||
elif header.startswith(b"\xEF\xBB\xBF"):
|
||||
return "UTF-8"
|
||||
else:
|
||||
return None
|
||||
|
||||
def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
|
||||
"""Copies the |source| Info.plist to the destination directory |dest|."""
|
||||
# Read the source Info.plist into memory.
|
||||
with open(source) as fd:
|
||||
lines = fd.read()
|
||||
|
||||
# Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
|
||||
plist = plistlib.readPlistFromString(lines)
|
||||
if keys:
|
||||
plist.update(json.loads(keys[0]))
|
||||
lines = plistlib.writePlistToString(plist)
|
||||
|
||||
# Go through all the environment variables and replace them as variables in
|
||||
# the file.
|
||||
IDENT_RE = re.compile(r"[_/\s]")
|
||||
for key in os.environ:
|
||||
if key.startswith("_"):
|
||||
continue
|
||||
evar = "${%s}" % key
|
||||
evalue = os.environ[key]
|
||||
lines = lines.replace(evar, evalue)
|
||||
|
||||
# Xcode supports various suffixes on environment variables, which are
|
||||
# all undocumented. :rfc1034identifier is used in the standard project
|
||||
# template these days, and :identifier was used earlier. They are used to
|
||||
# convert non-url characters into things that look like valid urls --
|
||||
# except that the replacement character for :identifier, '_' isn't valid
|
||||
# in a URL either -- oops, hence :rfc1034identifier was born.
|
||||
evar = "${%s:identifier}" % key
|
||||
evalue = IDENT_RE.sub("_", os.environ[key])
|
||||
lines = lines.replace(evar, evalue)
|
||||
|
||||
evar = "${%s:rfc1034identifier}" % key
|
||||
evalue = IDENT_RE.sub("-", os.environ[key])
|
||||
lines = lines.replace(evar, evalue)
|
||||
|
||||
# Remove any keys with values that haven't been replaced.
|
||||
lines = lines.splitlines()
|
||||
for i in range(len(lines)):
|
||||
if lines[i].strip().startswith("<string>${"):
|
||||
lines[i] = None
|
||||
lines[i - 1] = None
|
||||
lines = "\n".join(line for line in lines if line is not None)
|
||||
|
||||
# Write out the file with variables replaced.
|
||||
with open(dest, "w") as fd:
|
||||
fd.write(lines)
|
||||
|
||||
# Now write out PkgInfo file now that the Info.plist file has been
|
||||
# "compiled".
|
||||
self._WritePkgInfo(dest)
|
||||
|
||||
if convert_to_binary == "True":
|
||||
self._ConvertToBinary(dest)
|
||||
|
||||
def _WritePkgInfo(self, info_plist):
|
||||
"""This writes the PkgInfo file from the data stored in Info.plist."""
|
||||
plist = plistlib.readPlist(info_plist)
|
||||
if not plist:
|
||||
return
|
||||
|
||||
# Only create PkgInfo for executable types.
|
||||
package_type = plist["CFBundlePackageType"]
|
||||
if package_type != "APPL":
|
||||
return
|
||||
|
||||
# The format of PkgInfo is eight characters, representing the bundle type
|
||||
# and bundle signature, each four characters. If that is missing, four
|
||||
# '?' characters are used instead.
|
||||
signature_code = plist.get("CFBundleSignature", "????")
|
||||
if len(signature_code) != 4: # Wrong length resets everything, too.
|
||||
signature_code = "?" * 4
|
||||
|
||||
dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
|
||||
with open(dest, "w") as fp:
|
||||
fp.write(f"{package_type}{signature_code}")
|
||||
|
||||
def ExecFlock(self, lockfile, *cmd_list):
|
||||
"""Emulates the most basic behavior of Linux's flock(1)."""
|
||||
# Rely on exception handling to report errors.
|
||||
fd = os.open(lockfile, os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
|
||||
fcntl.flock(fd, fcntl.LOCK_EX)
|
||||
return subprocess.call(cmd_list)
|
||||
|
||||
def ExecFilterLibtool(self, *cmd_list):
|
||||
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
|
||||
symbols'."""
|
||||
libtool_re = re.compile(
|
||||
r"^.*libtool: (?:for architecture: \S* )?" r"file: .* has no symbols$"
|
||||
)
|
||||
libtool_re5 = re.compile(
|
||||
r"^.*libtool: warning for library: "
|
||||
+ r".* the table of contents is empty "
|
||||
+ r"\(no object file members in the library define global symbols\)$"
|
||||
)
|
||||
env = os.environ.copy()
|
||||
# Ref:
|
||||
# http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
|
||||
# The problem with this flag is that it resets the file mtime on the file to
|
||||
# epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
|
||||
env["ZERO_AR_DATE"] = "1"
|
||||
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
|
||||
err = libtoolout.communicate()[1].decode("utf-8")
|
||||
for line in err.splitlines():
|
||||
if not libtool_re.match(line) and not libtool_re5.match(line):
|
||||
print(line, file=sys.stderr)
|
||||
# Unconditionally touch the output .a file on the command line if present
|
||||
# and the command succeeded. A bit hacky.
|
||||
if not libtoolout.returncode:
|
||||
for i in range(len(cmd_list) - 1):
|
||||
if cmd_list[i] == "-o" and cmd_list[i + 1].endswith(".a"):
|
||||
os.utime(cmd_list[i + 1], None)
|
||||
break
|
||||
return libtoolout.returncode
|
||||
|
||||
def ExecPackageIosFramework(self, framework):
|
||||
# Find the name of the binary based on the part before the ".framework".
|
||||
binary = os.path.basename(framework).split(".")[0]
|
||||
module_path = os.path.join(framework, "Modules")
|
||||
if not os.path.exists(module_path):
|
||||
os.mkdir(module_path)
|
||||
module_template = (
|
||||
"framework module %s {\n"
|
||||
' umbrella header "%s.h"\n'
|
||||
"\n"
|
||||
" export *\n"
|
||||
" module * { export * }\n"
|
||||
"}\n" % (binary, binary)
|
||||
)
|
||||
|
||||
with open(os.path.join(module_path, "module.modulemap"), "w") as module_file:
|
||||
module_file.write(module_template)
|
||||
|
||||
def ExecPackageFramework(self, framework, version):
|
||||
"""Takes a path to Something.framework and the Current version of that and
|
||||
sets up all the symlinks."""
|
||||
# Find the name of the binary based on the part before the ".framework".
|
||||
binary = os.path.basename(framework).split(".")[0]
|
||||
|
||||
CURRENT = "Current"
|
||||
RESOURCES = "Resources"
|
||||
VERSIONS = "Versions"
|
||||
|
||||
if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
|
||||
# Binary-less frameworks don't seem to contain symlinks (see e.g.
|
||||
# chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
|
||||
return
|
||||
|
||||
# Move into the framework directory to set the symlinks correctly.
|
||||
pwd = os.getcwd()
|
||||
os.chdir(framework)
|
||||
|
||||
# Set up the Current version.
|
||||
self._Relink(version, os.path.join(VERSIONS, CURRENT))
|
||||
|
||||
# Set up the root symlinks.
|
||||
self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
|
||||
self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
|
||||
|
||||
# Back to where we were before!
|
||||
os.chdir(pwd)
|
||||
|
||||
def _Relink(self, dest, link):
|
||||
"""Creates a symlink to |dest| named |link|. If |link| already exists,
|
||||
it is overwritten."""
|
||||
if os.path.lexists(link):
|
||||
os.remove(link)
|
||||
os.symlink(dest, link)
|
||||
|
||||
def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
|
||||
framework_name = os.path.basename(framework).split(".")[0]
|
||||
all_headers = [os.path.abspath(header) for header in all_headers]
|
||||
filelist = {}
|
||||
for header in all_headers:
|
||||
filename = os.path.basename(header)
|
||||
filelist[filename] = header
|
||||
filelist[os.path.join(framework_name, filename)] = header
|
||||
WriteHmap(out, filelist)
|
||||
|
||||
def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
|
||||
header_path = os.path.join(framework, "Headers")
|
||||
if not os.path.exists(header_path):
|
||||
os.makedirs(header_path)
|
||||
for header in copy_headers:
|
||||
shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
|
||||
|
||||
def ExecCompileXcassets(self, keys, *inputs):
|
||||
"""Compiles multiple .xcassets files into a single .car file.
|
||||
|
||||
This invokes 'actool' to compile all the input .xcassets files. The
|
||||
|keys| argument is a json-encoded dictionary of extra arguments to
|
||||
pass to 'actool' when the asset catalog contains an application icon
|
||||
or a launch image.
|
||||
|
||||
Note that 'actool' does not create the Assets.car file if the asset
|
||||
catalog does not contain an imageset.
|
||||
"""
|
||||
command_line = [
|
||||
"xcrun",
|
||||
"actool",
|
||||
"--output-format",
|
||||
"human-readable-text",
|
||||
"--compress-pngs",
|
||||
"--notices",
|
||||
"--warnings",
|
||||
"--errors",
|
||||
]
|
||||
is_iphone_target = "IPHONEOS_DEPLOYMENT_TARGET" in os.environ
|
||||
if is_iphone_target:
|
||||
platform = os.environ["CONFIGURATION"].split("-")[-1]
|
||||
if platform not in ("iphoneos", "iphonesimulator"):
|
||||
platform = "iphonesimulator"
|
||||
command_line.extend(
|
||||
[
|
||||
"--platform",
|
||||
platform,
|
||||
"--target-device",
|
||||
"iphone",
|
||||
"--target-device",
|
||||
"ipad",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
|
||||
"--compile",
|
||||
os.path.abspath(os.environ["CONTENTS_FOLDER_PATH"]),
|
||||
]
|
||||
)
|
||||
else:
|
||||
command_line.extend(
|
||||
[
|
||||
"--platform",
|
||||
"macosx",
|
||||
"--target-device",
|
||||
"mac",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["MACOSX_DEPLOYMENT_TARGET"],
|
||||
"--compile",
|
||||
os.path.abspath(os.environ["UNLOCALIZED_RESOURCES_FOLDER_PATH"]),
|
||||
]
|
||||
)
|
||||
if keys:
|
||||
keys = json.loads(keys)
|
||||
for key, value in keys.items():
|
||||
arg_name = "--" + key
|
||||
if isinstance(value, bool):
|
||||
if value:
|
||||
command_line.append(arg_name)
|
||||
elif isinstance(value, list):
|
||||
for v in value:
|
||||
command_line.append(arg_name)
|
||||
command_line.append(str(v))
|
||||
else:
|
||||
command_line.append(arg_name)
|
||||
command_line.append(str(value))
|
||||
# Note: actool crashes if inputs path are relative, so use os.path.abspath
|
||||
# to get absolute path name for inputs.
|
||||
command_line.extend(map(os.path.abspath, inputs))
|
||||
subprocess.check_call(command_line)
|
||||
|
||||
def ExecMergeInfoPlist(self, output, *inputs):
|
||||
"""Merge multiple .plist files into a single .plist file."""
|
||||
merged_plist = {}
|
||||
for path in inputs:
|
||||
plist = self._LoadPlistMaybeBinary(path)
|
||||
self._MergePlist(merged_plist, plist)
|
||||
plistlib.writePlist(merged_plist, output)
|
||||
|
||||
def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
|
||||
"""Code sign a bundle.
|
||||
|
||||
This function tries to code sign an iOS bundle, following the same
|
||||
algorithm as Xcode:
|
||||
1. pick the provisioning profile that best matches the bundle identifier,
|
||||
and copy it into the bundle as embedded.mobileprovision,
|
||||
2. copy Entitlements.plist from user or SDK next to the bundle,
|
||||
3. code sign the bundle.
|
||||
"""
|
||||
substitutions, overrides = self._InstallProvisioningProfile(
|
||||
provisioning, self._GetCFBundleIdentifier()
|
||||
)
|
||||
entitlements_path = self._InstallEntitlements(
|
||||
entitlements, substitutions, overrides
|
||||
)
|
||||
|
||||
args = ["codesign", "--force", "--sign", key]
|
||||
if preserve == "True":
|
||||
args.extend(["--deep", "--preserve-metadata=identifier,entitlements"])
|
||||
else:
|
||||
args.extend(["--entitlements", entitlements_path])
|
||||
args.extend(["--timestamp=none", path])
|
||||
subprocess.check_call(args)
|
||||
|
||||
def _InstallProvisioningProfile(self, profile, bundle_identifier):
|
||||
"""Installs embedded.mobileprovision into the bundle.
|
||||
|
||||
Args:
|
||||
profile: string, optional, short name of the .mobileprovision file
|
||||
to use, if empty or the file is missing, the best file installed
|
||||
will be used
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
|
||||
Returns:
|
||||
A tuple containing two dictionaries: variable substitutions and values
|
||||
to override when generating the entitlements file.
|
||||
"""
|
||||
source_path, provisioning_data, team_id = self._FindProvisioningProfile(
|
||||
profile, bundle_identifier
|
||||
)
|
||||
target_path = os.path.join(
|
||||
os.environ["BUILT_PRODUCTS_DIR"],
|
||||
os.environ["CONTENTS_FOLDER_PATH"],
|
||||
"embedded.mobileprovision",
|
||||
)
|
||||
shutil.copy2(source_path, target_path)
|
||||
substitutions = self._GetSubstitutions(bundle_identifier, team_id + ".")
|
||||
return substitutions, provisioning_data["Entitlements"]
|
||||
|
||||
def _FindProvisioningProfile(self, profile, bundle_identifier):
|
||||
"""Finds the .mobileprovision file to use for signing the bundle.
|
||||
|
||||
Checks all the installed provisioning profiles (or if the user specified
|
||||
the PROVISIONING_PROFILE variable, only consults it) and selects the most
|
||||
specific one that corresponds to the bundle identifier.
|
||||
|
||||
Args:
|
||||
profile: string, optional, short name of the .mobileprovision file
|
||||
to use, if empty or the file is missing, the best file installed
|
||||
will be used
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
|
||||
Returns:
|
||||
A tuple of the path to the selected provisioning profile, the data of
|
||||
the embedded plist in the provisioning profile and the team identifier
|
||||
to use for code signing.
|
||||
|
||||
Raises:
|
||||
SystemExit: if no .mobileprovision can be used to sign the bundle.
|
||||
"""
|
||||
profiles_dir = os.path.join(
|
||||
os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles"
|
||||
)
|
||||
if not os.path.isdir(profiles_dir):
|
||||
print(
|
||||
"cannot find mobile provisioning for %s" % (bundle_identifier),
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
provisioning_profiles = None
|
||||
if profile:
|
||||
profile_path = os.path.join(profiles_dir, profile + ".mobileprovision")
|
||||
if os.path.exists(profile_path):
|
||||
provisioning_profiles = [profile_path]
|
||||
if not provisioning_profiles:
|
||||
provisioning_profiles = glob.glob(
|
||||
os.path.join(profiles_dir, "*.mobileprovision")
|
||||
)
|
||||
valid_provisioning_profiles = {}
|
||||
for profile_path in provisioning_profiles:
|
||||
profile_data = self._LoadProvisioningProfile(profile_path)
|
||||
app_id_pattern = profile_data.get("Entitlements", {}).get(
|
||||
"application-identifier", ""
|
||||
)
|
||||
for team_identifier in profile_data.get("TeamIdentifier", []):
|
||||
app_id = f"{team_identifier}.{bundle_identifier}"
|
||||
if fnmatch.fnmatch(app_id, app_id_pattern):
|
||||
valid_provisioning_profiles[app_id_pattern] = (
|
||||
profile_path,
|
||||
profile_data,
|
||||
team_identifier,
|
||||
)
|
||||
if not valid_provisioning_profiles:
|
||||
print(
|
||||
"cannot find mobile provisioning for %s" % (bundle_identifier),
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
# If the user has multiple provisioning profiles installed that can be
|
||||
# used for ${bundle_identifier}, pick the most specific one (ie. the
|
||||
# provisioning profile whose pattern is the longest).
|
||||
selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
|
||||
return valid_provisioning_profiles[selected_key]
|
||||
|
||||
def _LoadProvisioningProfile(self, profile_path):
|
||||
"""Extracts the plist embedded in a provisioning profile.
|
||||
|
||||
Args:
|
||||
profile_path: string, path to the .mobileprovision file
|
||||
|
||||
Returns:
|
||||
Content of the plist embedded in the provisioning profile as a dictionary.
|
||||
"""
|
||||
with tempfile.NamedTemporaryFile() as temp:
|
||||
subprocess.check_call(
|
||||
["security", "cms", "-D", "-i", profile_path, "-o", temp.name]
|
||||
)
|
||||
return self._LoadPlistMaybeBinary(temp.name)
|
||||
|
||||
def _MergePlist(self, merged_plist, plist):
|
||||
"""Merge |plist| into |merged_plist|."""
|
||||
for key, value in plist.items():
|
||||
if isinstance(value, dict):
|
||||
merged_value = merged_plist.get(key, {})
|
||||
if isinstance(merged_value, dict):
|
||||
self._MergePlist(merged_value, value)
|
||||
merged_plist[key] = merged_value
|
||||
else:
|
||||
merged_plist[key] = value
|
||||
else:
|
||||
merged_plist[key] = value
|
||||
|
||||
def _LoadPlistMaybeBinary(self, plist_path):
|
||||
"""Loads into a memory a plist possibly encoded in binary format.
|
||||
|
||||
This is a wrapper around plistlib.readPlist that tries to convert the
|
||||
plist to the XML format if it can't be parsed (assuming that it is in
|
||||
the binary format).
|
||||
|
||||
Args:
|
||||
plist_path: string, path to a plist file, in XML or binary format
|
||||
|
||||
Returns:
|
||||
Content of the plist as a dictionary.
|
||||
"""
|
||||
try:
|
||||
# First, try to read the file using plistlib that only supports XML,
|
||||
# and if an exception is raised, convert a temporary copy to XML and
|
||||
# load that copy.
|
||||
return plistlib.readPlist(plist_path)
|
||||
except Exception:
|
||||
pass
|
||||
with tempfile.NamedTemporaryFile() as temp:
|
||||
shutil.copy2(plist_path, temp.name)
|
||||
subprocess.check_call(["plutil", "-convert", "xml1", temp.name])
|
||||
return plistlib.readPlist(temp.name)
|
||||
|
||||
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
|
||||
"""Constructs a dictionary of variable substitutions for Entitlements.plist.
|
||||
|
||||
Args:
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
app_identifier_prefix: string, value for AppIdentifierPrefix
|
||||
|
||||
Returns:
|
||||
Dictionary of substitutions to apply when generating Entitlements.plist.
|
||||
"""
|
||||
return {
|
||||
"CFBundleIdentifier": bundle_identifier,
|
||||
"AppIdentifierPrefix": app_identifier_prefix,
|
||||
}
|
||||
|
||||
def _GetCFBundleIdentifier(self):
|
||||
"""Extracts CFBundleIdentifier value from Info.plist in the bundle.
|
||||
|
||||
Returns:
|
||||
Value of CFBundleIdentifier in the Info.plist located in the bundle.
|
||||
"""
|
||||
info_plist_path = os.path.join(
|
||||
os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"]
|
||||
)
|
||||
info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
|
||||
return info_plist_data["CFBundleIdentifier"]
|
||||
|
||||
def _InstallEntitlements(self, entitlements, substitutions, overrides):
|
||||
"""Generates and install the ${BundleName}.xcent entitlements file.
|
||||
|
||||
Expands variables "$(variable)" pattern in the source entitlements file,
|
||||
adds extra entitlements defined in the .mobileprovision file and copies
|
||||
the generated plist to "${BundlePath}.xcent".
|
||||
|
||||
Args:
|
||||
entitlements: string, optional, path to the Entitlements.plist template
|
||||
to use, defaults to "${SDKROOT}/Entitlements.plist"
|
||||
substitutions: dictionary, variable substitutions
|
||||
overrides: dictionary, values to add to the entitlements
|
||||
|
||||
Returns:
|
||||
Path to the generated entitlements file.
|
||||
"""
|
||||
source_path = entitlements
|
||||
target_path = os.path.join(
|
||||
os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent"
|
||||
)
|
||||
if not source_path:
|
||||
source_path = os.path.join(os.environ["SDKROOT"], "Entitlements.plist")
|
||||
shutil.copy2(source_path, target_path)
|
||||
data = self._LoadPlistMaybeBinary(target_path)
|
||||
data = self._ExpandVariables(data, substitutions)
|
||||
if overrides:
|
||||
for key in overrides:
|
||||
if key not in data:
|
||||
data[key] = overrides[key]
|
||||
plistlib.writePlist(data, target_path)
|
||||
return target_path
|
||||
|
||||
def _ExpandVariables(self, data, substitutions):
|
||||
"""Expands variables "$(variable)" in data.
|
||||
|
||||
Args:
|
||||
data: object, can be either string, list or dictionary
|
||||
substitutions: dictionary, variable substitutions to perform
|
||||
|
||||
Returns:
|
||||
Copy of data where each reference to "$(variable)" has been replaced
|
||||
by the corresponding value found in substitutions, or left intact if
|
||||
the key was not found.
|
||||
"""
|
||||
if isinstance(data, str):
|
||||
for key, value in substitutions.items():
|
||||
data = data.replace("$(%s)" % key, value)
|
||||
return data
|
||||
if isinstance(data, list):
|
||||
return [self._ExpandVariables(v, substitutions) for v in data]
|
||||
if isinstance(data, dict):
|
||||
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
|
||||
return data
|
||||
|
||||
|
||||
def NextGreaterPowerOf2(x):
|
||||
return 2 ** (x).bit_length()
|
||||
|
||||
|
||||
def WriteHmap(output_name, filelist):
|
||||
"""Generates a header map based on |filelist|.
|
||||
|
||||
Per Mark Mentovai:
|
||||
A header map is structured essentially as a hash table, keyed by names used
|
||||
in #includes, and providing pathnames to the actual files.
|
||||
|
||||
The implementation below and the comment above come from inspecting:
|
||||
http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
|
||||
while also looking at the implementation in clang in:
|
||||
https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
|
||||
"""
|
||||
magic = 1751998832
|
||||
version = 1
|
||||
_reserved = 0
|
||||
count = len(filelist)
|
||||
capacity = NextGreaterPowerOf2(count)
|
||||
strings_offset = 24 + (12 * capacity)
|
||||
max_value_length = max(len(value) for value in filelist.values())
|
||||
|
||||
out = open(output_name, "wb")
|
||||
out.write(
|
||||
struct.pack(
|
||||
"<LHHLLLL",
|
||||
magic,
|
||||
version,
|
||||
_reserved,
|
||||
strings_offset,
|
||||
count,
|
||||
capacity,
|
||||
max_value_length,
|
||||
)
|
||||
)
|
||||
|
||||
# Create empty hashmap buckets.
|
||||
buckets = [None] * capacity
|
||||
for file, path in filelist.items():
|
||||
key = 0
|
||||
for c in file:
|
||||
key += ord(c.lower()) * 13
|
||||
|
||||
# Fill next empty bucket.
|
||||
while buckets[key & capacity - 1] is not None:
|
||||
key = key + 1
|
||||
buckets[key & capacity - 1] = (file, path)
|
||||
|
||||
next_offset = 1
|
||||
for bucket in buckets:
|
||||
if bucket is None:
|
||||
out.write(struct.pack("<LLL", 0, 0, 0))
|
||||
else:
|
||||
(file, path) = bucket
|
||||
key_offset = next_offset
|
||||
prefix_offset = key_offset + len(file) + 1
|
||||
suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
|
||||
next_offset = suffix_offset + len(os.path.basename(path)) + 1
|
||||
out.write(struct.pack("<LLL", key_offset, prefix_offset, suffix_offset))
|
||||
|
||||
# Pad byte since next offset starts at 1.
|
||||
out.write(struct.pack("<x"))
|
||||
|
||||
for bucket in buckets:
|
||||
if bucket is not None:
|
||||
(file, path) = bucket
|
||||
out.write(struct.pack("<%ds" % len(file), file))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
base = os.path.dirname(path) + os.sep
|
||||
out.write(struct.pack("<%ds" % len(base), base))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
path = os.path.basename(path)
|
||||
out.write(struct.pack("<%ds" % len(path), path))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
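# Hedged illustration, not part of gyp: how the Dispatch/_CommandifyName
# convention in mac_tool.py above maps a gyp-mac-tool command line onto an
# Exec* method. For example "gyp-mac-tool copy-info-plist SRC DEST False"
# ends up calling MacTool().ExecCopyInfoPlist("SRC", "DEST", "False").


def commandify(name):
    # Same transform as _CommandifyName: title-case the dash-separated words,
    # drop the dashes, and prefix with "Exec".
    return "Exec%s" % name.title().replace("-", "")


assert commandify("copy-info-plist") == "ExecCopyInfoPlist"
assert commandify("filter-libtool") == "ExecFilterLibtool"
assert commandify("package-framework") == "ExecPackageFramework"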
1271
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
generated
vendored
Normal file
1271
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
generated
vendored
Normal file
File diff suppressed because it is too large
174
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
generated
vendored
Normal file
174
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
generated
vendored
Normal file
@@ -0,0 +1,174 @@
|
||||
# This file comes from
|
||||
# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
|
||||
# Do not edit! Edit the upstream one instead.
|
||||
|
||||
"""Python module for generating .ninja files.
|
||||
|
||||
Note that this is emphatically not a required piece of Ninja; it's
|
||||
just a helpful utility for build-file-generation systems that already
|
||||
use Python.
|
||||
"""
|
||||
|
||||
import textwrap
|
||||
|
||||
|
||||
def escape_path(word):
|
||||
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
|
||||
|
||||
|
||||
class Writer:
|
||||
def __init__(self, output, width=78):
|
||||
self.output = output
|
||||
self.width = width
|
||||
|
||||
def newline(self):
|
||||
self.output.write("\n")
|
||||
|
||||
def comment(self, text):
|
||||
for line in textwrap.wrap(text, self.width - 2):
|
||||
self.output.write("# " + line + "\n")
|
||||
|
||||
def variable(self, key, value, indent=0):
|
||||
if value is None:
|
||||
return
|
||||
if isinstance(value, list):
|
||||
value = " ".join(filter(None, value)) # Filter out empty strings.
|
||||
self._line(f"{key} = {value}", indent)
|
||||
|
||||
def pool(self, name, depth):
|
||||
self._line("pool %s" % name)
|
||||
self.variable("depth", depth, indent=1)
|
||||
|
||||
def rule(
|
||||
self,
|
||||
name,
|
||||
command,
|
||||
description=None,
|
||||
depfile=None,
|
||||
generator=False,
|
||||
pool=None,
|
||||
restat=False,
|
||||
rspfile=None,
|
||||
rspfile_content=None,
|
||||
deps=None,
|
||||
):
|
||||
self._line("rule %s" % name)
|
||||
self.variable("command", command, indent=1)
|
||||
if description:
|
||||
self.variable("description", description, indent=1)
|
||||
if depfile:
|
||||
self.variable("depfile", depfile, indent=1)
|
||||
if generator:
|
||||
self.variable("generator", "1", indent=1)
|
||||
if pool:
|
||||
self.variable("pool", pool, indent=1)
|
||||
if restat:
|
||||
self.variable("restat", "1", indent=1)
|
||||
if rspfile:
|
||||
self.variable("rspfile", rspfile, indent=1)
|
||||
if rspfile_content:
|
||||
self.variable("rspfile_content", rspfile_content, indent=1)
|
||||
if deps:
|
||||
self.variable("deps", deps, indent=1)
|
||||
|
||||
def build(
|
||||
self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None
|
||||
):
|
||||
outputs = self._as_list(outputs)
|
||||
all_inputs = self._as_list(inputs)[:]
|
||||
out_outputs = list(map(escape_path, outputs))
|
||||
all_inputs = list(map(escape_path, all_inputs))
|
||||
|
||||
if implicit:
|
||||
implicit = map(escape_path, self._as_list(implicit))
|
||||
all_inputs.append("|")
|
||||
all_inputs.extend(implicit)
|
||||
if order_only:
|
||||
order_only = map(escape_path, self._as_list(order_only))
|
||||
all_inputs.append("||")
|
||||
all_inputs.extend(order_only)
|
||||
|
||||
self._line(
|
||||
"build {}: {}".format(" ".join(out_outputs), " ".join([rule] + all_inputs))
|
||||
)
|
||||
|
||||
if variables:
|
||||
if isinstance(variables, dict):
|
||||
iterator = iter(variables.items())
|
||||
else:
|
||||
iterator = iter(variables)
|
||||
|
||||
for key, val in iterator:
|
||||
self.variable(key, val, indent=1)
|
||||
|
||||
return outputs
|
||||
|
||||
def include(self, path):
|
||||
self._line("include %s" % path)
|
||||
|
||||
def subninja(self, path):
|
||||
self._line("subninja %s" % path)
|
||||
|
||||
def default(self, paths):
|
||||
self._line("default %s" % " ".join(self._as_list(paths)))
|
||||
|
||||
def _count_dollars_before_index(self, s, i):
|
||||
"""Returns the number of '$' characters right in front of s[i]."""
|
||||
dollar_count = 0
|
||||
dollar_index = i - 1
|
||||
while dollar_index > 0 and s[dollar_index] == "$":
|
||||
dollar_count += 1
|
||||
dollar_index -= 1
|
||||
return dollar_count
|
||||
|
||||
def _line(self, text, indent=0):
|
||||
"""Write 'text' word-wrapped at self.width characters."""
|
||||
leading_space = " " * indent
|
||||
while len(leading_space) + len(text) > self.width:
|
||||
# The text is too wide; wrap if possible.
|
||||
|
||||
# Find the rightmost space that would obey our width constraint and
|
||||
# that's not an escaped space.
|
||||
available_space = self.width - len(leading_space) - len(" $")
|
||||
space = available_space
|
||||
while True:
|
||||
space = text.rfind(" ", 0, space)
|
||||
if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0:
|
||||
break
|
||||
|
||||
if space < 0:
|
||||
# No such space; just use the first unescaped space we can find.
|
||||
space = available_space - 1
|
||||
while True:
|
||||
space = text.find(" ", space + 1)
|
||||
if (
|
||||
space < 0
|
||||
or self._count_dollars_before_index(text, space) % 2 == 0
|
||||
):
|
||||
break
|
||||
if space < 0:
|
||||
# Give up on breaking.
|
||||
break
|
||||
|
||||
self.output.write(leading_space + text[0:space] + " $\n")
|
||||
text = text[space + 1 :]
|
||||
|
||||
# Subsequent lines are continuations, so indent them.
|
||||
leading_space = " " * (indent + 2)
|
||||
|
||||
self.output.write(leading_space + text + "\n")
|
||||
|
||||
def _as_list(self, input):
|
||||
if input is None:
|
||||
return []
|
||||
if isinstance(input, list):
|
||||
return input
|
||||
return [input]
|
||||
|
||||
|
||||
def escape(string):
|
||||
"""Escape a string such that it can be embedded into a Ninja file without
|
||||
further interpretation."""
|
||||
assert "\n" not in string, "Ninja syntax does not allow newlines"
|
||||
# We only have one special metacharacter: '$'.
|
||||
return string.replace("$", "$$")
|
||||
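# A small usage sketch for the Writer class above. It assumes gyp's pylib
# directory is on sys.path; the rule and file names are made up.
import io

from gyp.ninja_syntax import Writer

buf = io.StringIO()
n = Writer(buf, width=78)
n.comment("Minimal build file emitted with ninja_syntax.Writer")
n.rule("cc", command="gcc -MMD -c $in -o $out", depfile="$out.d", deps="gcc")
n.build(["foo.o"], "cc", inputs=["foo.c"])
n.default(["foo.o"])
print(buf.getvalue())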
61
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
generated
vendored
Normal file
61
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""A clone of the default copy.deepcopy that doesn't handle cyclic
|
||||
structures or complex types except for dicts and lists. This is
|
||||
because gyp copies such large structures that the small copy overhead ends up
|
||||
taking seconds in a project the size of Chromium."""
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
pass
|
||||
|
||||
|
||||
__all__ = ["Error", "deepcopy"]
|
||||
|
||||
|
||||
def deepcopy(x):
|
||||
"""Deep copy operation on gyp objects such as strings, ints, dicts
|
||||
and lists. More than twice as fast as copy.deepcopy but much less
|
||||
generic."""
|
||||
|
||||
try:
|
||||
return _deepcopy_dispatch[type(x)](x)
|
||||
except KeyError:
|
||||
raise Error(
|
||||
"Unsupported type %s for deepcopy. Use copy.deepcopy "
|
||||
+ "or expand simple_copy support." % type(x)
|
||||
)
|
||||
|
||||
|
||||
_deepcopy_dispatch = d = {}
|
||||
|
||||
|
||||
def _deepcopy_atomic(x):
|
||||
return x
|
||||
|
||||
|
||||
types = bool, float, int, str, type, type(None)
|
||||
|
||||
for x in types:
|
||||
d[x] = _deepcopy_atomic
|
||||
|
||||
|
||||
def _deepcopy_list(x):
|
||||
return [deepcopy(a) for a in x]
|
||||
|
||||
|
||||
d[list] = _deepcopy_list
|
||||
|
||||
|
||||
def _deepcopy_dict(x):
|
||||
y = {}
|
||||
for key, value in x.items():
|
||||
y[deepcopy(key)] = deepcopy(value)
|
||||
return y
|
||||
|
||||
|
||||
d[dict] = _deepcopy_dict
|
||||
|
||||
del d
|
||||
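# Usage sketch for simple_copy.deepcopy above, assuming gyp's pylib is on
# sys.path. Only dicts, lists and atomic types are handled; anything else
# (a tuple here) raises simple_copy.Error.
from gyp import simple_copy

original = {"targets": [{"target_name": "demo", "sources": ["a.cc"]}]}
clone = simple_copy.deepcopy(original)
clone["targets"][0]["sources"].append("b.cc")
assert original["targets"][0]["sources"] == ["a.cc"]  # the original is untouched

try:
    simple_copy.deepcopy(("tuples", "are", "unsupported"))
except simple_copy.Error:
    pass  # expected: expand simple_copy or fall back to copy.deepcopy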
374
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
generated
vendored
Executable file
374
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
generated
vendored
Executable file
@@ -0,0 +1,374 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Utility functions for Windows builds.
|
||||
|
||||
These functions are executed via gyp-win-tool when using the ninja generator.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import stat
|
||||
import string
|
||||
import sys
|
||||
|
||||
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
# A regex matching an argument corresponding to the output filename passed to
|
||||
# link.exe.
|
||||
_LINK_EXE_OUT_ARG = re.compile("/OUT:(?P<out>.+)$", re.IGNORECASE)
|
||||
|
||||
|
||||
def main(args):
|
||||
executor = WinTool()
|
||||
exit_code = executor.Dispatch(args)
|
||||
if exit_code is not None:
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
class WinTool:
|
||||
"""This class performs all the Windows tooling steps. The methods can either
|
||||
be executed directly, or dispatched from an argument list."""
|
||||
|
||||
def _UseSeparateMspdbsrv(self, env, args):
|
||||
"""Allows to use a unique instance of mspdbsrv.exe per linker instead of a
|
||||
shared one."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
if args[0] != "link.exe":
|
||||
return
|
||||
|
||||
# Use the output filename passed to the linker to generate an endpoint name
|
||||
# for mspdbsrv.exe.
|
||||
endpoint_name = None
|
||||
for arg in args:
|
||||
m = _LINK_EXE_OUT_ARG.match(arg)
|
||||
if m:
|
||||
endpoint_name = re.sub(
|
||||
r"\W+", "", "%s_%d" % (m.group("out"), os.getpid())
|
||||
)
|
||||
break
|
||||
|
||||
if endpoint_name is None:
|
||||
return
|
||||
|
||||
# Adds the appropriate environment variable. This will be read by link.exe
|
||||
# to know which instance of mspdbsrv.exe it should connect to (if it's
|
||||
# not set then the default endpoint is used).
|
||||
env["_MSPDBSRV_ENDPOINT_"] = endpoint_name
|
||||
|
||||
def Dispatch(self, args):
|
||||
"""Dispatches a string command to a method."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
method = "Exec%s" % self._CommandifyName(args[0])
|
||||
return getattr(self, method)(*args[1:])
|
||||
|
||||
def _CommandifyName(self, name_string):
|
||||
"""Transforms a tool name like recursive-mirror to RecursiveMirror."""
|
||||
return name_string.title().replace("-", "")
|
||||
|
||||
def _GetEnv(self, arch):
|
||||
"""Gets the saved environment from a file for a given architecture."""
|
||||
# The environment is saved as an "environment block" (see CreateProcess
|
||||
# and msvs_emulation for details). We convert to a dict here.
|
||||
# Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
|
||||
pairs = open(arch).read()[:-2].split("\0")
|
||||
kvs = [item.split("=", 1) for item in pairs]
|
||||
return dict(kvs)
|
||||
|
||||
def ExecStamp(self, path):
|
||||
"""Simple stamp command."""
|
||||
open(path, "w").close()
|
||||
|
||||
def ExecRecursiveMirror(self, source, dest):
|
||||
"""Emulation of rm -rf out && cp -af in out."""
|
||||
if os.path.exists(dest):
|
||||
if os.path.isdir(dest):
|
||||
|
||||
def _on_error(fn, path, excinfo):
|
||||
# The operation failed, possibly because the file is set to
|
||||
# read-only. If that's why, make it writable and try the op again.
|
||||
if not os.access(path, os.W_OK):
|
||||
os.chmod(path, stat.S_IWRITE)
|
||||
fn(path)
|
||||
|
||||
shutil.rmtree(dest, onerror=_on_error)
|
||||
else:
|
||||
if not os.access(dest, os.W_OK):
|
||||
# Attempt to make the file writable before deleting it.
|
||||
os.chmod(dest, stat.S_IWRITE)
|
||||
os.unlink(dest)
|
||||
|
||||
if os.path.isdir(source):
|
||||
shutil.copytree(source, dest)
|
||||
else:
|
||||
shutil.copy2(source, dest)
|
||||
|
||||
def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
|
||||
"""Filter diagnostic output from link that looks like:
|
||||
' Creating library ui.dll.lib and object ui.dll.exp'
|
||||
This happens when there are exports from the dll or exe.
|
||||
"""
|
||||
env = self._GetEnv(arch)
|
||||
if use_separate_mspdbsrv == "True":
|
||||
self._UseSeparateMspdbsrv(env, args)
|
||||
if sys.platform == "win32":
|
||||
args = list(args) # *args is a tuple by default, which is read-only.
|
||||
args[0] = args[0].replace("/", "\\")
|
||||
# https://docs.python.org/2/library/subprocess.html:
|
||||
# "On Unix with shell=True [...] if args is a sequence, the first item
|
||||
# specifies the command string, and any additional items will be treated as
|
||||
# additional arguments to the shell itself. That is to say, Popen does the
|
||||
# equivalent of:
|
||||
# Popen(['/bin/sh', '-c', args[0], args[1], ...])"
|
||||
# For that reason, since going through the shell doesn't seem necessary on
|
||||
# non-Windows don't do that there.
|
||||
link = subprocess.Popen(
|
||||
args,
|
||||
shell=sys.platform == "win32",
|
||||
env=env,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
out = link.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if (
|
||||
not line.startswith(" Creating library ")
|
||||
and not line.startswith("Generating code")
|
||||
and not line.startswith("Finished generating code")
|
||||
):
|
||||
print(line)
|
||||
return link.returncode
|
||||
|
||||
def ExecLinkWithManifests(
|
||||
self,
|
||||
arch,
|
||||
embed_manifest,
|
||||
out,
|
||||
ldcmd,
|
||||
resname,
|
||||
mt,
|
||||
rc,
|
||||
intermediate_manifest,
|
||||
*manifests
|
||||
):
|
||||
"""A wrapper for handling creating a manifest resource and then executing
|
||||
a link command."""
|
||||
# The 'normal' way to do manifests is to have link generate a manifest
|
||||
# based on gathering dependencies from the object files, then merge that
|
||||
# manifest with other manifests supplied as sources, convert the merged
|
||||
# manifest to a resource, and then *relink*, including the compiled
|
||||
# version of the manifest resource. This breaks incremental linking, and
|
||||
# is generally overly complicated. Instead, we merge all the manifests
|
||||
# provided (along with one that includes what would normally be in the
|
||||
# linker-generated one, see msvs_emulation.py), and include that into the
|
||||
# first and only link. We still tell link to generate a manifest, but we
|
||||
# only use that to assert that our simpler process did not miss anything.
|
||||
variables = {
|
||||
"python": sys.executable,
|
||||
"arch": arch,
|
||||
"out": out,
|
||||
"ldcmd": ldcmd,
|
||||
"resname": resname,
|
||||
"mt": mt,
|
||||
"rc": rc,
|
||||
"intermediate_manifest": intermediate_manifest,
|
||||
"manifests": " ".join(manifests),
|
||||
}
|
||||
add_to_ld = ""
|
||||
if manifests:
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
|
||||
"-manifest %(manifests)s -out:%(out)s.manifest" % variables
|
||||
)
|
||||
if embed_manifest == "True":
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest"
|
||||
" %(out)s.manifest.rc %(resname)s" % variables
|
||||
)
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s "
|
||||
"%(out)s.manifest.rc" % variables
|
||||
)
|
||||
add_to_ld = " %(out)s.manifest.res" % variables
|
||||
subprocess.check_call(ldcmd + add_to_ld)
|
||||
|
||||
# Run mt.exe on the theoretically complete manifest we generated, merging
|
||||
# it with the one the linker generated to confirm that the linker
|
||||
# generated one does not add anything. This is strictly unnecessary for
|
||||
# correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
|
||||
# used in a #pragma comment.
|
||||
if manifests:
|
||||
# Merge the intermediate one with ours to .assert.manifest, then check
|
||||
# that .assert.manifest is identical to ours.
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
|
||||
"-manifest %(out)s.manifest %(intermediate_manifest)s "
|
||||
"-out:%(out)s.assert.manifest" % variables
|
||||
)
|
||||
assert_manifest = "%(out)s.assert.manifest" % variables
|
||||
our_manifest = "%(out)s.manifest" % variables
|
||||
# Load and normalize the manifests. mt.exe sometimes removes whitespace,
|
||||
# and sometimes doesn't unfortunately.
|
||||
with open(our_manifest) as our_f:
|
||||
with open(assert_manifest) as assert_f:
|
||||
translator = str.maketrans('', '', string.whitespace)
|
||||
our_data = our_f.read().translate(translator)
|
||||
assert_data = assert_f.read().translate(translator)
|
||||
if our_data != assert_data:
|
||||
os.unlink(out)
|
||||
|
||||
def dump(filename):
|
||||
print(filename, file=sys.stderr)
|
||||
print("-----", file=sys.stderr)
|
||||
with open(filename) as f:
|
||||
print(f.read(), file=sys.stderr)
|
||||
print("-----", file=sys.stderr)
|
||||
|
||||
dump(intermediate_manifest)
|
||||
dump(our_manifest)
|
||||
dump(assert_manifest)
|
||||
sys.stderr.write(
|
||||
'Linker generated manifest "%s" added to final manifest "%s" '
|
||||
'(result in "%s"). '
|
||||
"Were /MANIFEST switches used in #pragma statements? "
|
||||
% (intermediate_manifest, our_manifest, assert_manifest)
|
||||
)
|
||||
return 1
|
||||
|
||||
def ExecManifestWrapper(self, arch, *args):
|
||||
"""Run manifest tool with environment set. Strip out undesirable warning
|
||||
(some XML blocks are recognized by the OS loader, but not the manifest
|
||||
tool)."""
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if line and "manifest authoring warning 81010002" not in line:
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecManifestToRc(self, arch, *args):
|
||||
"""Creates a resource file pointing a SxS assembly manifest.
|
||||
|args| is a tuple containing the path to the resource file, the path to the manifest file
|
||||
and the resource name, which can be "1" (for executables) or "2" (for DLLs)."""
|
||||
manifest_path, resource_path, resource_name = args
|
||||
with open(resource_path, "w") as output:
|
||||
output.write(
|
||||
'#include <windows.h>\n%s RT_MANIFEST "%s"'
|
||||
% (resource_name, os.path.abspath(manifest_path).replace("\\", "/"))
|
||||
)
|
||||
|
||||
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags):
|
||||
"""Filter noisy filenames output from MIDL compile step that isn't
|
||||
quietable via command line flags.
|
||||
"""
|
||||
args = (
|
||||
["midl", "/nologo"]
|
||||
+ list(flags)
|
||||
+ [
|
||||
"/out",
|
||||
outdir,
|
||||
"/tlb",
|
||||
tlb,
|
||||
"/h",
|
||||
h,
|
||||
"/dlldata",
|
||||
dlldata,
|
||||
"/iid",
|
||||
iid,
|
||||
"/proxy",
|
||||
proxy,
|
||||
idl,
|
||||
]
|
||||
)
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
# Filter junk out of stdout, and write filtered versions. Output we want
|
||||
# to filter is pairs of lines that look like this:
|
||||
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
|
||||
# objidl.idl
|
||||
lines = out.splitlines()
|
||||
prefixes = ("Processing ", "64 bit Processing ")
|
||||
processing = {os.path.basename(x) for x in lines if x.startswith(prefixes)}
|
||||
for line in lines:
|
||||
if not line.startswith(prefixes) and line not in processing:
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecAsmWrapper(self, arch, *args):
|
||||
"""Filter logo banner from invocations of asm.exe."""
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if (
|
||||
not line.startswith("Copyright (C) Microsoft Corporation")
|
||||
and not line.startswith("Microsoft (R) Macro Assembler")
|
||||
and not line.startswith(" Assembling: ")
|
||||
and line
|
||||
):
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecRcWrapper(self, arch, *args):
|
||||
"""Filter logo banner from invocations of rc.exe. Older versions of RC
|
||||
don't support the /nologo flag."""
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if (
|
||||
not line.startswith("Microsoft (R) Windows (R) Resource Compiler")
|
||||
and not line.startswith("Copyright (C) Microsoft Corporation")
|
||||
and line
|
||||
):
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecActionWrapper(self, arch, rspfile, *dir):
|
||||
"""Runs an action command line from a response file using the environment
|
||||
for |arch|. If |dir| is supplied, use that as the working directory."""
|
||||
env = self._GetEnv(arch)
|
||||
# TODO(scottmg): This is a temporary hack to get some specific variables
|
||||
# through to actions that are set after gyp-time. http://crbug.com/333738.
|
||||
for k, v in os.environ.items():
|
||||
if k not in env:
|
||||
env[k] = v
|
||||
args = open(rspfile).read()
|
||||
dir = dir[0] if dir else None
|
||||
return subprocess.call(args, shell=True, env=env, cwd=dir)
|
||||
|
||||
def ExecClCompile(self, project_dir, selected_files):
|
||||
"""Executed by msvs-ninja projects when the 'ClCompile' target is used to
|
||||
build selected C/C++ files."""
|
||||
project_dir = os.path.relpath(project_dir, BASE_DIR)
|
||||
selected_files = selected_files.split(";")
|
||||
ninja_targets = [
|
||||
os.path.join(project_dir, filename) + "^^" for filename in selected_files
|
||||
]
|
||||
cmd = ["ninja.exe"]
|
||||
cmd.extend(ninja_targets)
|
||||
return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
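# Hedged sketch, not part of gyp: the "environment block" format parsed by
# _GetEnv in win_tool.py above. msvs_emulation saves NUL-separated KEY=VALUE
# pairs with an extra NUL terminator; _GetEnv drops the last two NULs and
# splits on NUL. The variable values here are made up.
block = "PATH=C:\\tools\0INCLUDE=C:\\sdk\\include\0\0"

pairs = block[:-2].split("\0")
env = dict(item.split("=", 1) for item in pairs)
assert env == {"PATH": "C:\\tools", "INCLUDE": "C:\\sdk\\include"}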
1939
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
generated
vendored
Normal file
1939
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
generated
vendored
Normal file
File diff suppressed because it is too large
302
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
generated
vendored
Normal file
302
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
generated
vendored
Normal file
@@ -0,0 +1,302 @@
|
||||
# Copyright (c) 2014 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Xcode-ninja wrapper project file generator.
|
||||
|
||||
This updates the data structures passed to the Xcode gyp generator to build
|
||||
with ninja instead. The Xcode project itself is transformed into a list of
|
||||
executable targets, each with a build step to build with ninja, and a target
|
||||
with every source and resource file. This appears to sidestep some of the
|
||||
major performance headaches experienced using complex projects and large numbers
|
||||
of targets within Xcode.
|
||||
"""
|
||||
|
||||
import errno
|
||||
import gyp.generator.ninja
|
||||
import os
|
||||
import re
|
||||
import xml.sax.saxutils
|
||||
|
||||
|
||||
def _WriteWorkspace(main_gyp, sources_gyp, params):
|
||||
""" Create a workspace to wrap main and sources gyp paths. """
|
||||
(build_file_root, build_file_ext) = os.path.splitext(main_gyp)
|
||||
workspace_path = build_file_root + ".xcworkspace"
|
||||
options = params["options"]
|
||||
if options.generator_output:
|
||||
workspace_path = os.path.join(options.generator_output, workspace_path)
|
||||
try:
|
||||
os.makedirs(workspace_path)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
raise
|
||||
output_string = (
|
||||
'<?xml version="1.0" encoding="UTF-8"?>\n' + '<Workspace version = "1.0">\n'
|
||||
)
|
||||
for gyp_name in [main_gyp, sources_gyp]:
|
||||
name = os.path.splitext(os.path.basename(gyp_name))[0] + ".xcodeproj"
|
||||
name = xml.sax.saxutils.quoteattr("group:" + name)
|
||||
output_string += " <FileRef location = %s></FileRef>\n" % name
|
||||
output_string += "</Workspace>\n"
|
||||
|
||||
workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
|
||||
|
||||
try:
|
||||
with open(workspace_file) as input_file:
|
||||
input_string = input_file.read()
|
||||
if input_string == output_string:
|
||||
return
|
||||
except OSError:
|
||||
# Ignore errors if the file doesn't exist.
|
||||
pass
|
||||
|
||||
with open(workspace_file, "w") as output_file:
|
||||
output_file.write(output_string)
|
||||
|
||||
|
||||
def _TargetFromSpec(old_spec, params):
|
||||
""" Create fake target for xcode-ninja wrapper. """
|
||||
# Determine ninja top level build dir (e.g. /path/to/out).
|
||||
ninja_toplevel = None
|
||||
jobs = 0
|
||||
if params:
|
||||
options = params["options"]
|
||||
ninja_toplevel = os.path.join(
|
||||
options.toplevel_dir, gyp.generator.ninja.ComputeOutputDir(params)
|
||||
)
|
||||
jobs = params.get("generator_flags", {}).get("xcode_ninja_jobs", 0)
|
||||
|
||||
target_name = old_spec.get("target_name")
|
||||
product_name = old_spec.get("product_name", target_name)
|
||||
product_extension = old_spec.get("product_extension")
|
||||
|
||||
ninja_target = {}
|
||||
ninja_target["target_name"] = target_name
|
||||
ninja_target["product_name"] = product_name
|
||||
if product_extension:
|
||||
ninja_target["product_extension"] = product_extension
|
||||
ninja_target["toolset"] = old_spec.get("toolset")
|
||||
ninja_target["default_configuration"] = old_spec.get("default_configuration")
|
||||
ninja_target["configurations"] = {}
|
||||
|
||||
# Tell Xcode to look in |ninja_toplevel| for build products.
|
||||
new_xcode_settings = {}
|
||||
if ninja_toplevel:
|
||||
new_xcode_settings["CONFIGURATION_BUILD_DIR"] = (
|
||||
"%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
|
||||
)
|
||||
|
||||
if "configurations" in old_spec:
|
||||
for config in old_spec["configurations"]:
|
||||
old_xcode_settings = old_spec["configurations"][config].get(
|
||||
"xcode_settings", {}
|
||||
)
|
||||
if "IPHONEOS_DEPLOYMENT_TARGET" in old_xcode_settings:
|
||||
new_xcode_settings["CODE_SIGNING_REQUIRED"] = "NO"
|
||||
new_xcode_settings["IPHONEOS_DEPLOYMENT_TARGET"] = old_xcode_settings[
|
||||
"IPHONEOS_DEPLOYMENT_TARGET"
|
||||
]
|
||||
for key in ["BUNDLE_LOADER", "TEST_HOST"]:
|
||||
if key in old_xcode_settings:
|
||||
new_xcode_settings[key] = old_xcode_settings[key]
|
||||
|
||||
ninja_target["configurations"][config] = {}
|
||||
ninja_target["configurations"][config][
|
||||
"xcode_settings"
|
||||
] = new_xcode_settings
|
||||
|
||||
ninja_target["mac_bundle"] = old_spec.get("mac_bundle", 0)
|
||||
ninja_target["mac_xctest_bundle"] = old_spec.get("mac_xctest_bundle", 0)
|
||||
ninja_target["ios_app_extension"] = old_spec.get("ios_app_extension", 0)
|
||||
ninja_target["ios_watchkit_extension"] = old_spec.get("ios_watchkit_extension", 0)
|
||||
ninja_target["ios_watchkit_app"] = old_spec.get("ios_watchkit_app", 0)
|
||||
ninja_target["type"] = old_spec["type"]
|
||||
if ninja_toplevel:
|
||||
ninja_target["actions"] = [
|
||||
{
|
||||
"action_name": "Compile and copy %s via ninja" % target_name,
|
||||
"inputs": [],
|
||||
"outputs": [],
|
||||
"action": [
|
||||
"env",
|
||||
"PATH=%s" % os.environ["PATH"],
|
||||
"ninja",
|
||||
"-C",
|
||||
new_xcode_settings["CONFIGURATION_BUILD_DIR"],
|
||||
target_name,
|
||||
],
|
||||
"message": "Compile and copy %s via ninja" % target_name,
|
||||
},
|
||||
]
|
||||
if jobs > 0:
|
||||
ninja_target["actions"][0]["action"].extend(("-j", jobs))
|
||||
return ninja_target
|
||||
|
||||
|
||||
def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
|
||||
"""Limit targets for Xcode wrapper.
|
||||
|
||||
Xcode sometimes performs poorly with too many targets, so only include
|
||||
proper executable targets, with filters to customize.
|
||||
Arguments:
|
||||
target_extras: Regular expression to always add, matching any target.
|
||||
executable_target_pattern: Regular expression limiting executable targets.
|
||||
spec: Specifications for target.
|
||||
"""
|
||||
target_name = spec.get("target_name")
|
||||
# Always include targets matching target_extras.
|
||||
if target_extras is not None and re.search(target_extras, target_name):
|
||||
return True
|
||||
|
||||
# Otherwise just show executable targets and xc_tests.
|
||||
if int(spec.get("mac_xctest_bundle", 0)) != 0 or (
|
||||
spec.get("type", "") == "executable"
|
||||
and spec.get("product_extension", "") != "bundle"
|
||||
):
|
||||
|
||||
# If there is a filter and the target does not match, exclude the target.
|
||||
if executable_target_pattern is not None:
|
||||
if not re.search(executable_target_pattern, target_name):
|
||||
return False
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def CreateWrapper(target_list, target_dicts, data, params):
|
||||
"""Initialize targets for the ninja wrapper.
|
||||
|
||||
This sets up the necessary variables in the targets to generate Xcode projects
|
||||
that use ninja as an external builder.
|
||||
Arguments:
|
||||
target_list: List of target pairs: 'base/base.gyp:base'.
|
||||
target_dicts: Dict of target properties keyed on target pair.
|
||||
data: Dict of flattened build files keyed on gyp path.
|
||||
params: Dict of global options for gyp.
|
||||
"""
|
||||
orig_gyp = params["build_files"][0]
|
||||
for gyp_name, gyp_dict in data.items():
|
||||
if gyp_name == orig_gyp:
|
||||
depth = gyp_dict["_DEPTH"]
|
||||
|
||||
# Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
|
||||
# and prepend .ninja before the .gyp extension.
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
main_gyp = generator_flags.get("xcode_ninja_main_gyp", None)
|
||||
if main_gyp is None:
|
||||
(build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
|
||||
main_gyp = build_file_root + ".ninja" + build_file_ext
|
||||
|
||||
# Create new |target_list|, |target_dicts| and |data| data structures.
|
||||
new_target_list = []
|
||||
new_target_dicts = {}
|
||||
new_data = {}
|
||||
|
||||
# Set base keys needed for |data|.
|
||||
new_data[main_gyp] = {}
|
||||
new_data[main_gyp]["included_files"] = []
|
||||
new_data[main_gyp]["targets"] = []
|
||||
new_data[main_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
|
||||
|
||||
# Normally the xcode-ninja generator includes only valid executable targets.
|
||||
# If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
|
||||
# executable targets that match the pattern. (Default all)
|
||||
executable_target_pattern = generator_flags.get(
|
||||
"xcode_ninja_executable_target_pattern", None
|
||||
)
|
||||
|
||||
# For including other non-executable targets, add the matching target name
|
||||
# to the |xcode_ninja_target_pattern| regular expression. (Default none)
|
||||
target_extras = generator_flags.get("xcode_ninja_target_pattern", None)
|
||||
|
||||
for old_qualified_target in target_list:
|
||||
spec = target_dicts[old_qualified_target]
|
||||
if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
|
||||
# Add to new_target_list.
|
||||
target_name = spec.get("target_name")
|
||||
new_target_name = f"{main_gyp}:{target_name}#target"
|
||||
new_target_list.append(new_target_name)
|
||||
|
||||
# Add to new_target_dicts.
|
||||
new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
|
||||
|
||||
# Add to new_data.
|
||||
for old_target in data[old_qualified_target.split(":")[0]]["targets"]:
|
||||
if old_target["target_name"] == target_name:
|
||||
new_data_target = {}
|
||||
new_data_target["target_name"] = old_target["target_name"]
|
||||
new_data_target["toolset"] = old_target["toolset"]
|
||||
new_data[main_gyp]["targets"].append(new_data_target)
|
||||
|
||||
# Create sources target.
|
||||
sources_target_name = "sources_for_indexing"
|
||||
sources_target = _TargetFromSpec(
|
||||
{
|
||||
"target_name": sources_target_name,
|
||||
"toolset": "target",
|
||||
"default_configuration": "Default",
|
||||
"mac_bundle": "0",
|
||||
"type": "executable",
|
||||
},
|
||||
None,
|
||||
)
|
||||
|
||||
# Tell Xcode to look everywhere for headers.
|
||||
sources_target["configurations"] = {"Default": {"include_dirs": [depth]}}
|
||||
|
||||
# Put excluded files into the sources target so they can be opened in Xcode.
|
||||
skip_excluded_files = not generator_flags.get(
|
||||
"xcode_ninja_list_excluded_files", True
|
||||
)
|
||||
|
||||
sources = []
|
||||
for target, target_dict in target_dicts.items():
|
||||
base = os.path.dirname(target)
|
||||
files = target_dict.get("sources", []) + target_dict.get(
|
||||
"mac_bundle_resources", []
|
||||
)
|
||||
|
||||
if not skip_excluded_files:
|
||||
files.extend(
|
||||
target_dict.get("sources_excluded", [])
|
||||
+ target_dict.get("mac_bundle_resources_excluded", [])
|
||||
)
|
||||
|
||||
for action in target_dict.get("actions", []):
|
||||
files.extend(action.get("inputs", []))
|
||||
|
||||
if not skip_excluded_files:
|
||||
files.extend(action.get("inputs_excluded", []))
|
||||
|
||||
# Remove files starting with $. These are mostly intermediate files for the
|
||||
# build system.
|
||||
files = [file for file in files if not file.startswith("$")]
|
||||
|
||||
# Make sources relative to root build file.
|
||||
relative_path = os.path.dirname(main_gyp)
|
||||
sources += [
|
||||
os.path.relpath(os.path.join(base, file), relative_path) for file in files
|
||||
]
|
||||
|
||||
sources_target["sources"] = sorted(set(sources))
|
||||
|
||||
# Put sources_to_index in its own gyp.
|
||||
sources_gyp = os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
|
||||
fully_qualified_target_name = f"{sources_gyp}:{sources_target_name}#target"
|
||||
|
||||
# Add to new_target_list, new_target_dicts and new_data.
|
||||
new_target_list.append(fully_qualified_target_name)
|
||||
new_target_dicts[fully_qualified_target_name] = sources_target
|
||||
new_data_target = {}
|
||||
new_data_target["target_name"] = sources_target["target_name"]
|
||||
new_data_target["_DEPTH"] = depth
|
||||
new_data_target["toolset"] = "target"
|
||||
new_data[sources_gyp] = {}
|
||||
new_data[sources_gyp]["targets"] = []
|
||||
new_data[sources_gyp]["included_files"] = []
|
||||
new_data[sources_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
|
||||
new_data[sources_gyp]["targets"].append(new_data_target)
|
||||
|
||||
# Write workspace to file.
|
||||
_WriteWorkspace(main_gyp, sources_gyp, params)
|
||||
return (new_target_list, new_target_dicts, new_data)
|
||||
3197
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
generated
vendored
Normal file
File diff suppressed because it is too large
65
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Applies a fix to CR LF TAB handling in xml.dom.
|
||||
|
||||
Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
|
||||
Working around this: http://bugs.python.org/issue5752
|
||||
TODO(bradnelson): Consider dropping this when we drop XP support.
|
||||
"""
|
||||
|
||||
|
||||
import xml.dom.minidom
|
||||
|
||||
|
||||
def _Replacement_write_data(writer, data, is_attrib=False):
|
||||
"""Writes datachars to writer."""
|
||||
data = data.replace("&", "&").replace("<", "<")
|
||||
data = data.replace('"', """).replace(">", ">")
|
||||
if is_attrib:
|
||||
data = data.replace("\r", "
").replace("\n", "
").replace("\t", "	")
|
||||
writer.write(data)
|
||||
|
||||
|
||||
def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
|
||||
# indent = current indentation
|
||||
# addindent = indentation to add to higher levels
|
||||
# newl = newline string
|
||||
writer.write(indent + "<" + self.tagName)
|
||||
|
||||
attrs = self._get_attributes()
|
||||
a_names = sorted(attrs.keys())
|
||||
|
||||
for a_name in a_names:
|
||||
writer.write(' %s="' % a_name)
|
||||
_Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
|
||||
writer.write('"')
|
||||
if self.childNodes:
|
||||
writer.write(">%s" % newl)
|
||||
for node in self.childNodes:
|
||||
node.writexml(writer, indent + addindent, addindent, newl)
|
||||
writer.write(f"{indent}</{self.tagName}>{newl}")
|
||||
else:
|
||||
writer.write("/>%s" % newl)
|
||||
|
||||
|
||||
class XmlFix:
|
||||
"""Object to manage temporary patching of xml.dom.minidom."""
|
||||
|
||||
def __init__(self):
|
||||
# Preserve current xml.dom.minidom functions.
|
||||
self.write_data = xml.dom.minidom._write_data
|
||||
self.writexml = xml.dom.minidom.Element.writexml
|
||||
# Inject replacement versions of a function and a method.
|
||||
xml.dom.minidom._write_data = _Replacement_write_data
|
||||
xml.dom.minidom.Element.writexml = _Replacement_writexml
|
||||
|
||||
def Cleanup(self):
|
||||
if self.write_data:
|
||||
xml.dom.minidom._write_data = self.write_data
|
||||
xml.dom.minidom.Element.writexml = self.writexml
|
||||
self.write_data = None
|
||||
|
||||
def __del__(self):
|
||||
self.Cleanup()
|
||||
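For illustration only (not part of the vendored file above): a minimal sketch of how XmlFix might be used to patch minidom around a serialization call, assuming gyp's pylib directory is on sys.path so that gyp.xml_fix is importable.

import xml.dom.minidom
from gyp.xml_fix import XmlFix  # assumes pylib/ is on sys.path

fix = XmlFix()                                   # install the patched writers
doc = xml.dom.minidom.parseString("<root/>")
doc.documentElement.setAttribute("cmd", "a\tb")  # attribute containing a tab
print(doc.toxml())                               # the tab is serialized as &#x9;
fix.Cleanup()                                    # restore the original functions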
41
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/pyproject.toml
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "gyp-next"
|
||||
version = "0.14.0"
|
||||
authors = [
|
||||
{ name="Node.js contributors", email="ryzokuken@disroot.org" },
|
||||
]
|
||||
description = "A fork of the GYP build system for use in the Node.js projects"
|
||||
readme = "README.md"
|
||||
license = { file="LICENSE" }
|
||||
requires-python = ">=3.6"
|
||||
classifiers = [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: BSD License",
|
||||
"Natural Language :: English",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = ["flake8", "pytest"]
|
||||
|
||||
[project.scripts]
|
||||
gyp = "gyp:script_main"
|
||||
|
||||
[project.urls]
|
||||
"Homepage" = "https://github.com/nodejs/gyp-next"
|
||||
|
||||
[tool.setuptools]
|
||||
package-dir = {"" = "pylib"}
|
||||
packages = ["gyp", "gyp.generator"]
|
||||
261
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/test_gyp.py
generated
vendored
Executable file
@@ -0,0 +1,261 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""gyptest.py -- test runner for GYP tests."""
|
||||
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import platform
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
|
||||
def is_test_name(f):
|
||||
return f.startswith("gyptest") and f.endswith(".py")
|
||||
|
||||
|
||||
def find_all_gyptest_files(directory):
|
||||
result = []
|
||||
for root, dirs, files in os.walk(directory):
|
||||
result.extend([os.path.join(root, f) for f in files if is_test_name(f)])
|
||||
result.sort()
|
||||
return result
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("-a", "--all", action="store_true", help="run all tests")
|
||||
parser.add_argument("-C", "--chdir", action="store", help="change to directory")
|
||||
parser.add_argument(
|
||||
"-f",
|
||||
"--format",
|
||||
action="store",
|
||||
default="",
|
||||
help="run tests with the specified formats",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-G",
|
||||
"--gyp_option",
|
||||
action="append",
|
||||
default=[],
|
||||
help="Add -G options to the gyp command line",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-l", "--list", action="store_true", help="list available tests and exit"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
"--no-exec",
|
||||
action="store_true",
|
||||
help="no execute, just print the command line",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--path", action="append", default=[], help="additional $PATH directory"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-q",
|
||||
"--quiet",
|
||||
action="store_true",
|
||||
help="quiet, don't print anything unless there are failures",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v",
|
||||
"--verbose",
|
||||
action="store_true",
|
||||
help="print configuration info and test results.",
|
||||
)
|
||||
parser.add_argument("tests", nargs="*")
|
||||
args = parser.parse_args(argv[1:])
|
||||
|
||||
if args.chdir:
|
||||
os.chdir(args.chdir)
|
||||
|
||||
if args.path:
|
||||
extra_path = [os.path.abspath(p) for p in args.path]
|
||||
extra_path = os.pathsep.join(extra_path)
|
||||
os.environ["PATH"] = extra_path + os.pathsep + os.environ["PATH"]
|
||||
|
||||
if not args.tests:
|
||||
if not args.all:
|
||||
sys.stderr.write("Specify -a to get all tests.\n")
|
||||
return 1
|
||||
args.tests = ["test"]
|
||||
|
||||
tests = []
|
||||
for arg in args.tests:
|
||||
if os.path.isdir(arg):
|
||||
tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
|
||||
else:
|
||||
if not is_test_name(os.path.basename(arg)):
|
||||
print(arg, "is not a valid gyp test name.", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
tests.append(arg)
|
||||
|
||||
if args.list:
|
||||
for test in tests:
|
||||
print(test)
|
||||
sys.exit(0)
|
||||
|
||||
os.environ["PYTHONPATH"] = os.path.abspath("test/lib")
|
||||
|
||||
if args.verbose:
|
||||
print_configuration_info()
|
||||
|
||||
if args.gyp_option and not args.quiet:
|
||||
print("Extra Gyp options: %s\n" % args.gyp_option)
|
||||
|
||||
if args.format:
|
||||
format_list = args.format.split(",")
|
||||
else:
|
||||
format_list = {
|
||||
"aix5": ["make"],
|
||||
"os400": ["make"],
|
||||
"freebsd7": ["make"],
|
||||
"freebsd8": ["make"],
|
||||
"openbsd5": ["make"],
|
||||
"cygwin": ["msvs"],
|
||||
"win32": ["msvs", "ninja"],
|
||||
"linux": ["make", "ninja"],
|
||||
"linux2": ["make", "ninja"],
|
||||
"linux3": ["make", "ninja"],
|
||||
# TODO: Re-enable xcode-ninja.
|
||||
# https://bugs.chromium.org/p/gyp/issues/detail?id=530
|
||||
# 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
|
||||
"darwin": ["make", "ninja", "xcode"],
|
||||
}[sys.platform]
|
||||
|
||||
gyp_options = []
|
||||
for option in args.gyp_option:
|
||||
gyp_options += ["-G", option]
|
||||
|
||||
runner = Runner(format_list, tests, gyp_options, args.verbose)
|
||||
runner.run()
|
||||
|
||||
if not args.quiet:
|
||||
runner.print_results()
|
||||
|
||||
return 1 if runner.failures else 0
|
||||
|
||||
|
||||
def print_configuration_info():
|
||||
print("Test configuration:")
|
||||
if sys.platform == "darwin":
|
||||
sys.path.append(os.path.abspath("test/lib"))
|
||||
import TestMac
|
||||
|
||||
print(f" Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}")
|
||||
print(f" Xcode {TestMac.Xcode.Version()}")
|
||||
elif sys.platform == "win32":
|
||||
sys.path.append(os.path.abspath("pylib"))
|
||||
import gyp.MSVSVersion
|
||||
|
||||
print(" Win %s %s\n" % platform.win32_ver()[0:2])
|
||||
print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description())
|
||||
elif sys.platform in ("linux", "linux2"):
|
||||
print(" Linux %s" % " ".join(platform.linux_distribution()))
|
||||
print(f" Python {platform.python_version()}")
|
||||
print(f" PYTHONPATH={os.environ['PYTHONPATH']}")
|
||||
print()
|
||||
|
||||
|
||||
class Runner:
|
||||
def __init__(self, formats, tests, gyp_options, verbose):
|
||||
self.formats = formats
|
||||
self.tests = tests
|
||||
self.verbose = verbose
|
||||
self.gyp_options = gyp_options
|
||||
self.failures = []
|
||||
self.num_tests = len(formats) * len(tests)
|
||||
num_digits = len(str(self.num_tests))
|
||||
self.fmt_str = "[%%%dd/%%%dd] (%%s) %%s" % (num_digits, num_digits)
|
||||
self.isatty = sys.stdout.isatty() and not self.verbose
|
||||
self.env = os.environ.copy()
|
||||
self.hpos = 0
|
||||
|
||||
def run(self):
|
||||
run_start = time.time()
|
||||
|
||||
i = 1
|
||||
for fmt in self.formats:
|
||||
for test in self.tests:
|
||||
self.run_test(test, fmt, i)
|
||||
i += 1
|
||||
|
||||
if self.isatty:
|
||||
self.erase_current_line()
|
||||
|
||||
self.took = time.time() - run_start
|
||||
|
||||
def run_test(self, test, fmt, i):
|
||||
if self.isatty:
|
||||
self.erase_current_line()
|
||||
|
||||
msg = self.fmt_str % (i, self.num_tests, fmt, test)
|
||||
self.print_(msg)
|
||||
|
||||
start = time.time()
|
||||
cmd = [sys.executable, test] + self.gyp_options
|
||||
self.env["TESTGYP_FORMAT"] = fmt
|
||||
proc = subprocess.Popen(
|
||||
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=self.env
|
||||
)
|
||||
proc.wait()
|
||||
took = time.time() - start
|
||||
|
||||
stdout = proc.stdout.read().decode("utf8")
|
||||
if proc.returncode == 2:
|
||||
res = "skipped"
|
||||
elif proc.returncode:
|
||||
res = "failed"
|
||||
self.failures.append(f"({test}) {fmt}")
|
||||
else:
|
||||
res = "passed"
|
||||
res_msg = f" {res} {took:.3f}s"
|
||||
self.print_(res_msg)
|
||||
|
||||
if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")):
|
||||
print()
|
||||
print("\n".join(f" {line}" for line in stdout.splitlines()))
|
||||
elif not self.isatty:
|
||||
print()
|
||||
|
||||
def print_(self, msg):
|
||||
print(msg, end="")
|
||||
index = msg.rfind("\n")
|
||||
if index == -1:
|
||||
self.hpos += len(msg)
|
||||
else:
|
||||
self.hpos = len(msg) - index
|
||||
sys.stdout.flush()
|
||||
|
||||
def erase_current_line(self):
|
||||
print("\b" * self.hpos + " " * self.hpos + "\b" * self.hpos, end="")
|
||||
sys.stdout.flush()
|
||||
self.hpos = 0
|
||||
|
||||
def print_results(self):
|
||||
num_failures = len(self.failures)
|
||||
if num_failures:
|
||||
print()
|
||||
if num_failures == 1:
|
||||
print("Failed the following test:")
|
||||
else:
|
||||
print("Failed the following %d tests:" % num_failures)
|
||||
print("\t" + "\n\t".join(sorted(self.failures)))
|
||||
print()
|
||||
print(
|
||||
"Ran %d tests in %.3fs, %d failed."
|
||||
% (self.num_tests, self.took, num_failures)
|
||||
)
|
||||
print()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
15
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/README
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
pretty_vcproj:
|
||||
Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
|
||||
|
||||
The key/value pairs are used to resolve vsprops names.
|
||||
|
||||
For example, if I want to diff the base.vcproj project:
|
||||
|
||||
pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt
|
||||
pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
|
||||
|
||||
And you can use your favorite diff tool to see the changes.
|
||||
|
||||
Note: In the case of base.vcproj, the original vcproj is one level up from the generated one.
|
||||
I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt
|
||||
before you perform the diff.
|
||||
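A minimal sketch (not part of the tree) of the same diff workflow driven from Python, assuming pretty_vcproj.py sits in the current directory; the vcproj paths and the $(SolutionDir) value are only placeholders taken from the example above.

import difflib
import subprocess
import sys

def pretty(vcproj, *defines):
    # Run pretty_vcproj.py and capture its normalized output as lines.
    result = subprocess.run(
        [sys.executable, "pretty_vcproj.py", vcproj, *defines],
        capture_output=True, text=True, check=True,
    )
    return result.stdout.splitlines(keepends=True)

solution_dir = r"$(SolutionDir)=z:\dev\src-chrome\src\chrome\\"
original = pretty(r"z:\dev\src-chrome\src\base\build\base.vcproj", solution_dir)
generated = pretty(r"z:\dev\src-chrome\src\base\base_gyp.vcproj", solution_dir)
sys.stdout.writelines(difflib.unified_diff(original, generated))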
5
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/Xcode/README
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
Specifications contains syntax formatters for Xcode 3. These do not appear to be supported yet on Xcode 4. To use these with Xcode 3 please install both the gyp.pbfilespec and gyp.xclangspec files in
|
||||
|
||||
~/Library/Application Support/Developer/Shared/Xcode/Specifications/
|
||||
|
||||
and restart Xcode.
|
||||
27
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
/*
|
||||
gyp.pbfilespec
|
||||
GYP source file spec for Xcode 3
|
||||
|
||||
There is not much documentation available regarding the format
|
||||
of .pbfilespec files. As a starting point, see for instance the
|
||||
outdated documentation at:
|
||||
http://maxao.free.fr/xcode-plugin-interface/specifications.html
|
||||
and the files in:
|
||||
/Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
|
||||
|
||||
Place this file in directory:
|
||||
~/Library/Application Support/Developer/Shared/Xcode/Specifications/
|
||||
*/
|
||||
|
||||
(
|
||||
{
|
||||
Identifier = sourcecode.gyp;
|
||||
BasedOn = sourcecode;
|
||||
Name = "GYP Files";
|
||||
Extensions = ("gyp", "gypi");
|
||||
MIMETypes = ("text/gyp");
|
||||
Language = "xcode.lang.gyp";
|
||||
IsTextFile = YES;
|
||||
IsSourceFile = YES;
|
||||
}
|
||||
)
|
||||
226
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec
generated
vendored
Normal file
@@ -0,0 +1,226 @@
|
||||
/*
|
||||
Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
Use of this source code is governed by a BSD-style license that can be
|
||||
found in the LICENSE file.
|
||||
|
||||
gyp.xclangspec
|
||||
GYP language specification for Xcode 3
|
||||
|
||||
There is not much documentation available regarding the format
|
||||
of .xclangspec files. As a starting point, see for instance the
|
||||
outdated documentation at:
|
||||
http://maxao.free.fr/xcode-plugin-interface/specifications.html
|
||||
and the files in:
|
||||
/Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
|
||||
|
||||
Place this file in directory:
|
||||
~/Library/Application Support/Developer/Shared/Xcode/Specifications/
|
||||
*/
|
||||
|
||||
(
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.keyword";
|
||||
Syntax = {
|
||||
Words = (
|
||||
"and",
|
||||
"or",
|
||||
"<!",
|
||||
"<",
|
||||
);
|
||||
Type = "xcode.syntax.keyword";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.target.declarator";
|
||||
Syntax = {
|
||||
Words = (
|
||||
"'target_name'",
|
||||
);
|
||||
Type = "xcode.syntax.identifier.type";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.string.singlequote";
|
||||
Syntax = {
|
||||
IncludeRules = (
|
||||
"xcode.lang.string",
|
||||
"xcode.lang.gyp.keyword",
|
||||
"xcode.lang.number",
|
||||
);
|
||||
Start = "'";
|
||||
End = "'";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.comma";
|
||||
Syntax = {
|
||||
Words = ( ",", );
|
||||
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp";
|
||||
Description = "GYP Coloring";
|
||||
BasedOn = "xcode.lang.simpleColoring";
|
||||
IncludeInMenu = YES;
|
||||
Name = "GYP";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer.toplevel";
|
||||
IncludeRules = (
|
||||
"xcode.lang.gyp.dictionary",
|
||||
);
|
||||
Type = "xcode.syntax.plain";
|
||||
};
|
||||
},
|
||||
|
||||
// The following rule returns tokens to the other rules
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.lexer";
|
||||
Syntax = {
|
||||
IncludeRules = (
|
||||
"xcode.lang.gyp.comment",
|
||||
"xcode.lang.string",
|
||||
'xcode.lang.gyp.targetname.declarator',
|
||||
"xcode.lang.gyp.string.singlequote",
|
||||
"xcode.lang.number",
|
||||
"xcode.lang.gyp.comma",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.lexer.toplevel";
|
||||
Syntax = {
|
||||
IncludeRules = (
|
||||
"xcode.lang.gyp.comment",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.assignment";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Rules = (
|
||||
"xcode.lang.gyp.assignment.lhs",
|
||||
":",
|
||||
"xcode.lang.gyp.assignment.rhs",
|
||||
);
|
||||
};
|
||||
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.target.declaration";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Rules = (
|
||||
"xcode.lang.gyp.target.declarator",
|
||||
":",
|
||||
"xcode.lang.gyp.target.name",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.target.name";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Rules = (
|
||||
"xcode.lang.gyp.string.singlequote",
|
||||
);
|
||||
Type = "xcode.syntax.definition.function";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.assignment.lhs";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Rules = (
|
||||
"xcode.lang.gyp.string.singlequote",
|
||||
);
|
||||
Type = "xcode.syntax.identifier.type";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.assignment.rhs";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Rules = (
|
||||
"xcode.lang.gyp.string.singlequote?",
|
||||
"xcode.lang.gyp.array?",
|
||||
"xcode.lang.gyp.dictionary?",
|
||||
"xcode.lang.number?",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.dictionary";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Start = "{";
|
||||
End = "}";
|
||||
Foldable = YES;
|
||||
Recursive = YES;
|
||||
IncludeRules = (
|
||||
"xcode.lang.gyp.target.declaration",
|
||||
"xcode.lang.gyp.assignment",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.array";
|
||||
Syntax = {
|
||||
Tokenizer = "xcode.lang.gyp.lexer";
|
||||
Start = "[";
|
||||
End = "]";
|
||||
Foldable = YES;
|
||||
Recursive = YES;
|
||||
IncludeRules = (
|
||||
"xcode.lang.gyp.array",
|
||||
"xcode.lang.gyp.dictionary",
|
||||
"xcode.lang.gyp.string.singlequote",
|
||||
);
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.todo.mark";
|
||||
Syntax = {
|
||||
StartChars = "T";
|
||||
Match = (
|
||||
"^\(TODO\(.*\):[ \t]+.*\)$", // include "TODO: " in the markers list
|
||||
);
|
||||
// This is the order of captures. All of the match strings above need the same order.
|
||||
CaptureTypes = (
|
||||
"xcode.syntax.mark"
|
||||
);
|
||||
Type = "xcode.syntax.comment";
|
||||
};
|
||||
},
|
||||
|
||||
{
|
||||
Identifier = "xcode.lang.gyp.comment";
|
||||
BasedOn = "xcode.lang.comment"; // for text macros
|
||||
Syntax = {
|
||||
Start = "#";
|
||||
End = "\n";
|
||||
IncludeRules = (
|
||||
"xcode.lang.url",
|
||||
"xcode.lang.url.mail",
|
||||
"xcode.lang.comment.mark",
|
||||
"xcode.lang.gyp.todo.mark",
|
||||
);
|
||||
Type = "xcode.syntax.comment";
|
||||
};
|
||||
},
|
||||
)
|
||||
12
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/README
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
How to install gyp-mode for emacs:
|
||||
|
||||
Add the following to your ~/.emacs (replace ... with the path to your gyp
|
||||
checkout).
|
||||
|
||||
(setq load-path (cons ".../tools/emacs" load-path))
|
||||
(require 'gyp)
|
||||
|
||||
Restart emacs (or eval-region the added lines) and you should be all set.
|
||||
|
||||
Please note that ert is required for running the tests, which is included in
|
||||
Emacs 24, or available separately from https://github.com/ohler/ert
|
||||
63
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
;;; gyp-tests.el - unit tests for gyp-mode.
|
||||
|
||||
;; Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
;; Use of this source code is governed by a BSD-style license that can be
|
||||
;; found in the LICENSE file.
|
||||
|
||||
;; The recommended way to run these tests is to run them from the command-line,
|
||||
;; with the run-unit-tests.sh script.
|
||||
|
||||
(require 'cl)
|
||||
(require 'ert)
|
||||
(require 'gyp)
|
||||
|
||||
(defconst samples (directory-files "testdata" t ".gyp$")
|
||||
"List of golden samples to check")
|
||||
|
||||
(defun fontify (filename)
|
||||
(with-temp-buffer
|
||||
(insert-file-contents-literally filename)
|
||||
(gyp-mode)
|
||||
(font-lock-fontify-buffer)
|
||||
(buffer-string)))
|
||||
|
||||
(defun read-golden-sample (filename)
|
||||
(with-temp-buffer
|
||||
(insert-file-contents-literally (concat filename ".fontified"))
|
||||
(read (current-buffer))))
|
||||
|
||||
(defun equivalent-face (face)
|
||||
"For the purposes of face comparison, we're not interested in the
|
||||
differences between certain faces. For example, the difference between
|
||||
font-lock-comment-delimiter and font-lock-comment-face."
|
||||
(cl-case face
|
||||
((font-lock-comment-delimiter-face) font-lock-comment-face)
|
||||
(t face)))
|
||||
|
||||
(defun text-face-properties (s)
|
||||
"Extract the text properties from s"
|
||||
(let ((result (list t)))
|
||||
(dotimes (i (length s))
|
||||
(setq result (cons (equivalent-face (get-text-property i 'face s))
|
||||
result)))
|
||||
(nreverse result)))
|
||||
|
||||
(ert-deftest test-golden-samples ()
|
||||
"Check that fontification produces the same results as the golden samples"
|
||||
(dolist (sample samples)
|
||||
(let ((golden (read-golden-sample sample))
|
||||
(fontified (fontify sample)))
|
||||
(should (equal golden fontified))
|
||||
(should (equal (text-face-properties golden)
|
||||
(text-face-properties fontified))))))
|
||||
|
||||
(defun create-golden-sample (filename)
|
||||
"Create a golden sample by fontifying filename and writing out the printable
|
||||
representation of the fontified buffer (with text properties) to the
|
||||
FILENAME.fontified"
|
||||
(with-temp-file (concat filename ".fontified")
|
||||
(print (fontify filename) (current-buffer))))
|
||||
|
||||
(defun create-golden-samples ()
|
||||
"Recreate the golden samples"
|
||||
(dolist (sample samples) (create-golden-sample sample)))
|
||||
275
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/gyp.el
generated
vendored
Normal file
@@ -0,0 +1,275 @@
|
||||
;;; gyp.el - font-lock-mode support for gyp files.
|
||||
|
||||
;; Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
;; Use of this source code is governed by a BSD-style license that can be
|
||||
;; found in the LICENSE file.
|
||||
|
||||
;; Put this somewhere in your load-path and
|
||||
;; (require 'gyp)
|
||||
|
||||
(require 'python)
|
||||
(require 'cl)
|
||||
|
||||
(when (string-match "python-mode.el" (symbol-file 'python-mode 'defun))
|
||||
(error (concat "python-mode must be loaded from python.el (bundled with "
|
||||
"recent emacsen), not from the older and less maintained "
|
||||
"python-mode.el")))
|
||||
|
||||
(defadvice python-indent-calculate-levels (after gyp-outdent-closing-parens
|
||||
activate)
|
||||
"De-indent closing parens, braces, and brackets in gyp-mode."
|
||||
(when (and (eq major-mode 'gyp-mode)
|
||||
(string-match "^ *[])}][],)}]* *$"
|
||||
(buffer-substring-no-properties
|
||||
(line-beginning-position) (line-end-position))))
|
||||
(setf (first python-indent-levels)
|
||||
(- (first python-indent-levels) python-continuation-offset))))
|
||||
|
||||
(defadvice python-indent-guess-indent-offset (around
|
||||
gyp-indent-guess-indent-offset
|
||||
activate)
|
||||
"Guess correct indent offset in gyp-mode."
|
||||
(or (and (not (eq major-mode 'gyp-mode))
|
||||
ad-do-it)
|
||||
(save-excursion
|
||||
(save-restriction
|
||||
(widen)
|
||||
(goto-char (point-min))
|
||||
;; Find first line ending with an opening brace that is not a comment.
|
||||
(or (and (re-search-forward "\\(^[[{]$\\|^.*[^#].*[[{]$\\)")
|
||||
(forward-line)
|
||||
(/= (current-indentation) 0)
|
||||
(set (make-local-variable 'python-indent-offset)
|
||||
(current-indentation))
|
||||
(set (make-local-variable 'python-continuation-offset)
|
||||
(current-indentation)))
|
||||
(message "Can't guess gyp indent offset, using default: %s"
|
||||
python-continuation-offset))))))
|
||||
|
||||
(define-derived-mode gyp-mode python-mode "Gyp"
|
||||
"Major mode for editing .gyp files. See http://code.google.com/p/gyp/"
|
||||
;; gyp-parse-history is a stack of (POSITION . PARSE-STATE) tuples,
|
||||
;; with greater positions at the top of the stack. PARSE-STATE
|
||||
;; is a list of section symbols (see gyp-section-name and gyp-parse-to)
|
||||
;; with most nested section symbol at the front of the list.
|
||||
(set (make-local-variable 'gyp-parse-history) '((1 . (list))))
|
||||
(gyp-add-font-lock-keywords))
|
||||
|
||||
(defun gyp-set-indentation ()
|
||||
"Hook function to configure python indentation to suit gyp mode."
|
||||
(set (make-local-variable 'python-indent-offset) 2)
|
||||
(set (make-local-variable 'python-continuation-offset) 2)
|
||||
(set (make-local-variable 'python-indent-guess-indent-offset) t)
|
||||
(python-indent-guess-indent-offset))
|
||||
|
||||
(add-hook 'gyp-mode-hook 'gyp-set-indentation)
|
||||
|
||||
(add-to-list 'auto-mode-alist '("\\.gyp\\'" . gyp-mode))
|
||||
(add-to-list 'auto-mode-alist '("\\.gypi\\'" . gyp-mode))
|
||||
(add-to-list 'auto-mode-alist '("/\\.gclient\\'" . gyp-mode))
|
||||
|
||||
;;; Font-lock support
|
||||
|
||||
(defconst gyp-dependencies-regexp
|
||||
(regexp-opt (list "dependencies" "export_dependent_settings"))
|
||||
"Regular expression to introduce 'dependencies' section")
|
||||
|
||||
(defconst gyp-sources-regexp
|
||||
(regexp-opt (list "action" "files" "include_dirs" "includes" "inputs"
|
||||
"libraries" "outputs" "sources"))
|
||||
"Regular expression to introduce 'sources' sections")
|
||||
|
||||
(defconst gyp-conditions-regexp
|
||||
(regexp-opt (list "conditions" "target_conditions"))
|
||||
"Regular expression to introduce conditions sections")
|
||||
|
||||
(defconst gyp-variables-regexp
|
||||
"^variables"
|
||||
"Regular expression to introduce variables sections")
|
||||
|
||||
(defconst gyp-defines-regexp
|
||||
"^defines"
|
||||
"Regular expression to introduce 'defines' sections")
|
||||
|
||||
(defconst gyp-targets-regexp
|
||||
"^targets"
|
||||
"Regular expression to introduce 'targets' sections")
|
||||
|
||||
(defun gyp-section-name (section)
|
||||
"Map the sections we are interested in from SECTION to symbol.
|
||||
|
||||
SECTION is a string from the buffer that introduces a section. The result is
|
||||
a symbol representing the kind of section.
|
||||
|
||||
This allows us to treat (for the purposes of font-lock) several different
|
||||
section names as the same kind of section. For example, a 'sources section
|
||||
can be introduced by the 'sources', 'inputs', 'outputs' keyword.
|
||||
|
||||
'other is the default section kind when a more specific match is not made."
|
||||
(cond ((string-match-p gyp-dependencies-regexp section) 'dependencies)
|
||||
((string-match-p gyp-sources-regexp section) 'sources)
|
||||
((string-match-p gyp-variables-regexp section) 'variables)
|
||||
((string-match-p gyp-conditions-regexp section) 'conditions)
|
||||
((string-match-p gyp-targets-regexp section) 'targets)
|
||||
((string-match-p gyp-defines-regexp section) 'defines)
|
||||
(t 'other)))
|
||||
|
||||
(defun gyp-invalidate-parse-states-after (target-point)
|
||||
"Erase any parse information after target-point."
|
||||
(while (> (caar gyp-parse-history) target-point)
|
||||
(setq gyp-parse-history (cdr gyp-parse-history))))
|
||||
|
||||
(defun gyp-parse-point ()
|
||||
"The point of the last parse state added by gyp-parse-to."
|
||||
(caar gyp-parse-history))
|
||||
|
||||
(defun gyp-parse-sections ()
|
||||
"A list of section symbols holding at the last parse state point."
|
||||
(cdar gyp-parse-history))
|
||||
|
||||
(defun gyp-inside-dictionary-p ()
|
||||
"Predicate returning true if the parser is inside a dictionary."
|
||||
(not (eq (cadar gyp-parse-history) 'list)))
|
||||
|
||||
(defun gyp-add-parse-history (point sections)
|
||||
"Add parse state SECTIONS to the parse history at POINT so that parsing can be
|
||||
resumed instantly."
|
||||
(while (>= (caar gyp-parse-history) point)
|
||||
(setq gyp-parse-history (cdr gyp-parse-history)))
|
||||
(setq gyp-parse-history (cons (cons point sections) gyp-parse-history)))
|
||||
|
||||
(defun gyp-parse-to (target-point)
|
||||
"Parses from (point) to TARGET-POINT adding the parse state information to
|
||||
gyp-parse-state-history. Parsing stops if TARGET-POINT is reached or if a
|
||||
string literal has been parsed. Returns nil if no further parsing can be
|
||||
done, otherwise returns the position of the start of a parsed string, leaving
|
||||
the point at the end of the string."
|
||||
(let ((parsing t)
|
||||
string-start)
|
||||
(while parsing
|
||||
(setq string-start nil)
|
||||
;; Parse up to a character that starts a sexp, or if the nesting
|
||||
;; level decreases.
|
||||
(let ((state (parse-partial-sexp (gyp-parse-point)
|
||||
target-point
|
||||
-1
|
||||
t))
|
||||
(sections (gyp-parse-sections)))
|
||||
(if (= (nth 0 state) -1)
|
||||
(setq sections (cdr sections)) ; pop out a level
|
||||
(cond ((looking-at-p "['\"]") ; a string
|
||||
(setq string-start (point))
|
||||
(goto-char (scan-sexps (point) 1))
|
||||
(if (gyp-inside-dictionary-p)
|
||||
;; Look for sections inside a dictionary
|
||||
(let ((section (gyp-section-name
|
||||
(buffer-substring-no-properties
|
||||
(+ 1 string-start)
|
||||
(- (point) 1)))))
|
||||
(setq sections (cons section (cdr sections)))))
|
||||
;; Stop after the string so it can be fontified.
|
||||
(setq target-point (point)))
|
||||
((looking-at-p "{")
|
||||
;; Inside a dictionary. Increase nesting.
|
||||
(forward-char 1)
|
||||
(setq sections (cons 'unknown sections)))
|
||||
((looking-at-p "\\[")
|
||||
;; Inside a list. Increase nesting
|
||||
(forward-char 1)
|
||||
(setq sections (cons 'list sections)))
|
||||
((not (eobp))
|
||||
;; other
|
||||
(forward-char 1))))
|
||||
(gyp-add-parse-history (point) sections)
|
||||
(setq parsing (< (point) target-point))))
|
||||
string-start))
|
||||
|
||||
(defun gyp-section-at-point ()
|
||||
"Transform the last parse state, which is a list of nested sections and return
|
||||
the section symbol that should be used to determine font-lock information for
|
||||
the string. Can return nil indicating the string should not have any attached
|
||||
section."
|
||||
(let ((sections (gyp-parse-sections)))
|
||||
(cond
|
||||
((eq (car sections) 'conditions)
|
||||
;; conditions can occur in a variables section, but we still want to
|
||||
;; highlight it as a keyword.
|
||||
nil)
|
||||
((and (eq (car sections) 'list)
|
||||
(eq (cadr sections) 'list))
|
||||
;; conditions and sources can have items in [[ ]]
|
||||
(caddr sections))
|
||||
(t (cadr sections)))))
|
||||
|
||||
(defun gyp-section-match (limit)
|
||||
"Parse from (point) to LIMIT returning by means of match data what was
|
||||
matched. The group of the match indicates what style font-lock should apply.
|
||||
See also `gyp-add-font-lock-keywords'."
|
||||
(gyp-invalidate-parse-states-after (point))
|
||||
(let ((group nil)
|
||||
(string-start t))
|
||||
(while (and (< (point) limit)
|
||||
(not group)
|
||||
string-start)
|
||||
(setq string-start (gyp-parse-to limit))
|
||||
(if string-start
|
||||
(setq group (cl-case (gyp-section-at-point)
|
||||
('dependencies 1)
|
||||
('variables 2)
|
||||
('conditions 2)
|
||||
('sources 3)
|
||||
('defines 4)
|
||||
(nil nil)))))
|
||||
(if group
|
||||
(progn
|
||||
;; Set the match data to indicate to the font-lock mechanism the
|
||||
;; highlighting to be performed.
|
||||
(set-match-data (append (list string-start (point))
|
||||
(make-list (* (1- group) 2) nil)
|
||||
(list (1+ string-start) (1- (point)))))
|
||||
t))))
|
||||
|
||||
;;; Please see http://code.google.com/p/gyp/wiki/GypLanguageSpecification for
|
||||
;;; canonical list of keywords.
|
||||
(defun gyp-add-font-lock-keywords ()
|
||||
"Add gyp-mode keywords to font-lock mechanism."
|
||||
;; TODO(jknotten): Move all the keyword highlighting into gyp-section-match
|
||||
;; so that we can do the font-locking in a single font-lock pass.
|
||||
(font-lock-add-keywords
|
||||
nil
|
||||
(list
|
||||
;; Top-level keywords
|
||||
(list (concat "['\"]\\("
|
||||
(regexp-opt (list "action" "action_name" "actions" "cflags"
|
||||
"cflags_cc" "conditions" "configurations"
|
||||
"copies" "defines" "dependencies" "destination"
|
||||
"direct_dependent_settings"
|
||||
"export_dependent_settings" "extension" "files"
|
||||
"include_dirs" "includes" "inputs" "ldflags" "libraries"
|
||||
"link_settings" "mac_bundle" "message"
|
||||
"msvs_external_rule" "outputs" "product_name"
|
||||
"process_outputs_as_sources" "rules" "rule_name"
|
||||
"sources" "suppress_wildcard"
|
||||
"target_conditions" "target_defaults"
|
||||
"target_defines" "target_name" "toolsets"
|
||||
"targets" "type" "variables" "xcode_settings"))
|
||||
"[!/+=]?\\)") 1 'font-lock-keyword-face t)
|
||||
;; Type of target
|
||||
(list (concat "['\"]\\("
|
||||
(regexp-opt (list "loadable_module" "static_library"
|
||||
"shared_library" "executable" "none"))
|
||||
"\\)") 1 'font-lock-type-face t)
|
||||
(list "\\(?:target\\|action\\)_name['\"]\\s-*:\\s-*['\"]\\([^ '\"]*\\)" 1
|
||||
'font-lock-function-name-face t)
|
||||
(list 'gyp-section-match
|
||||
(list 1 'font-lock-function-name-face t t) ; dependencies
|
||||
(list 2 'font-lock-variable-name-face t t) ; variables, conditions
|
||||
(list 3 'font-lock-constant-face t t) ; sources
|
||||
(list 4 'font-lock-preprocessor-face t t)) ; preprocessor
|
||||
;; Variable expansion
|
||||
(list "<@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
|
||||
;; Command expansion
|
||||
(list "<!@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
|
||||
)))
|
||||
|
||||
(provide 'gyp)
|
||||
7
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh
generated
vendored
Executable file
@@ -0,0 +1,7 @@
|
||||
#!/bin/sh
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
emacs --no-site-file --no-init-file --batch \
|
||||
--load ert.el --load gyp.el --load gyp-tests.el \
|
||||
-f ert-run-tests-batch-and-exit
|
||||
1105
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp
generated
vendored
Normal file
File diff suppressed because it is too large
1107
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
102
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/graphviz.py
generated
vendored
Executable file
@@ -0,0 +1,102 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Using the JSON dumped by the dump-dependency-json generator,
|
||||
generate input suitable for graphviz to render a dependency graph of
|
||||
targets."""
|
||||
|
||||
|
||||
import collections
|
||||
import json
|
||||
import sys
|
||||
|
||||
|
||||
def ParseTarget(target):
|
||||
target, _, suffix = target.partition("#")
|
||||
filename, _, target = target.partition(":")
|
||||
return filename, target, suffix
|
||||
|
||||
|
||||
def LoadEdges(filename, targets):
|
||||
"""Load the edges map from the dump file, and filter it to only
|
||||
show targets in |targets| and their depedendents."""
|
||||
|
||||
file = open("dump.json")
|
||||
edges = json.load(file)
|
||||
file.close()
|
||||
|
||||
# Copy out only the edges we're interested in from the full edge list.
|
||||
target_edges = {}
|
||||
to_visit = targets[:]
|
||||
while to_visit:
|
||||
src = to_visit.pop()
|
||||
if src in target_edges:
|
||||
continue
|
||||
target_edges[src] = edges[src]
|
||||
to_visit.extend(edges[src])
|
||||
|
||||
return target_edges
|
||||
|
||||
|
||||
def WriteGraph(edges):
|
||||
"""Print a graphviz graph to stdout.
|
||||
|edges| is a map of target to a list of other targets it depends on."""
|
||||
|
||||
# Bucket targets by file.
|
||||
files = collections.defaultdict(list)
|
||||
for src, dst in edges.items():
|
||||
build_file, target_name, toolset = ParseTarget(src)
|
||||
files[build_file].append(src)
|
||||
|
||||
print("digraph D {")
|
||||
print(" fontsize=8") # Used by subgraphs.
|
||||
print(" node [fontsize=8]")
|
||||
|
||||
# Output nodes by file. We must first write out each node within
|
||||
# its file grouping before writing out any edges that may refer
|
||||
# to those nodes.
|
||||
for filename, targets in files.items():
|
||||
if len(targets) == 1:
|
||||
# If there's only one node for this file, simplify
|
||||
# the display by making it a box without an internal node.
|
||||
target = targets[0]
|
||||
build_file, target_name, toolset = ParseTarget(target)
|
||||
print(
|
||||
f' "{target}" [shape=box, label="{filename}\\n{target_name}"]'
|
||||
)
|
||||
else:
|
||||
# Group multiple nodes together in a subgraph.
|
||||
print(' subgraph "cluster_%s" {' % filename)
|
||||
print(' label = "%s"' % filename)
|
||||
for target in targets:
|
||||
build_file, target_name, toolset = ParseTarget(target)
|
||||
print(f' "{target}" [label="{target_name}"]')
|
||||
print(" }")
|
||||
|
||||
# Now that we've placed all the nodes within subgraphs, output all
|
||||
# the edges between nodes.
|
||||
for src, dsts in edges.items():
|
||||
for dst in dsts:
|
||||
print(f' "{src}" -> "{dst}"')
|
||||
|
||||
print("}")
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) < 2:
|
||||
print(__doc__, file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
print("usage: %s target1 target2..." % (sys.argv[0]), file=sys.stderr)
|
||||
return 1
|
||||
|
||||
edges = LoadEdges("dump.json", sys.argv[1:])
|
||||
|
||||
WriteGraph(edges)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
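For illustration only: a small sketch of calling the two helpers above directly, assuming dump.json was already produced (for example by running gyp with the dump_dependency_json generator) and that tools/graphviz.py is importable as the local module graphviz; the target name is a placeholder.

import graphviz  # the gyp tool above, not the PyPI graphviz package

targets = ["base/base.gyp:base#target"]     # placeholder target name
edges = graphviz.LoadEdges("dump.json", targets)
graphviz.WriteGraph(edges)                  # pipe stdout into: dot -Tpng -o deps.png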
156
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/pretty_gyp.py
generated
vendored
Executable file
@@ -0,0 +1,156 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Pretty-prints the contents of a GYP file."""
|
||||
|
||||
|
||||
import sys
|
||||
import re
|
||||
|
||||
|
||||
# Regex to remove comments when we're counting braces.
|
||||
COMMENT_RE = re.compile(r"\s*#.*")
|
||||
|
||||
# Regex to remove quoted strings when we're counting braces.
|
||||
# It takes into account quoted quotes, and makes sure that the quotes match.
|
||||
# NOTE: It does not handle quotes that span more than one line, or
|
||||
# cases where an escaped quote is preceded by an escaped backslash.
|
||||
QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
|
||||
QUOTE_RE = re.compile(QUOTE_RE_STR)
|
||||
|
||||
|
||||
def comment_replace(matchobj):
|
||||
return matchobj.group(1) + matchobj.group(2) + "#" * len(matchobj.group(3))
|
||||
|
||||
|
||||
def mask_comments(input):
|
||||
"""Mask the quoted strings so we skip braces inside quoted strings."""
|
||||
search_re = re.compile(r"(.*?)(#)(.*)")
|
||||
return [search_re.sub(comment_replace, line) for line in input]
|
||||
|
||||
|
||||
def quote_replace(matchobj):
|
||||
return "{}{}{}{}".format(
|
||||
matchobj.group(1),
|
||||
matchobj.group(2),
|
||||
"x" * len(matchobj.group(3)),
|
||||
matchobj.group(2),
|
||||
)
|
||||
|
||||
|
||||
def mask_quotes(input):
|
||||
"""Mask the quoted strings so we skip braces inside quoted strings."""
|
||||
search_re = re.compile(r"(.*?)" + QUOTE_RE_STR)
|
||||
return [search_re.sub(quote_replace, line) for line in input]
|
||||
|
||||
|
||||
def do_split(input, masked_input, search_re):
|
||||
output = []
|
||||
mask_output = []
|
||||
for (line, masked_line) in zip(input, masked_input):
|
||||
m = search_re.match(masked_line)
|
||||
while m:
|
||||
split = len(m.group(1))
|
||||
line = line[:split] + r"\n" + line[split:]
|
||||
masked_line = masked_line[:split] + r"\n" + masked_line[split:]
|
||||
m = search_re.match(masked_line)
|
||||
output.extend(line.split(r"\n"))
|
||||
mask_output.extend(masked_line.split(r"\n"))
|
||||
return (output, mask_output)
|
||||
|
||||
|
||||
def split_double_braces(input):
|
||||
"""Masks out the quotes and comments, and then splits appropriate
|
||||
lines (lines that match the double_*_brace re's above) before
|
||||
indenting them below.
|
||||
|
||||
These are used to split lines which have multiple braces on them, so
|
||||
that the indentation looks prettier when all laid out (e.g. closing
|
||||
braces make a nice diagonal line).
|
||||
"""
|
||||
double_open_brace_re = re.compile(r"(.*?[\[\{\(,])(\s*)([\[\{\(])")
|
||||
double_close_brace_re = re.compile(r"(.*?[\]\}\)],?)(\s*)([\]\}\)])")
|
||||
|
||||
masked_input = mask_quotes(input)
|
||||
masked_input = mask_comments(masked_input)
|
||||
|
||||
(output, mask_output) = do_split(input, masked_input, double_open_brace_re)
|
||||
(output, mask_output) = do_split(output, mask_output, double_close_brace_re)
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def count_braces(line):
|
||||
"""keeps track of the number of braces on a given line and returns the result.
|
||||
|
||||
It starts at zero and subtracts for closed braces, and adds for open braces.
|
||||
"""
|
||||
open_braces = ["[", "(", "{"]
|
||||
close_braces = ["]", ")", "}"]
|
||||
closing_prefix_re = re.compile(r"[^\s\]\}\)]\s*[\]\}\)]+,?\s*$")
|
||||
cnt = 0
|
||||
stripline = COMMENT_RE.sub(r"", line)
|
||||
stripline = QUOTE_RE.sub(r"''", stripline)
|
||||
for char in stripline:
|
||||
for brace in open_braces:
|
||||
if char == brace:
|
||||
cnt += 1
|
||||
for brace in close_braces:
|
||||
if char == brace:
|
||||
cnt -= 1
|
||||
|
||||
after = False
|
||||
if cnt > 0:
|
||||
after = True
|
||||
|
||||
# This catches the special case of a closing brace having something
|
||||
# other than just whitespace ahead of it -- we don't want to
|
||||
# unindent that until after this line is printed so it stays with
|
||||
# the previous indentation level.
|
||||
if cnt < 0 and closing_prefix_re.match(stripline):
|
||||
after = True
|
||||
return (cnt, after)
|
||||
|
||||
|
||||
def prettyprint_input(lines):
|
||||
"""Does the main work of indenting the input based on the brace counts."""
|
||||
indent = 0
|
||||
basic_offset = 2
|
||||
for line in lines:
|
||||
if COMMENT_RE.match(line):
|
||||
print(line)
|
||||
else:
|
||||
line = line.strip("\r\n\t ") # Otherwise doesn't strip \r on Unix.
|
||||
if len(line) > 0:
|
||||
(brace_diff, after) = count_braces(line)
|
||||
if brace_diff != 0:
|
||||
if after:
|
||||
print(" " * (basic_offset * indent) + line)
|
||||
indent += brace_diff
|
||||
else:
|
||||
indent += brace_diff
|
||||
print(" " * (basic_offset * indent) + line)
|
||||
else:
|
||||
print(" " * (basic_offset * indent) + line)
|
||||
else:
|
||||
print("")
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) > 1:
|
||||
data = open(sys.argv[1]).read().splitlines()
|
||||
else:
|
||||
data = sys.stdin.read().splitlines()
|
||||
# Split up the double braces.
|
||||
lines = split_double_braces(data)
|
||||
|
||||
# Indent and print the output.
|
||||
prettyprint_input(lines)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
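A minimal sketch (assumptions: tools/pretty_gyp.py is importable as pretty_gyp, and example.gyp is a placeholder input file) of pretty-printing a GYP file programmatically instead of through stdin:

import pretty_gyp

with open("example.gyp") as f:                # placeholder input file
    data = f.read().splitlines()
lines = pretty_gyp.split_double_braces(data)  # split lines with stacked braces
pretty_gyp.prettyprint_input(lines)           # indented output goes to stdout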
181
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/pretty_sln.py
generated
vendored
Executable file
@@ -0,0 +1,181 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Prints the information in a sln file in a diffable way.
|
||||
|
||||
It first outputs each project in alphabetical order with its
|
||||
dependencies.
|
||||
|
||||
Then it outputs a possible build order.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import pretty_vcproj
|
||||
|
||||
__author__ = "nsylvain (Nicolas Sylvain)"
|
||||
|
||||
|
||||
def BuildProject(project, built, projects, deps):
|
||||
# if all dependencies are done, we can build it, otherwise we try to build the
|
||||
# dependency.
|
||||
# This is not infinite-recursion proof.
|
||||
for dep in deps[project]:
|
||||
if dep not in built:
|
||||
BuildProject(dep, built, projects, deps)
|
||||
print(project)
|
||||
built.append(project)
|
||||
|
||||
|
||||
def ParseSolution(solution_file):
|
||||
# All projects, their clsid and paths.
|
||||
projects = dict()
|
||||
|
||||
# A list of dependencies associated with a project.
|
||||
dependencies = dict()
|
||||
|
||||
# Regular expressions that matches the SLN format.
|
||||
# The first line of a project definition.
|
||||
begin_project = re.compile(
|
||||
r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
|
||||
r'}"\) = "(.*)", "(.*)", "(.*)"$'
|
||||
)
|
||||
# The last line of a project definition.
|
||||
end_project = re.compile("^EndProject$")
|
||||
# The first line of a dependency list.
|
||||
begin_dep = re.compile(r"ProjectSection\(ProjectDependencies\) = postProject$")
|
||||
# The last line of a dependency list.
|
||||
end_dep = re.compile("EndProjectSection$")
|
||||
# A line describing a dependency.
|
||||
dep_line = re.compile(" *({.*}) = ({.*})$")
|
||||
|
||||
in_deps = False
|
||||
solution = open(solution_file)
|
||||
for line in solution:
|
||||
results = begin_project.search(line)
|
||||
if results:
|
||||
# Hack to remove icu because the diff is too different.
|
||||
if results.group(1).find("icu") != -1:
|
||||
continue
|
||||
# We remove "_gyp" from the names because it helps to diff them.
|
||||
current_project = results.group(1).replace("_gyp", "")
|
||||
projects[current_project] = [
|
||||
results.group(2).replace("_gyp", ""),
|
||||
results.group(3),
|
||||
results.group(2),
|
||||
]
|
||||
dependencies[current_project] = []
|
||||
continue
|
||||
|
||||
results = end_project.search(line)
|
||||
if results:
|
||||
current_project = None
|
||||
continue
|
||||
|
||||
results = begin_dep.search(line)
|
||||
if results:
|
||||
in_deps = True
|
||||
continue
|
||||
|
||||
results = end_dep.search(line)
|
||||
if results:
|
||||
in_deps = False
|
||||
continue
|
||||
|
||||
results = dep_line.search(line)
|
||||
if results and in_deps and current_project:
|
||||
dependencies[current_project].append(results.group(1))
|
||||
continue
|
||||
|
||||
# Change all dependencies clsid to name instead.
|
||||
for project in dependencies:
|
||||
# For each dependencies in this project
|
||||
new_dep_array = []
|
||||
for dep in dependencies[project]:
|
||||
# Look for the project name matching this cldis
|
||||
for project_info in projects:
|
||||
if projects[project_info][1] == dep:
|
||||
new_dep_array.append(project_info)
|
||||
dependencies[project] = sorted(new_dep_array)
|
||||
|
||||
return (projects, dependencies)
|
||||
|
||||
|
||||
def PrintDependencies(projects, deps):
|
||||
print("---------------------------------------")
|
||||
print("Dependencies for all projects")
|
||||
print("---------------------------------------")
|
||||
print("-- --")
|
||||
|
||||
for (project, dep_list) in sorted(deps.items()):
|
||||
print("Project : %s" % project)
|
||||
print("Path : %s" % projects[project][0])
|
||||
if dep_list:
|
||||
for dep in dep_list:
|
||||
print(" - %s" % dep)
|
||||
print("")
|
||||
|
||||
print("-- --")
|
||||
|
||||
|
||||
def PrintBuildOrder(projects, deps):
|
||||
print("---------------------------------------")
|
||||
print("Build order ")
|
||||
print("---------------------------------------")
|
||||
print("-- --")
|
||||
|
||||
built = []
|
||||
for (project, _) in sorted(deps.items()):
|
||||
if project not in built:
|
||||
BuildProject(project, built, projects, deps)
|
||||
|
||||
print("-- --")
|
||||
|
||||
|
||||
def PrintVCProj(projects):
|
||||
|
||||
for project in projects:
|
||||
print("-------------------------------------")
|
||||
print("-------------------------------------")
|
||||
print(project)
|
||||
print(project)
|
||||
print(project)
|
||||
print("-------------------------------------")
|
||||
print("-------------------------------------")
|
||||
|
||||
project_path = os.path.abspath(
|
||||
os.path.join(os.path.dirname(sys.argv[1]), projects[project][2])
|
||||
)
|
||||
|
||||
pretty = pretty_vcproj
|
||||
argv = [
|
||||
"",
|
||||
project_path,
|
||||
"$(SolutionDir)=%s\\" % os.path.dirname(sys.argv[1]),
|
||||
]
|
||||
argv.extend(sys.argv[3:])
|
||||
pretty.main(argv)
|
||||
|
||||
|
||||
def main():
|
||||
# check if we have exactly 1 parameter.
|
||||
if len(sys.argv) < 2:
|
||||
print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0])
|
||||
return 1
|
||||
|
||||
(projects, deps) = ParseSolution(sys.argv[1])
|
||||
PrintDependencies(projects, deps)
|
||||
PrintBuildOrder(projects, deps)
|
||||
|
||||
if "--recursive" in sys.argv:
|
||||
PrintVCProj(projects)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
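Editor's note: a small sketch, not part of the vendored pretty_sln.py, showing how BuildProject() prints a dependency-respecting order when given dicts shaped like the ones ParseSolution() returns. The project names and GUIDs below are made up.

# Hypothetical data in the shape ParseSolution() produces:
projects = {
    "app": ["app.vcproj", "{AAAA}", "app.vcproj"],
    "lib": ["lib.vcproj", "{BBBB}", "lib.vcproj"],
}
deps = {"app": ["lib"], "lib": []}

built = []
for project, _ in sorted(deps.items()):
    if project not in built:
        BuildProject(project, built, projects, deps)
# Prints "lib" before "app": a project is only printed after its dependencies.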
339
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
generated
vendored
Executable file
339
.yarn/unplugged/node-gyp-npm-9.3.0-21c41a4dfd/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
generated
vendored
Executable file
@@ -0,0 +1,339 @@
#!/usr/bin/env python3

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Make the format of a vcproj really pretty.

This script normalize and sort an xml. It also fetches all the properties
inside linked vsprops and include them explicitly in the vcproj.

It outputs the resulting xml to stdout.
"""


import os
import sys

from xml.dom.minidom import parse
from xml.dom.minidom import Node

__author__ = "nsylvain (Nicolas Sylvain)"
ARGUMENTS = None
REPLACEMENTS = dict()


def cmp(x, y):
    return (x > y) - (x < y)


class CmpTuple:
    """Compare function between 2 tuple."""

    def __call__(self, x, y):
        return cmp(x[0], y[0])


class CmpNode:
    """Compare function between 2 xml nodes."""

    def __call__(self, x, y):
        def get_string(node):
            node_string = "node"
            node_string += node.nodeName
            if node.nodeValue:
                node_string += node.nodeValue

            if node.attributes:
                # We first sort by name, if present.
                node_string += node.getAttribute("Name")

                all_nodes = []
                for (name, value) in node.attributes.items():
                    all_nodes.append((name, value))

                all_nodes.sort(CmpTuple())
                for (name, value) in all_nodes:
                    node_string += name
                    node_string += value

            return node_string

        return cmp(get_string(x), get_string(y))


def PrettyPrintNode(node, indent=0):
    if node.nodeType == Node.TEXT_NODE:
        if node.data.strip():
            print("{}{}".format(" " * indent, node.data.strip()))
        return

    if node.childNodes:
        node.normalize()
    # Get the number of attributes
    attr_count = 0
    if node.attributes:
        attr_count = node.attributes.length

    # Print the main tag
    if attr_count == 0:
        print("{}<{}>".format(" " * indent, node.nodeName))
    else:
        print("{}<{}".format(" " * indent, node.nodeName))

        all_attributes = []
        for (name, value) in node.attributes.items():
            all_attributes.append((name, value))
        all_attributes.sort(CmpTuple())
        for (name, value) in all_attributes:
            print('{} {}="{}"'.format(" " * indent, name, value))
        print("%s>" % (" " * indent))
    if node.nodeValue:
        print("{} {}".format(" " * indent, node.nodeValue))

    for sub_node in node.childNodes:
        PrettyPrintNode(sub_node, indent=indent + 2)
    print("{}</{}>".format(" " * indent, node.nodeName))


def FlattenFilter(node):
    """Returns a list of all the node and sub nodes."""
    node_list = []

    if node.attributes and node.getAttribute("Name") == "_excluded_files":
        # We don't add the "_excluded_files" filter.
        return []

    for current in node.childNodes:
        if current.nodeName == "Filter":
            node_list.extend(FlattenFilter(current))
        else:
            node_list.append(current)

    return node_list


def FixFilenames(filenames, current_directory):
    new_list = []
    for filename in filenames:
        if filename:
            for key in REPLACEMENTS:
                filename = filename.replace(key, REPLACEMENTS[key])
            os.chdir(current_directory)
            filename = filename.strip("\"' ")
            if filename.startswith("$"):
                new_list.append(filename)
            else:
                new_list.append(os.path.abspath(filename))
    return new_list


def AbsoluteNode(node):
    """Makes all the properties we know about in this node absolute."""
    if node.attributes:
        for (name, value) in node.attributes.items():
            if name in [
                "InheritedPropertySheets",
                "RelativePath",
                "AdditionalIncludeDirectories",
                "IntermediateDirectory",
                "OutputDirectory",
                "AdditionalLibraryDirectories",
            ]:
                # We want to fix up these paths
                path_list = value.split(";")
                new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
                node.setAttribute(name, ";".join(new_list))
            if not value:
                node.removeAttribute(name)


def CleanupVcproj(node):
    """For each sub node, we call recursively this function."""
    for sub_node in node.childNodes:
        AbsoluteNode(sub_node)
        CleanupVcproj(sub_node)

    # Normalize the node, and remove all extraneous whitespaces.
    for sub_node in node.childNodes:
        if sub_node.nodeType == Node.TEXT_NODE:
            sub_node.data = sub_node.data.replace("\r", "")
            sub_node.data = sub_node.data.replace("\n", "")
            sub_node.data = sub_node.data.rstrip()

    # Fix all the semicolon separated attributes to be sorted, and we also
    # remove the dups.
    if node.attributes:
        for (name, value) in node.attributes.items():
            sorted_list = sorted(value.split(";"))
            unique_list = []
            for i in sorted_list:
                if not unique_list.count(i):
                    unique_list.append(i)
            node.setAttribute(name, ";".join(unique_list))
            if not value:
                node.removeAttribute(name)

    if node.childNodes:
        node.normalize()

    # For each node, take a copy, and remove it from the list.
    node_array = []
    while node.childNodes and node.childNodes[0]:
        # Take a copy of the node and remove it from the list.
        current = node.childNodes[0]
        node.removeChild(current)

        # If the child is a filter, we want to append all its children
        # to this same list.
        if current.nodeName == "Filter":
            node_array.extend(FlattenFilter(current))
        else:
            node_array.append(current)

    # Sort the list.
    node_array.sort(CmpNode())

    # Insert the nodes in the correct order.
    for new_node in node_array:
        # But don't append empty tool node.
        if new_node.nodeName == "Tool":
            if new_node.attributes and new_node.attributes.length == 1:
                # This one was empty.
                continue
        if new_node.nodeName == "UserMacro":
            continue
        node.appendChild(new_node)


def GetConfiguationNodes(vcproj):
    # TODO(nsylvain): Find a better way to navigate the xml.
    nodes = []
    for node in vcproj.childNodes:
        if node.nodeName == "Configurations":
            for sub_node in node.childNodes:
                if sub_node.nodeName == "Configuration":
                    nodes.append(sub_node)

    return nodes


def GetChildrenVsprops(filename):
    dom = parse(filename)
    if dom.documentElement.attributes:
        vsprops = dom.documentElement.getAttribute("InheritedPropertySheets")
        return FixFilenames(vsprops.split(";"), os.path.dirname(filename))
    return []


def SeekToNode(node1, child2):
    # A text node does not have properties.
    if child2.nodeType == Node.TEXT_NODE:
        return None

    # Get the name of the current node.
    current_name = child2.getAttribute("Name")
    if not current_name:
        # There is no name. We don't know how to merge.
        return None

    # Look through all the nodes to find a match.
    for sub_node in node1.childNodes:
        if sub_node.nodeName == child2.nodeName:
            name = sub_node.getAttribute("Name")
            if name == current_name:
                return sub_node

    # No match. We give up.
    return None


def MergeAttributes(node1, node2):
    # No attributes to merge?
    if not node2.attributes:
        return

    for (name, value2) in node2.attributes.items():
        # Don't merge the 'Name' attribute.
        if name == "Name":
            continue
        value1 = node1.getAttribute(name)
        if value1:
            # The attribute exist in the main node. If it's equal, we leave it
            # untouched, otherwise we concatenate it.
            if value1 != value2:
                node1.setAttribute(name, ";".join([value1, value2]))
        else:
            # The attribute does not exist in the main node. We append this one.
            node1.setAttribute(name, value2)

        # If the attribute was a property sheet attributes, we remove it, since
        # they are useless.
        if name == "InheritedPropertySheets":
            node1.removeAttribute(name)


def MergeProperties(node1, node2):
    MergeAttributes(node1, node2)
    for child2 in node2.childNodes:
        child1 = SeekToNode(node1, child2)
        if child1:
            MergeProperties(child1, child2)
        else:
            node1.appendChild(child2.cloneNode(True))


def main(argv):
    """Main function of this vcproj prettifier."""
    global ARGUMENTS
    ARGUMENTS = argv

    # check if we have exactly 1 parameter.
    if len(argv) < 2:
        print(
            'Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
            "[key2=value2]" % argv[0]
        )
        return 1

    # Parse the keys
    for i in range(2, len(argv)):
        (key, value) = argv[i].split("=")
        REPLACEMENTS[key] = value

    # Open the vcproj and parse the xml.
    dom = parse(argv[1])

    # First thing we need to do is find the Configuration Node and merge them
    # with the vsprops they include.
    for configuration_node in GetConfiguationNodes(dom.documentElement):
        # Get the property sheets associated with this configuration.
        vsprops = configuration_node.getAttribute("InheritedPropertySheets")

        # Fix the filenames to be absolute.
        vsprops_list = FixFilenames(
            vsprops.strip().split(";"), os.path.dirname(argv[1])
        )

        # Extend the list of vsprops with all vsprops contained in the current
        # vsprops.
        for current_vsprops in vsprops_list:
            vsprops_list.extend(GetChildrenVsprops(current_vsprops))

        # Now that we have all the vsprops, we need to merge them.
        for current_vsprops in vsprops_list:
            MergeProperties(configuration_node, parse(current_vsprops).documentElement)

    # Now that everything is merged, we need to cleanup the xml.
    CleanupVcproj(dom.documentElement)

    # Finally, we use the prett xml function to print the vcproj back to the
    # user.
    # print dom.toprettyxml(newl="\n")
    PrettyPrintNode(dom.documentElement)
    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv))
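Editor's note: a hedged usage sketch, not part of the vendored pretty_vcproj.py. main() takes the vcproj path as argv[1] and optional key=value replacements after it, which mirrors how pretty_sln.py's PrintVCProj() invokes it; the path and the $(SolutionDir) mapping below are examples only.

# Hypothetical programmatic invocation:
import pretty_vcproj

pretty_vcproj.main([
    "pretty_vcproj.py",                # argv[0] is only used in the usage message
    r"c:\src\demo\demo.vcproj",        # vcproj to prettify (example path)
    "$(SolutionDir)=c:\\src\\demo\\",  # key=value replacement applied to paths
])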