forked from GitHubMirrors/silverbullet-icalendar
Compare commits
208 Commits
v0.1.0
...
7e848feeee
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7e848feeee | ||
| c9a703975d | |||
| 842586c129 | |||
| c6532e5aca | |||
| 42fb8be61c | |||
| f8a6fbafda | |||
| dbc7ef29aa | |||
| 7edc0997b2 | |||
| c3fd3aee20 | |||
| e5b063269f | |||
| 31ca364a7c | |||
| e7c69aa3f7 | |||
| 255988e6f3 | |||
| ada9f6694c | |||
| cd0fdf5f98 | |||
|
|
523b49dd3a | ||
| 6fc4282536 | |||
| 0a3b5aeaba | |||
| 12c417c506 | |||
| ca727c83d2 | |||
| f922f59145 | |||
| be74e906d2 | |||
| 10d334c732 | |||
| 9499dbffb2 | |||
| 9ea6f7961e | |||
| 7cc59ff5f9 | |||
| 974c75f01b | |||
| dcaf4d36a5 | |||
| 150fe04410 | |||
| 6b621083b9 | |||
| 4237fcfd30 | |||
|
|
33ca122583 | ||
| 221ca30af3 | |||
| c5129120a4 | |||
| c046c183b7 | |||
| 658bf69e91 | |||
| 6a8791879f | |||
| 2dbd286498 | |||
| 580cfcd646 | |||
| 51a1e8a3e1 | |||
| 426d6d1dc6 | |||
| 6b12c26497 | |||
| 78c1747141 | |||
| 8a5dd0a4bd | |||
| a2239d28d5 | |||
| 028ae7d9f9 | |||
| 0b1ef83999 | |||
| 201afb3f67 | |||
| dcc8f841a3 | |||
| 2bc719eb6e | |||
| d4150ae024 | |||
| 3ecec2a64b | |||
| d1e0a7fee7 | |||
| 68c18e5d18 | |||
| 682ebdf013 | |||
| bea0a23a0e | |||
|
|
a5aac39361 | ||
| 335c859e65 | |||
| 0f04df1435 | |||
| e8d74e4622 | |||
| 657f4f2c3a | |||
| ea85b56c5c | |||
| df8a0e12c2 | |||
| 9b53b77929 | |||
| cafdaf7006 | |||
| f8640533be | |||
| cecaac6638 | |||
| 54bb7a8540 | |||
| 6eda06aca6 | |||
| b5c718f286 | |||
| 0bea770814 | |||
|
|
ac2d1971be | ||
| 102b05f534 | |||
| 5f9d062d09 | |||
| 3ec078f18e | |||
| 08b5019452 | |||
|
|
fe88bada15 | ||
| b2b109b923 | |||
| bc0afad261 | |||
| 9af3e436aa | |||
| 533c240c07 | |||
| 780e90b1f0 | |||
| ffaef28332 | |||
| 4128c046d0 | |||
| 10a6db5893 | |||
| 85691d1df5 | |||
| b8bf269de8 | |||
| b94ebd30a2 | |||
| 9b54e2d8a8 | |||
| 3c69a3567b | |||
| 53a3c0e5db | |||
| 606fca25a8 | |||
| 35229aa941 | |||
| 8e6d0d9f88 | |||
| f06799419c | |||
|
|
d1079bd302 | ||
| 46d8e5f3a0 | |||
| 48e6e945e1 | |||
| 1cd6fd490b | |||
| 598b097b13 | |||
| a11aecfd1b | |||
| b786978804 | |||
| 4030c3fef0 | |||
| 2f4499a068 | |||
| 1ea0e020f9 | |||
| 8ffdebb673 | |||
| 5f9afac9d8 | |||
| 03907f3789 | |||
| 17f6308585 | |||
| 7d690cdb2a | |||
| 98c3b64659 | |||
|
|
188cbf1254 | ||
| d8f6f0396f | |||
| 170de52e6b | |||
|
|
76be84b487 | ||
| 70e6a4ef82 | |||
| cd194de3f7 | |||
| ced95d2a7a | |||
| a7180995b0 | |||
| a11c6bd906 | |||
| 62d4ba1006 | |||
| d4b8fea8f9 | |||
| 4aa83f4a0b | |||
| bf90d8bda2 | |||
| ae3935f048 | |||
| 9f7f1b6d2a | |||
| 02e29e7da4 | |||
| 32933f9a34 | |||
|
|
b53fc0acf8 | ||
| 070b10843e | |||
|
|
8d66834a48 | ||
| 6480b56875 | |||
| d28c206862 | |||
|
|
2ea763e145 | ||
| 66f60bc9ae | |||
|
|
0e7e89091d | ||
| 81d5e8738e | |||
|
|
899ee62693 | ||
| 90f317be6e | |||
|
|
b50cded6c9 | ||
| 124a780b65 | |||
|
|
415cd7e215 | ||
| 9e54f0320e | |||
|
|
c422f0fae7 | ||
| ab0db17a47 | |||
|
|
8087031220 | ||
| 56b6e7d0bf | |||
|
|
2131bf4051 | ||
| cdfea5f3b2 | |||
|
|
3cc449a7c6 | ||
| 80cd15c1b5 | |||
|
|
3b348d8257 | ||
| adf638379d | |||
|
|
4b4aacbfd9 | ||
| 45ab0e8d95 | |||
|
|
e79349d7c0 | ||
| 86824991a6 | |||
| e3fcf743f8 | |||
|
|
da835727d4 | ||
| dbffe7fb24 | |||
| 5a7a7aaa18 | |||
|
|
7aba023818 | ||
| c382ab93ab | |||
| 4d9943ed72 | |||
| 10286625cc | |||
| 7031d15833 | |||
| ab303c694e | |||
| 31fdf3f42b | |||
| cb4f2c03c0 | |||
|
|
74177dc4b5 | ||
| f2fedb690c | |||
| 099374e878 | |||
|
|
479c096587 | ||
| 57cb085982 | |||
| f847ad53bc | |||
| 6a862a5563 | |||
| 3fa0bd553b | |||
| af12466721 | |||
| 17ba5aa701 | |||
| b8497c09d3 | |||
| 0a58c16705 | |||
| b59aabd115 | |||
| c39b869795 | |||
| e33be08320 | |||
| 1ce9011d60 | |||
| 56e11f748b | |||
| bb1b9a93ad | |||
| 6641f03519 | |||
| 44079d525a | |||
|
|
a09bfd805a | ||
| 19826c1678 | |||
| 651a1107d1 | |||
| daab3cf2f3 | |||
| 5ba0445eeb | |||
| 31fddc1e26 | |||
| 7ff19185e2 | |||
| 606340058e | |||
| 1107571bf1 | |||
| 1d2fd52715 | |||
|
|
deb30ab6b3 | ||
|
|
904c1b9d94 | ||
|
|
34bbe69569 | ||
|
|
38dd97c25c | ||
|
|
d3e4fc021b | ||
|
|
8a7c9700ee | ||
|
|
e12420aba3 | ||
|
|
4df5a1f8a8 | ||
|
|
e13e6e2bc2 |
40
.github/workflows/publish.yml
vendored
Normal file
40
.github/workflows/publish.yml
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
name: Build SilverBullet Plug
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Deno
|
||||
uses: denoland/setup-deno@v2
|
||||
with:
|
||||
deno-version: v2.x
|
||||
|
||||
- name: Build Plug
|
||||
run: |
|
||||
deno task build -- --no-check
|
||||
|
||||
- name: Commit and push changes
|
||||
run: |
|
||||
git config --local user.email "action@github.com"
|
||||
git config --local user.name "GitHub Action"
|
||||
git add icalendar.plug.js
|
||||
if git diff --quiet --staged; then
|
||||
echo "No changes to commit"
|
||||
else
|
||||
git commit -m "Build and update icalendar.plug.js [skip ci]"
|
||||
git push origin main
|
||||
fi
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,4 +1,3 @@
|
||||
deno.lock
|
||||
*.plug.js
|
||||
test_space
|
||||
.env
|
||||
|
||||
2
.vscode/settings.json
vendored
2
.vscode/settings.json
vendored
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"deno.enable": true,
|
||||
"editor.formatOnSave": true,
|
||||
"deno.config": "deno.jsonc"
|
||||
"deno.config": "deno.json"
|
||||
}
|
||||
|
||||
7
LICENSE
Normal file
7
LICENSE
Normal file
@@ -0,0 +1,7 @@
|
||||
Copyright 2025 Marek S. Lukasiewicz
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
12
Makefile
Normal file
12
Makefile
Normal file
@@ -0,0 +1,12 @@
|
||||
# Check version consistency
|
||||
check-versions:
|
||||
./check_versions.sh
|
||||
|
||||
# Build the plug using a Docker container with Deno
|
||||
build: check-versions
|
||||
docker run --rm -v /home/sstent/Projects/silverbullet-icalendar:/app -w /app denoland/deno:latest task build
|
||||
|
||||
# Helper to build and copy to a local test space (if needed)
|
||||
deploy-test: build
|
||||
mkdir -p test_space/_plug
|
||||
cp icalendar.plug.js test_space/_plug/
|
||||
8
PLUG.md
Normal file
8
PLUG.md
Normal file
@@ -0,0 +1,8 @@
|
||||
---
|
||||
name: Library/sstent/icalendar
|
||||
version: "0.3.32"
|
||||
tags: meta/library
|
||||
files:
|
||||
- icalendar.plug.js
|
||||
---
|
||||
iCalendar sync plug for SilverBullet.
|
||||
65
README.md
65
README.md
@@ -3,19 +3,21 @@
|
||||
`silverbullet-icalendar` is a [Plug](https://silverbullet.md/Plugs) for [SilverBullet](https://silverbullet.md/) which I made for my girlfriend.
|
||||
It reads external [iCalendar](https://en.wikipedia.org/wiki/ICalendar) data, also known as iCal and `.ics` format, used in CalDAV protocol.
|
||||
|
||||
**Note**: This version (0.2.0+) is compatible with **SilverBullet v2 only**. For SilverBullet v1, use version 0.1.0.
|
||||
|
||||
## Installation
|
||||
|
||||
Run the {[Plugs: Add]} command in SilverBullet and add paste this URI into the dialog box:
|
||||
Run the {[Library: Install]} command and paste the following URL:
|
||||
`https://github.com/Maarrk/silverbullet-icalendar/blob/main/PLUG.md`
|
||||
|
||||
```
|
||||
ghr:Maarrk/silverbullet-icalendar
|
||||
```
|
||||
Alternatively, you can use the older way with the {[Plugs: Add]} command:
|
||||
`ghr:Maarrk/silverbullet-icalendar`
|
||||
|
||||
Then run the {[Plugs: Update]} command and off you go!
|
||||
|
||||
### Configuration
|
||||
|
||||
This plug can be configured with [Space Config](https://silverbullet.md/Space%20Config), these are the default values and their usage:
|
||||
This plug is configured with [Space Config](https://silverbullet.md/Space%20Config), short example:
|
||||
|
||||
```yaml
|
||||
icalendar:
|
||||
@@ -37,57 +39,72 @@ Instructions to get the source URL for some calendar services:
|
||||
- Calendar settings (pencil icon to the right of the name)
|
||||
- Settings and Sharing, scroll down to Integrate calendar
|
||||
- Copy the link for Secret address in iCal format
|
||||
|
||||
|
||||

|
||||
|
||||
## Usage
|
||||
|
||||
The plug provides the query source `ical-event`, which corresponds to `VEVENT` object
|
||||
After configuration, run the `{[iCalendar: Sync]}` command to synchronize calendar events. The plug will cache the results for 6 hours by default (configurable via `cacheDuration` in config).
|
||||
|
||||
To bypass the cache and force an immediate sync, use the `{[iCalendar: Force Sync]}` command.
|
||||
|
||||
To completely clear all indexed events and cache (useful for troubleshooting), use the `{[iCalendar: Clear All Events]}` command.
|
||||
|
||||
Events are indexed with the tag `ical-event` and can be queried using Lua Integrated Query (LIQ).
|
||||
|
||||
### Examples
|
||||
|
||||
Select events that start on a given date
|
||||
Select events that start on a given date:
|
||||
|
||||
~~~
|
||||
```query
|
||||
ical-event
|
||||
where start =~ /^2024-01-04/
|
||||
select summary, description
|
||||
```md
|
||||
${query[[
|
||||
from index.tag "ical-event"
|
||||
where start:startsWith "2024-01-04"
|
||||
select {summary=summary, description=description}
|
||||
]]}
|
||||
```
|
||||
~~~
|
||||
|
||||
Get the next 5 upcoming events:
|
||||
```md
|
||||
${query[[
|
||||
from index.tag "ical-event"
|
||||
where start > os.date("%Y-%m-%d")
|
||||
order by start
|
||||
limit 5
|
||||
]]}
|
||||
```
|
||||
~~~
|
||||
|
||||
## Roadmap
|
||||
|
||||
- Cache the calendar according to `REFRESH-INTERVAL` or `X-PUBLISHED-TTL`, command for manual update
|
||||
- More query sources:
|
||||
- Cache the calendar according to `REFRESH-INTERVAL` or `X-PUBLISHED-TTL`
|
||||
- More indexed object types:
|
||||
- `ical-todo` for `VTODO` components
|
||||
- `ical-calendar` showing information about configured calendars
|
||||
- Describe the properties of query results
|
||||
- Support `file://` URL scheme (use an external script or filesystem instead of authentication on CalDAV)
|
||||
|
||||
## Contributing
|
||||
|
||||
Pull requests with short instructions for various calendar services are welcome.
|
||||
If you find bugs, report them on the [issue tracker on GitHub](https://github.com/Maarrk/silverbullet-icalendar/issues).
|
||||
|
||||
### Building from source
|
||||
|
||||
To build this plug, make sure you have [SilverBullet installed](https://silverbullet.md/Install). Then, build the plug with:
|
||||
To build this plug, you need [Deno](https://deno.land/) installed. Then, build the plug with:
|
||||
|
||||
```shell
|
||||
deno task build
|
||||
```
|
||||
|
||||
Or to watch for changes and rebuild automatically
|
||||
Or to watch for changes and rebuild automatically:
|
||||
|
||||
```shell
|
||||
deno task watch
|
||||
```
|
||||
|
||||
Then, copy the resulting `.plug.js` file into your space's `_plug` folder. Or build and copy in one command:
|
||||
|
||||
```shell
|
||||
deno task build && cp *.plug.js /my/space/_plug/
|
||||
```
|
||||
|
||||
SilverBullet will automatically sync and load the new version of the plug (or speed up this process by running the {[Sync: Now]} command).
|
||||
The compiled plug will be written to `icalendar.plug.js`. This file is tracked by Git in this repository to allow for easy installation via the `PLUG.md` file.
|
||||
|
||||
## License
|
||||
|
||||
|
||||
152393
SilverBullet_digest.md
Normal file
152393
SilverBullet_digest.md
Normal file
File diff suppressed because one or more lines are too long
19
check_versions.sh
Executable file
19
check_versions.sh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Extract versions
|
||||
TS_VERSION=$(grep "const VERSION =" icalendar.ts | cut -d'"' -f2)
|
||||
YAML_VERSION=$(grep "version:" icalendar.plug.yaml | head -n 1 | awk '{print $2}')
|
||||
PLUG_MD_VERSION=$(grep "version:" PLUG.md | head -n 1 | awk '{print $2}')
|
||||
|
||||
echo "Checking versions..."
|
||||
echo "icalendar.ts: $TS_VERSION"
|
||||
echo "icalendar.plug.yaml: $YAML_VERSION"
|
||||
echo "PLUG.md: $PLUG_MD_VERSION"
|
||||
|
||||
if [ "$TS_VERSION" == "$YAML_VERSION" ] && [ "$YAML_VERSION" == "$PLUG_MD_VERSION" ]; then
|
||||
echo "✅ All versions match."
|
||||
exit 0
|
||||
else
|
||||
echo "❌ Version mismatch detected!"
|
||||
exit 1
|
||||
fi
|
||||
@@ -0,0 +1,5 @@
|
||||
# Track fix_recurring_visibility_20260219 Context
|
||||
|
||||
- [Specification](./spec.md)
|
||||
- [Implementation Plan](./plan.md)
|
||||
- [Metadata](./metadata.json)
|
||||
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"track_id": "fix_recurring_visibility_20260219",
|
||||
"type": "bug",
|
||||
"status": "new",
|
||||
"created_at": "2026-02-19T00:00:00Z",
|
||||
"updated_at": "2026-02-19T00:00:00Z",
|
||||
"description": "Fix issue where recurring meetings are not showing up."
|
||||
}
|
||||
24
conductor/archive/fix_recurring_visibility_20260219/plan.md
Normal file
24
conductor/archive/fix_recurring_visibility_20260219/plan.md
Normal file
@@ -0,0 +1,24 @@
|
||||
# Implementation Plan - Fix Recurring Meetings Visibility
|
||||
|
||||
## Phase 1: Investigation & Reproduction [checkpoint: 8137d63]
|
||||
- [x] Task: Create validation test case 6122599
|
||||
- [x] Add a test in `icalendar_test.ts` with a valid weekly recurring event starting in the past.
|
||||
- [x] Assert that it returns multiple occurrences within the 30-day window.
|
||||
- [x] Run the test to see if it fails (confirming the bug).
|
||||
- [x] Task: Investigate Object RRULE 6122599
|
||||
- [x] The logs show `Invalid rrule type (object)`. This means `ts-ics` is parsing RRULE into an object, not a string.
|
||||
- [x] Create a test case where `rrule` is an object (mocking `ts-ics` output).
|
||||
- [x] Verify that it returns only 1 event (the bug).
|
||||
- [x] Task: Conductor - User Manual Verification 'Investigation & Reproduction' (Protocol in workflow.md)
|
||||
|
||||
## Phase 2: Fix Logic [checkpoint: a4acfa1]
|
||||
- [x] Task: Support Object RRULE in `expandRecurrences` f7f6028
|
||||
- [x] Modify `expandRecurrences` to handle `rrule` as an object.
|
||||
- [x] It likely needs to be converted back to a string or used directly if `rrule` library supports it.
|
||||
- [x] Run the new test case to confirm the fix.
|
||||
- [x] Task: Conductor - User Manual Verification 'Fix Logic' (Protocol in workflow.md)
|
||||
|
||||
## Phase 3: Cleanup & Verification [checkpoint: 9d07e7a]
|
||||
- [x] Task: Full Regression Check f1bafb6
|
||||
- [x] Run all tests in `icalendar_test.ts`.
|
||||
- [x] Task: Conductor - User Manual Verification 'Cleanup & Verification' (Protocol in workflow.md)
|
||||
18
conductor/archive/fix_recurring_visibility_20260219/spec.md
Normal file
18
conductor/archive/fix_recurring_visibility_20260219/spec.md
Normal file
@@ -0,0 +1,18 @@
|
||||
# Specification: Fix Recurring Meetings Visibility
|
||||
|
||||
## Overview
|
||||
Users report that all recurring meetings are missing from calendar views without any error messages. This suggests an issue with the expansion or indexing logic for recurring events, possibly introduced by recent changes.
|
||||
|
||||
## Functional Requirements
|
||||
- **Visibility:** Recurring events must appear in the calendar views.
|
||||
- **Expansion:** The `expandRecurrences` function must correctly expand valid RRULE strings into occurrences within the specified window.
|
||||
|
||||
## Implementation Steps
|
||||
1. **Investigation:** Create a test case with a *valid* recurring event (unlike the previous invalid one) and verify if `expandRecurrences` produces the expected occurrences.
|
||||
2. **Debugging:** Inspect the `filter` logic in `expandRecurrences` (specifically the `filterStart` and `windowEnd` logic).
|
||||
3. **Fix:** Adjust the logic to ensure valid occurrences are returned.
|
||||
4. **Verify:** Confirm the fix with the new test case.
|
||||
|
||||
## Acceptance Criteria
|
||||
- [ ] A new test case with a valid recurring event passes and returns the expected number of occurrences.
|
||||
- [ ] Recurring events are visible in the calendar view (manual verification).
|
||||
5
conductor/archive/fix_version_mismatch_20260219/index.md
Normal file
5
conductor/archive/fix_version_mismatch_20260219/index.md
Normal file
@@ -0,0 +1,5 @@
|
||||
# Track fix_version_mismatch_20260219 Context
|
||||
|
||||
- [Specification](./spec.md)
|
||||
- [Implementation Plan](./plan.md)
|
||||
- [Metadata](./metadata.json)
|
||||
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"track_id": "fix_version_mismatch_20260219",
|
||||
"type": "chore",
|
||||
"status": "new",
|
||||
"created_at": "2026-02-19T00:00:00Z",
|
||||
"updated_at": "2026-02-19T00:00:00Z",
|
||||
"description": "Fix version inconsistency in PLUG.md and icalendar.plug.yaml and investigate plug-manager error."
|
||||
}
|
||||
9
conductor/archive/fix_version_mismatch_20260219/plan.md
Normal file
9
conductor/archive/fix_version_mismatch_20260219/plan.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Implementation Plan - Fix Version Mismatch
|
||||
|
||||
## Phase 1: Fix & Process Update [checkpoint: 944ce96]
|
||||
- [x] Task: Run version sync script e17bf4d
|
||||
- [x] Execute `deno task sync-version` (via Docker) to automatically update `icalendar.ts`, `icalendar.plug.yaml`, and `PLUG.md` to match `deno.json`.
|
||||
- [x] Task: Update Workflow Protocol 9bcfaa7
|
||||
- [x] Modify `conductor/workflow.md` -> "Track Completion Protocol" -> "Version Bump" section.
|
||||
- [x] Replace manual file list with instruction: "Update `deno.json` version and run `deno task sync-version`".
|
||||
- [x] Task: Conductor - User Manual Verification 'Fix & Process Update' (Protocol in workflow.md)
|
||||
19
conductor/archive/fix_version_mismatch_20260219/spec.md
Normal file
19
conductor/archive/fix_version_mismatch_20260219/spec.md
Normal file
@@ -0,0 +1,19 @@
|
||||
# Specification: Fix Version Inconsistency and Plug Metadata
|
||||
|
||||
## Overview
|
||||
The recent version bump to `0.3.29` was incomplete. It updated `icalendar.ts` and `deno.json` but missed `PLUG.md` and `icalendar.plug.yaml`. This mismatch can cause update issues. Additionally, a user reported a `plug-manager` error stating "No plug name provided" when fetching `PLUG.md`, which needs investigation.
|
||||
|
||||
## Functional Requirements
|
||||
- **Version Consistency:** Ensure `icalendar.ts`, `deno.json`, `PLUG.md`, and `icalendar.plug.yaml` all reflect version `0.3.29`.
|
||||
- **Metadata Verification:** Verify `PLUG.md` contains the correct YAML frontmatter expected by SilverBullet's `plug-manager`.
|
||||
- **Process Improvement:** Update the `conductor/workflow.md` to explicitly list all files that must be updated during a version bump.
|
||||
|
||||
## Implementation Steps
|
||||
1. **Update Versions:** Update `version` in `PLUG.md` and `icalendar.plug.yaml` to `0.3.29`.
|
||||
2. **Update Workflow:** Modify `conductor/workflow.md` -> "Track Completion Protocol" -> "Version Bump" section to list all required files (`PLUG.md`, `icalendar.plug.yaml`, `icalendar.ts`, `deno.json`).
|
||||
3. **Commit & Push:** Commit these fixes and push.
|
||||
|
||||
## Acceptance Criteria
|
||||
- [ ] `PLUG.md` version is `0.3.29`.
|
||||
- [ ] `icalendar.plug.yaml` version is `0.3.29`.
|
||||
- [ ] `conductor/workflow.md` lists all 4 files in the Version Bump protocol.
|
||||
23
conductor/code_styleguides/general.md
Normal file
23
conductor/code_styleguides/general.md
Normal file
@@ -0,0 +1,23 @@
|
||||
# General Code Style Principles
|
||||
|
||||
This document outlines general coding principles that apply across all languages and frameworks used in this project.
|
||||
|
||||
## Readability
|
||||
- Code should be easy to read and understand by humans.
|
||||
- Avoid overly clever or obscure constructs.
|
||||
|
||||
## Consistency
|
||||
- Follow existing patterns in the codebase.
|
||||
- Maintain consistent formatting, naming, and structure.
|
||||
|
||||
## Simplicity
|
||||
- Prefer simple solutions over complex ones.
|
||||
- Break down complex problems into smaller, manageable parts.
|
||||
|
||||
## Maintainability
|
||||
- Write code that is easy to modify and extend.
|
||||
- Minimize dependencies and coupling.
|
||||
|
||||
## Documentation
|
||||
- Document *why* something is done, not just *what*.
|
||||
- Keep documentation up-to-date with code changes.
|
||||
43
conductor/code_styleguides/typescript.md
Normal file
43
conductor/code_styleguides/typescript.md
Normal file
@@ -0,0 +1,43 @@
|
||||
# Google TypeScript Style Guide Summary
|
||||
|
||||
This document summarizes key rules and best practices from the Google TypeScript Style Guide, which is enforced by the `gts` tool.
|
||||
|
||||
## 1. Language Features
|
||||
- **Variable Declarations:** Always use `const` or `let`. **`var` is forbidden.** Use `const` by default.
|
||||
- **Modules:** Use ES6 modules (`import`/`export`). **Do not use `namespace`.**
|
||||
- **Exports:** Use named exports (`export {MyClass};`). **Do not use default exports.**
|
||||
- **Classes:**
|
||||
- **Do not use `#private` fields.** Use TypeScript's `private` visibility modifier.
|
||||
- Mark properties never reassigned outside the constructor with `readonly`.
|
||||
- **Never use the `public` modifier** (it's the default). Restrict visibility with `private` or `protected` where possible.
|
||||
- **Functions:** Prefer function declarations for named functions. Use arrow functions for anonymous functions/callbacks.
|
||||
- **String Literals:** Use single quotes (`'`). Use template literals (`` ` ``) for interpolation and multi-line strings.
|
||||
- **Equality Checks:** Always use triple equals (`===`) and not equals (`!==`).
|
||||
- **Type Assertions:** **Avoid type assertions (`x as SomeType`) and non-nullability assertions (`y!`)**. If you must use them, provide a clear justification.
|
||||
|
||||
## 2. Disallowed Features
|
||||
- **`any` Type:** **Avoid `any`**. Prefer `unknown` or a more specific type.
|
||||
- **Wrapper Objects:** Do not instantiate `String`, `Boolean`, or `Number` wrapper classes.
|
||||
- **Automatic Semicolon Insertion (ASI):** Do not rely on it. **Explicitly end all statements with a semicolon.**
|
||||
- **`const enum`:** Do not use `const enum`. Use plain `enum` instead.
|
||||
- **`eval()` and `Function(...string)`:** Forbidden.
|
||||
|
||||
## 3. Naming
|
||||
- **`UpperCamelCase`:** For classes, interfaces, types, enums, and decorators.
|
||||
- **`lowerCamelCase`:** For variables, parameters, functions, methods, and properties.
|
||||
- **`CONSTANT_CASE`:** For global constant values, including enum values.
|
||||
- **`_` Prefix/Suffix:** **Do not use `_` as a prefix or suffix** for identifiers, including for private properties.
|
||||
|
||||
## 4. Type System
|
||||
- **Type Inference:** Rely on type inference for simple, obvious types. Be explicit for complex types.
|
||||
- **`undefined` and `null`:** Both are supported. Be consistent within your project.
|
||||
- **Optional vs. `|undefined`:** Prefer optional parameters and fields (`?`) over adding `|undefined` to the type.
|
||||
- **`Array<T>` Type:** Use `T[]` for simple types. Use `Array<T>` for more complex union types (e.g., `Array<string | number>`).
|
||||
- **`{}` Type:** **Do not use `{}`**. Prefer `unknown`, `Record<string, unknown>`, or `object`.
|
||||
|
||||
## 5. Comments and Documentation
|
||||
- **JSDoc:** Use `/** JSDoc */` for documentation, `//` for implementation comments.
|
||||
- **Redundancy:** **Do not declare types in `@param` or `@return` blocks** (e.g., `/** @param {string} user */`). This is redundant in TypeScript.
|
||||
- **Add Information:** Comments must add information, not just restate the code.
|
||||
|
||||
*Source: [Google TypeScript Style Guide](https://google.github.io/styleguide/tsguide.html)*
|
||||
14
conductor/index.md
Normal file
14
conductor/index.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# Project Context
|
||||
|
||||
## Definition
|
||||
- [Product Definition](./product.md)
|
||||
- [Product Guidelines](./product-guidelines.md)
|
||||
- [Tech Stack](./tech-stack.md)
|
||||
|
||||
## Workflow
|
||||
- [Workflow](./workflow.md)
|
||||
- [Code Style Guides](./code_styleguides/)
|
||||
|
||||
## Management
|
||||
- [Tracks Registry](./tracks.md)
|
||||
- [Tracks Directory](./tracks/)
|
||||
17
conductor/product-guidelines.md
Normal file
17
conductor/product-guidelines.md
Normal file
@@ -0,0 +1,17 @@
|
||||
# Product Guidelines - SilverBullet iCalendar Plug
|
||||
|
||||
## Documentation and Communication Style
|
||||
- **Technical and Concise:** All documentation, configuration examples, and user-facing messages should be accurate, brief, and focused on providing high value to the user. Avoid unnecessary fluff or conversational filler.
|
||||
- **Example-Driven:** Prioritize clear, copy-pasteable configuration snippets and query examples to help users get started quickly.
|
||||
|
||||
## Visual Identity and User Interface
|
||||
- **Native SilverBullet Integration:** The plug should feel like a core part of the SilverBullet experience. Commands, notifications, and any future UI elements must strictly adhere to SilverBullet's design patterns and aesthetic.
|
||||
- **Informative and Actionable Feedback:**
|
||||
- Notifications should provide immediate clarity on the outcome of actions (e.g., "Synced 194 events", "Sync failed: HTTP 404").
|
||||
- Error messages should be descriptive enough to aid in troubleshooting (e.g., specifying which source failed).
|
||||
- **Subtle Consistency:** Use consistent naming conventions for commands (`iCalendar: Sync`, `iCalendar: Force Sync`, etc.) to maintain a professional and organized command palette.
|
||||
|
||||
## Code and Maintenance Guidelines (Inferred)
|
||||
- **Robust Error Handling:** Always catch and log errors during fetch and parse operations to prevent the entire sync process from crashing.
|
||||
- **Performance First:** Efficiently process large `.ics` files and avoid redundant indexing operations.
|
||||
- **Version Alignment:** Ensure the version number is synchronized across `deno.json`, `icalendar.plug.yaml`, `PLUG.md`, and the TypeScript source code.
|
||||
23
conductor/product.md
Normal file
23
conductor/product.md
Normal file
@@ -0,0 +1,23 @@
|
||||
# Initial Concept
|
||||
`silverbullet-icalendar` is a Plug for SilverBullet that reads external iCalendar data (.ics format) and integrates it into the SilverBullet environment.
|
||||
|
||||
# Product Definition - SilverBullet iCalendar Plug
|
||||
|
||||
## Vision
|
||||
A reliable and seamless bridge between external iCalendar services and the SilverBullet knowledge management environment, enabling users to consolidate their scheduling data within their personal workspace.
|
||||
|
||||
## Target Audience
|
||||
- SilverBullet users who need to integrate external calendars (Google, Nextcloud, Outlook, etc.) directly into their notes and queries.
|
||||
|
||||
## Core Goals & Features
|
||||
- **Reliable Multi-Source Synchronization:** Support for fetching and parsing `.ics` data from various providers like Google Calendar and Nextcloud.
|
||||
- **SilverBullet Index Integration:** Seamlessly index calendar events using the `ical-event` tag, making them instantly queryable using SilverBullet's Lua Integrated Query (LIQ).
|
||||
- **Robust Timezone Handling:** Accurate conversion and shifting of event times to ensure consistency regardless of the source provider's configuration.
|
||||
- **Cache Management:** Efficient local caching of calendar data with user-configurable durations and force-sync capabilities.
|
||||
- **Clean Indexing:** Sanitization of complex iCalendar objects into flat, query-friendly metadata.
|
||||
|
||||
## Technology Stack (Inferred)
|
||||
- **Language:** TypeScript
|
||||
- **Runtime:** Deno
|
||||
- **Platform:** SilverBullet Plug API
|
||||
- **Parsing Library:** `ts-ics`
|
||||
1
conductor/setup_state.json
Normal file
1
conductor/setup_state.json
Normal file
@@ -0,0 +1 @@
|
||||
{"last_successful_step": "2.5_workflow"}
|
||||
19
conductor/tech-stack.md
Normal file
19
conductor/tech-stack.md
Normal file
@@ -0,0 +1,19 @@
|
||||
# Technology Stack - SilverBullet iCalendar Plug
|
||||
|
||||
## Core Runtime & Language
|
||||
- **Language:** [TypeScript](https://www.typescriptlang.org/) - Provides type safety and modern JavaScript features for robust plug development.
|
||||
- **Runtime:** [Deno](https://deno.com/) - A secure-by-default runtime for JavaScript and TypeScript, used for building and running the plug's development tasks.
|
||||
|
||||
## Platform & API
|
||||
- **Platform:** [SilverBullet Plug API](https://silverbullet.md/Plugs) - The official API for extending SilverBullet functionality.
|
||||
- **Dependency Management:** [JSR](https://jsr.io/) and [ESM.sh](https://esm.sh/) - Used for importing the SilverBullet syscalls and external libraries like `ts-ics`.
|
||||
|
||||
## Libraries
|
||||
- **iCalendar Parsing:** [`ts-ics`](https://www.npmjs.com/package/ts-ics) (v2.4.0) - A library for parsing iCalendar data into structured JavaScript objects.
|
||||
- **Recurrence Expansion:** [`rrule`](https://www.npmjs.com/package/rrule) (v2.8.1) - A library for expanding recurring event rules (RRULE) into individual occurrences.
|
||||
|
||||
## Build & Development Tools
|
||||
- **Task Orchestration:** Deno Tasks (defined in `deno.json`) - Handles version synchronization and plug compilation.
|
||||
- **Compiler:** `plug-compile.js` - The standard SilverBullet utility for bundling the TypeScript source and manifest into a `.plug.js` file.
|
||||
- **Version Control:** Git - For source code management and integration with Gitea Actions.
|
||||
- **CI/CD:** Gitea Actions - Automates the build and deployment process upon pushes to the repository.
|
||||
23
conductor/tracks.md
Normal file
23
conductor/tracks.md
Normal file
@@ -0,0 +1,23 @@
|
||||
# Project Tracks
|
||||
|
||||
This file tracks all major tracks for the project. Each track has its own detailed plan in its respective folder.
|
||||
|
||||
---
|
||||
|
||||
- [x] **Track: Upgrade the SilverBullet iCalendar plug to use DST-aware timezone resolution and add recurring event support using rrule.**
|
||||
*Link: [./tracks/timezone_rrule_20260218/](./tracks/timezone_rrule_20260218/)*
|
||||
|
||||
---
|
||||
|
||||
- [x] **Track: Fix TypeError: r.replace is not a function in icalendar.ts**
|
||||
*Link: [./tracks/fix_rrule_type_error_20260219/](./tracks/fix_rrule_type_error_20260219/)*
|
||||
|
||||
---
|
||||
|
||||
- [x] **Track: Fix RRULE object expansion error by correctly mapping object keys to standard iCalendar RRULE properties.**
|
||||
*Link: [./tracks/fix_rrule_object_mapping_20260219/](./tracks/fix_rrule_object_mapping_20260219/)*
|
||||
|
||||
---
|
||||
|
||||
- [x] **Track: Fix RRULE UNTIL object conversion error: Invalid UNTIL value: [object Object]**
|
||||
*Link: [./tracks/fix_rrule_until_conversion_20260219/](./tracks/fix_rrule_until_conversion_20260219/)*
|
||||
@@ -0,0 +1,5 @@
|
||||
# Track fix_rrule_object_mapping_20260219 Context
|
||||
|
||||
- [Specification](./spec.md)
|
||||
- [Implementation Plan](./plan.md)
|
||||
- [Metadata](./metadata.json)
|
||||
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"track_id": "fix_rrule_object_mapping_20260219",
|
||||
"type": "bug",
|
||||
"status": "new",
|
||||
"created_at": "2026-02-19T00:00:00Z",
|
||||
"updated_at": "2026-02-19T00:00:00Z",
|
||||
"description": "Fix RRULE object expansion error by correctly mapping object keys to standard iCalendar RRULE properties."
|
||||
}
|
||||
19
conductor/tracks/fix_rrule_object_mapping_20260219/plan.md
Normal file
19
conductor/tracks/fix_rrule_object_mapping_20260219/plan.md
Normal file
@@ -0,0 +1,19 @@
|
||||
# Implementation Plan - Fix RRULE Object Mapping
|
||||
|
||||
## Phase 1: Reproduction [checkpoint: 2fbb260]
|
||||
- [x] Task: Reproduce `Unknown RRULE property` error 0b67cbb
|
||||
- [x] Modify the test case in `icalendar_test.ts` to use `frequency` instead of `freq` in the mock object.
|
||||
- [x] Run the test and confirm it fails with the expected error.
|
||||
- [x] Task: Conductor - User Manual Verification 'Reproduction' (Protocol in workflow.md)
|
||||
|
||||
## Phase 2: Fix Implementation [checkpoint: fe28a5c]
|
||||
- [x] Task: Implement mapping logic in `icalendar.ts` b8bc6cc
|
||||
- [x] Create a mapping object for verbose keys to iCal keys.
|
||||
- [x] Update `expandRecurrences` to use this mapping.
|
||||
- [x] Run the test to confirm it passes.
|
||||
- [x] Task: Conductor - User Manual Verification 'Fix Implementation' (Protocol in workflow.md)
|
||||
|
||||
## Phase 3: Verification & Cleanup [checkpoint: 793326a]
|
||||
- [x] Task: Full Regression Check 7f9a618
|
||||
- [x] Run all tests in `icalendar_test.ts`.
|
||||
- [x] Task: Conductor - User Manual Verification 'Verification & Cleanup' (Protocol in workflow.md)
|
||||
32
conductor/tracks/fix_rrule_object_mapping_20260219/spec.md
Normal file
32
conductor/tracks/fix_rrule_object_mapping_20260219/spec.md
Normal file
@@ -0,0 +1,32 @@
|
||||
# Specification: Fix RRULE Object Expansion Error
|
||||
|
||||
## Overview
|
||||
The previous fix for handling object-type `rrule` properties (returned by `ts-ics`) introduced a regression. The conversion logic used uppercase full names (e.g., `FREQUENCY`), but the `rrule` library's `parseString` method expects standard iCalendar shortened keys (e.g., `FREQ`). This results in an `Error: Unknown RRULE property 'FREQUENCY'`.
|
||||
|
||||
## Functional Requirements
|
||||
- **Correct Key Mapping:** The logic that converts an `rrule` object back to a string must use standard iCalendar RRULE property keys.
|
||||
- **Mapping Table:**
|
||||
- `frequency` -> `FREQ`
|
||||
- `until` -> `UNTIL`
|
||||
- `count` -> `COUNT`
|
||||
- `interval` -> `INTERVAL`
|
||||
- `bysecond` -> `BYSECOND`
|
||||
- `byminute` -> `BYMINUTE`
|
||||
- `byhour` -> `BYHOUR`
|
||||
- `byday` -> `BYDAY`
|
||||
- `bymonthday` -> `BYMONTHDAY`
|
||||
- `byyearday` -> `BYYEARDAY`
|
||||
- `byweekno` -> `BYWEEKNO`
|
||||
- `bymonth` -> `BYMONTH`
|
||||
- `bysetpos` -> `BYSETPOS`
|
||||
- `wkst` -> `WKST`
|
||||
- **Case Insensitivity:** The mapping should be case-insensitive for the input object keys.
|
||||
|
||||
## Implementation Steps
|
||||
1. **Reproduce:** Update the existing `expandRecurrences - object rrule` test case to use the key `frequency` and verify it fails with the reported error.
|
||||
2. **Fix:** Implement a mapping function in `icalendar.ts` to translate object keys to standard RRULE keys before stringifying.
|
||||
3. **Verify:** Run the test case to confirm it now passes.
|
||||
|
||||
## Acceptance Criteria
|
||||
- [ ] Test `expandRecurrences - object rrule` passes with an object using `frequency` key.
|
||||
- [ ] No "Unknown RRULE property" errors are logged for valid RRULE objects.
|
||||
5
conductor/tracks/fix_rrule_type_error_20260219/index.md
Normal file
5
conductor/tracks/fix_rrule_type_error_20260219/index.md
Normal file
@@ -0,0 +1,5 @@
|
||||
# Track fix_rrule_type_error_20260219 Context
|
||||
|
||||
- [Specification](./spec.md)
|
||||
- [Implementation Plan](./plan.md)
|
||||
- [Metadata](./metadata.json)
|
||||
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"track_id": "fix_rrule_type_error_20260219",
|
||||
"type": "bug",
|
||||
"status": "new",
|
||||
"created_at": "2026-02-19T00:00:00Z",
|
||||
"updated_at": "2026-02-19T00:00:00Z",
|
||||
"description": "Fix TypeError: r.replace is not a function in icalendar.ts"
|
||||
}
|
||||
20
conductor/tracks/fix_rrule_type_error_20260219/plan.md
Normal file
20
conductor/tracks/fix_rrule_type_error_20260219/plan.md
Normal file
@@ -0,0 +1,20 @@
|
||||
# Implementation Plan - Fix `r.replace is not a function`
|
||||
|
||||
## Phase 1: Reproduction & Test Setup [checkpoint: df0ddaf]
|
||||
- [x] Task: Create a reproduction test case 1a36c64
|
||||
- [x] Create a new test case in `icalendar_test.ts` that mocks an event with a non-string `rrule` property (e.g., an object or number).
|
||||
- [x] Run the test to confirm it fails with the expected `TypeError`.
|
||||
- [x] Task: Conductor - User Manual Verification 'Reproduction & Test Setup' (Protocol in workflow.md)
|
||||
|
||||
## Phase 2: Implementation [checkpoint: 1c48f78]
|
||||
- [x] Task: Implement defensive check in `icalendar.ts` d7401dd
|
||||
- [x] Modify `expandRecurrences` function to check if `rruleStr` is a string before calling `.replace()`.
|
||||
- [x] If `rruleStr` is not a string, log a warning and return the original event (non-recurring fallback).
|
||||
- [x] Run the reproduction test again to confirm it passes.
|
||||
- [x] Task: Conductor - User Manual Verification 'Implementation' (Protocol in workflow.md)
|
||||
|
||||
## Phase 3: Verification & Cleanup
|
||||
- [x] Task: Verify fix and check for regressions
|
||||
- [x] Run all tests in `icalendar_test.ts` to ensure existing functionality is preserved.
|
||||
- [x] (Optional) Verify with actual calendar sync if possible/safe.
|
||||
- [x] Task: Conductor - User Manual Verification 'Verification & Cleanup' (Protocol in workflow.md)
|
||||
25
conductor/tracks/fix_rrule_type_error_20260219/spec.md
Normal file
25
conductor/tracks/fix_rrule_type_error_20260219/spec.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# Specification: Fix `r.replace is not a function` in `expandRecurrences`
|
||||
|
||||
## Overview
|
||||
This track addresses a `TypeError: r.replace is not a function` error occurring in `icalendar.ts` during calendar synchronization. The error suggests that the `rrule` property of an event is not a string when it reaches the `expandRecurrences` function, causing the subsequent `.replace()` call to fail. This is likely due to the `ts-ics` parser returning a non-string value (e.g., an object or `undefined`) for the `rrule` property in certain scenarios (specifically observed with Outlook calendars).
|
||||
|
||||
## Functional Requirements
|
||||
- **Defensive RRULE Handling:** The `expandRecurrences` function in `icalendar.ts` must safely handle cases where `rrule` (or `recurrenceRule`) is not a string.
|
||||
- **Graceful Fallback:** If `rrule` is not a string:
|
||||
- It should be ignored/logged if it cannot be interpreted as a valid RRULE string, preventing the crash.
|
||||
- The event should still be processed (treated as a non-recurring event if the rule is invalid), rather than crashing the entire sync for that event.
|
||||
|
||||
## Non-Functional Requirements
|
||||
- **Stability:** The plug should not crash or throw unhandled exceptions during sync due to malformed or unexpected property types in the source ICS data.
|
||||
- **Logging:** Maintain existing error logging but ensure the error message is descriptive (e.g., "Invalid RRULE type: object").
|
||||
|
||||
## Implementation Steps
|
||||
1. **Reproduce Issue:** Create a unit test in `icalendar_test.ts` that mocks an `icsEvent` with a non-string `rrule` property (e.g., an object or number) and calls `expandRecurrences`.
|
||||
2. **Implement Fix:** Modify `icalendar.ts` to check the type of `rruleStr` before calling `.replace()`.
|
||||
- If it's not a string, attempt to convert it or return the original event (as if no recurrence rule exists) with a warning.
|
||||
3. **Verify:** Run the new unit test to confirm the fix.
|
||||
|
||||
## Acceptance Criteria
|
||||
- [ ] A new unit test case exists in `icalendar_test.ts` that passes with a non-string `rrule`.
|
||||
- [ ] The `expandRecurrences` function no longer throws `TypeError: r.replace is not a function` when `rrule` is not a string.
|
||||
- [ ] The sync process completes successfully even if some events have malformed `rrule` properties.
|
||||
@@ -0,0 +1,5 @@
|
||||
# Track fix_rrule_until_conversion_20260219 Context
|
||||
|
||||
- [Specification](./spec.md)
|
||||
- [Implementation Plan](./plan.md)
|
||||
- [Metadata](./metadata.json)
|
||||
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"track_id": "fix_rrule_until_conversion_20260219",
|
||||
"type": "bug",
|
||||
"status": "new",
|
||||
"created_at": "2026-02-19T00:00:00Z",
|
||||
"updated_at": "2026-02-19T00:00:00Z",
|
||||
"description": "Fix RRULE UNTIL object conversion error: Invalid UNTIL value: [object Object]"
|
||||
}
|
||||
19
conductor/tracks/fix_rrule_until_conversion_20260219/plan.md
Normal file
19
conductor/tracks/fix_rrule_until_conversion_20260219/plan.md
Normal file
@@ -0,0 +1,19 @@
|
||||
# Implementation Plan - Fix RRULE UNTIL Conversion
|
||||
|
||||
## Phase 1: Reproduction [checkpoint: 02fcb7e]
|
||||
- [x] Task: Reproduce `Invalid UNTIL value` error 17de604
|
||||
- [x] Add a test case in `icalendar_test.ts` where `rrule` object has an `until` property as a `Date`.
|
||||
- [x] Run the test and confirm it fails with `Error: Invalid UNTIL value: [object Object]`.
|
||||
- [x] Task: Conductor - User Manual Verification 'Reproduction' (Protocol in workflow.md)
|
||||
|
||||
## Phase 2: Fix Implementation [checkpoint: 0f334b2]
|
||||
- [x] Task: Implement value formatting logic in `icalendar.ts` 331d0ab
|
||||
- [x] Update `expandRecurrences` to use a helper for property value conversion.
|
||||
- [x] Ensure `Date` objects are formatted as `YYYYMMDDTHHMMSSZ`.
|
||||
- [x] Run the test to confirm it passes.
|
||||
- [x] Task: Conductor - User Manual Verification 'Fix Implementation' (Protocol in workflow.md)
|
||||
|
||||
## Phase 3: Verification & Cleanup [checkpoint: a02f228]
|
||||
- [x] Task: Full Regression Check d090334
|
||||
- [x] Run all tests in `icalendar_test.ts`.
|
||||
- [x] Task: Conductor - User Manual Verification 'Verification & Cleanup' (Protocol in workflow.md)
|
||||
20
conductor/tracks/fix_rrule_until_conversion_20260219/spec.md
Normal file
20
conductor/tracks/fix_rrule_until_conversion_20260219/spec.md
Normal file
@@ -0,0 +1,20 @@
|
||||
# Specification: Fix RRULE UNTIL Object Conversion
|
||||
|
||||
## Overview
|
||||
The conversion of RRULE objects to strings fails for nested objects like `UNTIL` dates, resulting in `UNTIL=[object Object]`. This causes the `rrule` library to fail during expansion.
|
||||
|
||||
## Functional Requirements
|
||||
- **Robust Value Formatting:** Implement `formatRRuleValue` to handle various types of RRULE values:
|
||||
- **Date Objects:** Convert to `YYYYMMDDTHHMMSSZ`.
|
||||
- **Nested Date Objects:** (e.g., `{ date: Date }` from `ts-ics`) Extract and convert.
|
||||
- **Other Types:** Default to string conversion.
|
||||
- **Mapping Preservation:** Maintain the existing `RRULE_KEY_MAP` for key translation.
|
||||
|
||||
## Implementation Steps
|
||||
1. **Reproduce:** Add test `expandRecurrences - object rrule with until` in `icalendar_test.ts` using a Date object for `until`. Verify it fails with `Invalid UNTIL value`.
|
||||
2. **Fix:** Update `expandRecurrences` in `icalendar.ts` to use a formatting helper for values.
|
||||
3. **Verify:** Confirm the test case passes.
|
||||
|
||||
## Acceptance Criteria
|
||||
- [ ] Test `expandRecurrences - object rrule with until` passes.
|
||||
- [ ] The generated string correctly represents the date (e.g., `UNTIL=20260219T000000Z`).
|
||||
5
conductor/tracks/timezone_rrule_20260218/index.md
Normal file
5
conductor/tracks/timezone_rrule_20260218/index.md
Normal file
@@ -0,0 +1,5 @@
|
||||
# Track timezone_rrule_20260218 Context
|
||||
|
||||
- [Specification](./spec.md)
|
||||
- [Implementation Plan](./plan.md)
|
||||
- [Metadata](./metadata.json)
|
||||
8
conductor/tracks/timezone_rrule_20260218/metadata.json
Normal file
8
conductor/tracks/timezone_rrule_20260218/metadata.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"track_id": "timezone_rrule_20260218",
|
||||
"type": "feature",
|
||||
"status": "new",
|
||||
"created_at": "2026-02-18T11:20:00Z",
|
||||
"updated_at": "2026-02-18T11:20:00Z",
|
||||
"description": "Upgrade the SilverBullet iCalendar plug to use DST-aware timezone resolution and add recurring event support using rrule."
|
||||
}
|
||||
40
conductor/tracks/timezone_rrule_20260218/plan.md
Normal file
40
conductor/tracks/timezone_rrule_20260218/plan.md
Normal file
@@ -0,0 +1,40 @@
|
||||
# Implementation Plan: Proper Timezone Handling & Recurring Events
|
||||
|
||||
## Phase 1: Foundation - Timezone Mapping & Resolver [checkpoint: b8bf269]
|
||||
- [x] Task: Setup Timezone Map (WINDOWS_TO_IANA)
|
||||
- [x] Write failing tests for `resolveIanaName`
|
||||
- [x] Implement `WINDOWS_TO_IANA` mapping and `resolveIanaName` in `timezones.ts`
|
||||
- [x] Task: Implement UTC Offset Resolver using Intl
|
||||
- [x] Write failing tests for `getUtcOffsetMs`
|
||||
- [x] Implement `getUtcOffsetMs` in `timezones.ts`
|
||||
- [x] Task: Conductor - User Manual Verification 'Phase 1: Foundation' (Protocol in workflow.md)
|
||||
|
||||
## Phase 2: Core Logic - Extraction & Shifting [checkpoint: 10a6db5]
|
||||
- [x] Task: Fix Wall-Clock Extraction logic
|
||||
- [x] Write failing tests for `resolveEventStart` (mocking `Intl` if necessary)
|
||||
- [x] Implement `resolveEventStart` in `icalendar.ts` to handle local time ground truth
|
||||
- [x] Task: Conductor - User Manual Verification 'Phase 2: Core Logic' (Protocol in workflow.md)
|
||||
|
||||
## Phase 3: Features - Recurring Events & Filtering [checkpoint: ffaef28]
|
||||
- [x] Task: Integrate `rrule` library
|
||||
- [x] Add `rrule` to `deno.json` imports
|
||||
- [x] Verify import works in a simple script
|
||||
- [x] Task: Implement Recurring Event Expansion
|
||||
- [x] Write failing tests for `expandRecurrences`
|
||||
- [x] Implement `expandRecurrences` in `icalendar.ts`
|
||||
- [x] Task: Implement EXDATE support
|
||||
- [x] Write failing tests for EXDATE exclusion
|
||||
- [x] Update `expandRecurrences` to handle `EXDATE`
|
||||
- [x] Task: Implement Status Filtering
|
||||
- [x] Write failing tests for filtering "CANCELLED" events
|
||||
- [x] Update sync logic to filter based on iCalendar status
|
||||
- [x] Task: Conductor - User Manual Verification 'Phase 3: Features' (Protocol in workflow.md)
|
||||
|
||||
## Phase 4: Cleanup & Configuration [checkpoint: 533c240]
|
||||
- [x] Task: Remove obsolete configuration
|
||||
- [x] Write failing tests verifying `tzShift` is ignored/deprecated
|
||||
- [x] Remove `tzShift` and `hourShift` from `getSources` and `fetchAndParseCalendar`
|
||||
- [x] Task: Add `syncWindowDays` configuration
|
||||
- [x] Write failing tests for configurable expansion window
|
||||
- [x] Implement `syncWindowDays` in config and sync logic
|
||||
- [x] Task: Conductor - User Manual Verification 'Phase 4: Cleanup & Configuration' (Protocol in workflow.md)
|
||||
36
conductor/tracks/timezone_rrule_20260218/spec.md
Normal file
36
conductor/tracks/timezone_rrule_20260218/spec.md
Normal file
@@ -0,0 +1,36 @@
|
||||
# Specification: Proper Timezone Handling & Recurring Events
|
||||
|
||||
## Overview
|
||||
Upgrade the SilverBullet iCalendar plug to provide accurate, DST-aware timezone resolution and full support for recurring events (RRULE expansion). This replaces manual hour shifting with an automated, reliable system using IANA timezone standards and the `Intl` API.
|
||||
|
||||
## Functional Requirements
|
||||
- **IANA Timezone Mapping:** Implement a comprehensive mapping of 139 Windows timezone names to IANA identifiers using Unicode CLDR data.
|
||||
- **DST-Aware Offsets:** Calculate UTC offsets at runtime for specific event dates using the built-in `Intl.DateTimeFormat` API, ensuring accuracy during Daylight Saving Time transitions.
|
||||
- **Robust Date Extraction:** Correct the wall-clock extraction logic to prevent "double-shifting" of event times.
|
||||
- **Recurring Event Expansion:**
|
||||
- Integrate the `rrule` library to expand recurring events into individual occurrences.
|
||||
- Support `EXDATE` for excluding specific instances of a recurring series.
|
||||
- Implement a configurable `syncWindowDays` (default: 365) to limit the expansion range.
|
||||
- **Advanced Filtering:**
|
||||
- Filter out "CANCELLED" events based on the iCalendar status field.
|
||||
- (Optional) Add `includeTransparent` and `includeDeclined` per-source flags.
|
||||
- **Error Handling & Fallbacks:**
|
||||
- If a timezone is unrecognized, fallback to UTC and append a warning to the event's description.
|
||||
- **Configuration Cleanup:**
|
||||
- Remove the redundant `tzShift` / `hourShift` parameters.
|
||||
- Add `syncWindowDays` global config.
|
||||
|
||||
## Non-Functional Requirements
|
||||
- **Self-Contained:** Maintain the plug as a Deno-compatible project using `esm.sh` or `deno.json` imports.
|
||||
- **Performance:** Ensure efficient expansion of recurrences, even for busy calendars.
|
||||
|
||||
## Acceptance Criteria
|
||||
1. Events from various providers (Google, O365, Nextcloud) appear at the correct local time in SilverBullet, regardless of DST.
|
||||
2. All occurrences of a weekly recurring event within the sync window are indexed.
|
||||
3. Excluded dates (`EXDATE`) are correctly omitted from the index.
|
||||
4. Cancelled events are not indexed.
|
||||
5. The manual `tzShift` configuration is no longer required for correct time display.
|
||||
|
||||
## Out of Scope
|
||||
- Full CalDAV synchronization (this remains a read-only `.ics` fetcher).
|
||||
- UI for managing individual recurring instances (handled via SilverBullet queries).
|
||||
352
conductor/workflow.md
Normal file
352
conductor/workflow.md
Normal file
@@ -0,0 +1,352 @@
|
||||
# Project Workflow
|
||||
|
||||
## Guiding Principles
|
||||
|
||||
1. **The Plan is the Source of Truth:** All work must be tracked in `plan.md`
|
||||
2. **The Tech Stack is Deliberate:** Changes to the tech stack must be documented in `tech-stack.md` *before* implementation
|
||||
3. **Test-Driven Development:** Write unit tests before implementing functionality
|
||||
4. **High Code Coverage:** Aim for >80% code coverage for all modules
|
||||
5. **User Experience First:** Every decision should prioritize user experience
|
||||
6. **Non-Interactive & CI-Aware:** Prefer non-interactive commands. Use `CI=true` for watch-mode tools (tests, linters) to ensure single execution.
|
||||
|
||||
## Task Workflow
|
||||
|
||||
All tasks follow a strict lifecycle:
|
||||
|
||||
### Standard Task Workflow
|
||||
|
||||
1. **Select Task:** Choose the next available task from `plan.md` in sequential order
|
||||
|
||||
2. **Mark In Progress:** Before beginning work, edit `plan.md` and change the task from `[ ]` to `[~]`
|
||||
|
||||
3. **Write Failing Tests (Red Phase):**
|
||||
- Create a new test file for the feature or bug fix.
|
||||
- Write one or more unit tests that clearly define the expected behavior and acceptance criteria for the task.
|
||||
- **CRITICAL:** Run the tests and confirm that they fail as expected. This is the "Red" phase of TDD. Do not proceed until you have failing tests.
|
||||
|
||||
4. **Implement to Pass Tests (Green Phase):**
|
||||
- Write the minimum amount of application code necessary to make the failing tests pass.
|
||||
- Run the test suite again and confirm that all tests now pass. This is the "Green" phase.
|
||||
|
||||
5. **Refactor (Optional but Recommended):**
|
||||
- With the safety of passing tests, refactor the implementation code and the test code to improve clarity, remove duplication, and enhance performance without changing the external behavior.
|
||||
- Rerun tests to ensure they still pass after refactoring.
|
||||
|
||||
6. **Verify Coverage:** Run coverage reports using the project's chosen tools. For example, in a Python project, this might look like:
|
||||
```bash
|
||||
pytest --cov=app --cov-report=html
|
||||
```
|
||||
Target: >80% coverage for new code. The specific tools and commands will vary by language and framework.
|
||||
|
||||
7. **Document Deviations:** If implementation differs from tech stack:
|
||||
- **STOP** implementation
|
||||
- Update `tech-stack.md` with new design
|
||||
- Add dated note explaining the change
|
||||
- Resume implementation
|
||||
|
||||
8. **Commit Code Changes:**
|
||||
- Stage all code changes related to the task.
|
||||
    - Propose a clear, concise commit message, e.g., `feat(ui): Create basic HTML structure for calculator`.
|
||||
- Perform the commit.
|
||||
|
||||
9. **Attach Task Summary with Git Notes:**
|
||||
- **Step 9.1: Get Commit Hash:** Obtain the hash of the *just-completed commit* (`git log -1 --format="%H"`).
|
||||
- **Step 9.2: Draft Note Content:** Create a detailed summary for the completed task. This should include the task name, a summary of changes, a list of all created/modified files, and the core "why" for the change.
|
||||
- **Step 9.3: Attach Note:** Use the `git notes` command to attach the summary to the commit.
|
||||
```bash
|
||||
# The note content from the previous step is passed via the -m flag.
|
||||
git notes add -m "<note content>" <commit_hash>
|
||||
```
|
||||
|
||||
10. **Get and Record Task Commit SHA:**
|
||||
    - **Step 10.1: Update Plan:** Read `plan.md`, find the line for the completed task, update its status from `[~]` to `[x]`, and append the first 7 characters of the *just-completed* commit's hash.
|
||||
- **Step 10.2: Write Plan:** Write the updated content back to `plan.md`.
|
||||
|
||||
11. **Commit Plan Update:**
|
||||
- **Action:** Stage the modified `plan.md` file.
|
||||
- **Action:** Commit this change with a descriptive message (e.g., `conductor(plan): Mark task 'Create user model' as complete`).
|
||||
|
||||
### Phase Completion Verification and Checkpointing Protocol
|
||||
|
||||
**Trigger:** This protocol is executed immediately after a task is completed that also concludes a phase in `plan.md`.
|
||||
|
||||
1. **Announce Protocol Start:** Inform the user that the phase is complete and the verification and checkpointing protocol has begun.
|
||||
|
||||
2. **Ensure Test Coverage for Phase Changes:**
|
||||
- **Step 2.1: Determine Phase Scope:** To identify the files changed in this phase, you must first find the starting point. Read `plan.md` to find the Git commit SHA of the *previous* phase's checkpoint. If no previous checkpoint exists, the scope is all changes since the first commit.
|
||||
- **Step 2.2: List Changed Files:** Execute `git diff --name-only <previous_checkpoint_sha> HEAD` to get a precise list of all files modified during this phase.
|
||||
- **Step 2.3: Verify and Create Tests:** For each file in the list:
|
||||
- **CRITICAL:** First, check its extension. Exclude non-code files (e.g., `.json`, `.md`, `.yaml`).
|
||||
- For each remaining code file, verify a corresponding test file exists.
|
||||
- If a test file is missing, you **must** create one. Before writing the test, **first, analyze other test files in the repository to determine the correct naming convention and testing style.** The new tests **must** validate the functionality described in this phase's tasks (`plan.md`).
|
||||
|
||||
3. **Execute Automated Tests with Proactive Debugging:**
|
||||
- Before execution, you **must** announce the exact shell command you will use to run the tests.
|
||||
- **Example Announcement:** "I will now run the automated test suite to verify the phase. **Command:** `CI=true npm test`"
|
||||
- Execute the announced command.
|
||||
- If tests fail, you **must** inform the user and begin debugging. You may attempt to propose a fix a **maximum of two times**. If the tests still fail after your second proposed fix, you **must stop**, report the persistent failure, and ask the user for guidance.
|
||||
|
||||
4. **Propose a Detailed, Actionable Manual Verification Plan:**
|
||||
- **CRITICAL:** To generate the plan, first analyze `product.md`, `product-guidelines.md`, and `plan.md` to determine the user-facing goals of the completed phase.
|
||||
- You **must** generate a step-by-step plan that walks the user through the verification process, including any necessary commands and specific, expected outcomes.
|
||||
- The plan you present to the user **must** follow this format:
|
||||
|
||||
**For a Frontend Change:**
|
||||
```
|
||||
The automated tests have passed. For manual verification, please follow these steps:
|
||||
|
||||
**Manual Verification Steps:**
|
||||
1. **Start the development server with the command:** `npm run dev`
|
||||
2. **Open your browser to:** `http://localhost:3000`
|
||||
3. **Confirm that you see:** The new user profile page, with the user's name and email displayed correctly.
|
||||
```
|
||||
|
||||
**For a Backend Change:**
|
||||
```
|
||||
The automated tests have passed. For manual verification, please follow these steps:
|
||||
|
||||
**Manual Verification Steps:**
|
||||
1. **Ensure the server is running.**
|
||||
2. **Execute the following command in your terminal:** `curl -X POST http://localhost:8080/api/v1/users -d '{"name": "test"}'`
|
||||
3. **Confirm that you receive:** A JSON response with a status of `201 Created`.
|
||||
```
|
||||
|
||||
5. **Await Explicit User Feedback:**
|
||||
- After presenting the detailed plan, ask the user for confirmation: "**Does this meet your expectations? Please confirm with yes or provide feedback on what needs to be changed.**"
|
||||
- **PAUSE** and await the user's response. Do not proceed without an explicit yes or confirmation.
|
||||
|
||||
6. **Create Checkpoint Commit:**
|
||||
- Stage all changes. If no changes occurred in this step, proceed with an empty commit.
|
||||
- Perform the commit with a clear and concise message (e.g., `conductor(checkpoint): Checkpoint end of Phase X`).
|
||||
|
||||
7. **Attach Auditable Verification Report using Git Notes:**
|
||||
- **Step 7.1: Draft Note Content:** Create a detailed verification report including the automated test command, the manual verification steps, and the user's confirmation.
|
||||
- **Step 7.2: Attach Note:** Use the `git notes` command and the full commit hash from the previous step to attach the full report to the checkpoint commit.
|
||||
|
||||
8. **Get and Record Phase Checkpoint SHA:**
|
||||
- **Step 8.1: Get Commit Hash:** Obtain the hash of the *just-created checkpoint commit* (`git log -1 --format="%H"`).
|
||||
- **Step 8.2: Update Plan:** Read `plan.md`, find the heading for the completed phase, and append the first 7 characters of the commit hash in the format `[checkpoint: <sha>]`.
|
||||
- **Step 8.3: Write Plan:** Write the updated content back to `plan.md`.
|
||||
|
||||
9. **Commit Plan Update:**
|
||||
- **Action:** Stage the modified `plan.md` file.
|
||||
- **Action:** Commit this change with a descriptive message following the format `conductor(plan): Mark phase '<PHASE NAME>' as complete`.
|
||||
|
||||
10. **Announce Completion:** Inform the user that the phase is complete and the checkpoint has been created, with the detailed verification report attached as a git note.
|
||||
|
||||
### Track Completion Protocol
|
||||
|
||||
**Trigger:** This protocol is executed when all phases and tasks in a track are complete.
|
||||
|
||||
1. **Version Bump (Code Changes Only):**
|
||||
- If the track involved code changes (i.e., not purely documentation), you **must** bump the project version number.
|
||||
- Update the version in `deno.json`.
|
||||
- Run the version synchronization script: `docker run --rm -v $(pwd):/app -w /app denoland/deno:latest task sync-version`.
|
||||
- Commit the version bump with message: `chore: Bump version to <new_version>`.
|
||||
|
||||
2. **Push Changes (Code Changes Only):**
|
||||
- If the track involved code changes, you **must** push the changes to the remote repository.
|
||||
- **Command:** `git push`
|
||||
|
||||
3. **Monitor CI/CD (Code Changes Only):**
|
||||
- After pushing, you **must** monitor the resulting Gitea Action to ensure it completes successfully.
|
||||
- Use the `gitea-push-watch` skill if available, or check the Gitea interface manually.
|
||||
- **Requirement:** The track is NOT complete until the CI/CD pipeline passes. If it fails, you must investigate and fix the issue.
|
||||
|
||||
### Quality Gates
|
||||
|
||||
Before marking any task complete, verify:
|
||||
|
||||
- [ ] All tests pass
|
||||
- [ ] Code coverage meets requirements (>80%)
|
||||
- [ ] Code follows project's code style guidelines (as defined in `code_styleguides/`)
|
||||
- [ ] All public functions/methods are documented (e.g., docstrings, JSDoc, GoDoc)
|
||||
- [ ] Type safety is enforced (e.g., type hints, TypeScript types, Go types)
|
||||
- [ ] No linting or static analysis errors (using the project's configured tools)
|
||||
- [ ] Works correctly on mobile (if applicable)
|
||||
- [ ] Documentation updated if needed
|
||||
- [ ] No security vulnerabilities introduced
|
||||
|
||||
## Development Commands
|
||||
|
||||
**AI AGENT INSTRUCTION: This section should be adapted to the project's specific language, framework, and build tools.**
|
||||
|
||||
### Setup
|
||||
```bash
|
||||
# Example: Commands to set up the development environment (e.g., install dependencies, configure database)
|
||||
# e.g., for a Node.js project: npm install
|
||||
# e.g., for a Go project: go mod tidy
|
||||
```
|
||||
|
||||
### Daily Development
|
||||
```bash
|
||||
# Example: Commands for common daily tasks (e.g., start dev server, run tests, lint, format)
|
||||
# e.g., for a Node.js project: npm run dev, npm test, npm run lint
|
||||
# e.g., for a Go project: go run main.go, go test ./..., go fmt ./...
|
||||
```
|
||||
|
||||
### Before Committing
|
||||
```bash
|
||||
# Example: Commands to run all pre-commit checks (e.g., format, lint, type check, run tests)
|
||||
# e.g., for a Node.js project: npm run check
|
||||
# e.g., for a Go project: make check (if a Makefile exists)
|
||||
```
|
||||
|
||||
## Testing Requirements
|
||||
|
||||
### Unit Testing
|
||||
- Every module must have corresponding tests.
|
||||
- Use appropriate test setup/teardown mechanisms (e.g., fixtures, beforeEach/afterEach).
|
||||
- Mock external dependencies.
|
||||
- Test both success and failure cases.
|
||||
|
||||
### Integration Testing
|
||||
- Test complete user flows
|
||||
- Verify database transactions
|
||||
- Test authentication and authorization
|
||||
- Check form submissions
|
||||
|
||||
### Mobile Testing
|
||||
- Test on actual iPhone when possible
|
||||
- Use Safari developer tools
|
||||
- Test touch interactions
|
||||
- Verify responsive layouts
|
||||
- Check performance on 3G/4G
|
||||
|
||||
## Code Review Process
|
||||
|
||||
### Self-Review Checklist
|
||||
Before requesting review:
|
||||
|
||||
1. **Functionality**
|
||||
- Feature works as specified
|
||||
- Edge cases handled
|
||||
- Error messages are user-friendly
|
||||
|
||||
2. **Code Quality**
|
||||
- Follows style guide
|
||||
- DRY principle applied
|
||||
- Clear variable/function names
|
||||
- Appropriate comments
|
||||
|
||||
3. **Testing**
|
||||
- Unit tests comprehensive
|
||||
- Integration tests pass
|
||||
- Coverage adequate (>80%)
|
||||
|
||||
4. **Security**
|
||||
- No hardcoded secrets
|
||||
- Input validation present
|
||||
- SQL injection prevented
|
||||
- XSS protection in place
|
||||
|
||||
5. **Performance**
|
||||
- Database queries optimized
|
||||
- Images optimized
|
||||
- Caching implemented where needed
|
||||
|
||||
6. **Mobile Experience**
|
||||
- Touch targets adequate (44x44px)
|
||||
- Text readable without zooming
|
||||
- Performance acceptable on mobile
|
||||
- Interactions feel native
|
||||
|
||||
## Commit Guidelines
|
||||
|
||||
### Message Format
|
||||
```
|
||||
<type>(<scope>): <description>
|
||||
|
||||
[optional body]
|
||||
|
||||
[optional footer]
|
||||
```
|
||||
|
||||
### Types
|
||||
- `feat`: New feature
|
||||
- `fix`: Bug fix
|
||||
- `docs`: Documentation only
|
||||
- `style`: Formatting, missing semicolons, etc.
|
||||
- `refactor`: Code change that neither fixes a bug nor adds a feature
|
||||
- `test`: Adding missing tests
|
||||
- `chore`: Maintenance tasks
|
||||
|
||||
### Examples
|
||||
```bash
|
||||
git commit -m "feat(auth): Add remember me functionality"
|
||||
git commit -m "fix(posts): Correct excerpt generation for short posts"
|
||||
git commit -m "test(comments): Add tests for emoji reaction limits"
|
||||
git commit -m "style(mobile): Improve button touch targets"
|
||||
```
|
||||
|
||||
## Definition of Done
|
||||
|
||||
A task is complete when:
|
||||
|
||||
1. All code implemented to specification
|
||||
2. Unit tests written and passing
|
||||
3. Code coverage meets project requirements
|
||||
4. Documentation complete (if applicable)
|
||||
5. Code passes all configured linting and static analysis checks
|
||||
6. Works beautifully on mobile (if applicable)
|
||||
7. Implementation notes added to `plan.md`
|
||||
8. Changes committed with proper message
|
||||
9. Git note with task summary attached to the commit
|
||||
|
||||
## Emergency Procedures
|
||||
|
||||
### Critical Bug in Production
|
||||
1. Create hotfix branch from main
|
||||
2. Write failing test for bug
|
||||
3. Implement minimal fix
|
||||
4. Test thoroughly including mobile
|
||||
5. Deploy immediately
|
||||
6. Document in plan.md
|
||||
|
||||
### Data Loss
|
||||
1. Stop all write operations
|
||||
2. Restore from latest backup
|
||||
3. Verify data integrity
|
||||
4. Document incident
|
||||
5. Update backup procedures
|
||||
|
||||
### Security Breach
|
||||
1. Rotate all secrets immediately
|
||||
2. Review access logs
|
||||
3. Patch vulnerability
|
||||
4. Notify affected users (if any)
|
||||
5. Document and update security procedures
|
||||
|
||||
## Deployment Workflow
|
||||
|
||||
### Pre-Deployment Checklist
|
||||
- [ ] All tests passing
|
||||
- [ ] Coverage >80%
|
||||
- [ ] No linting errors
|
||||
- [ ] Mobile testing complete
|
||||
- [ ] Environment variables configured
|
||||
- [ ] Database migrations ready
|
||||
- [ ] Backup created
|
||||
|
||||
### Deployment Steps
|
||||
1. Merge feature branch to main
|
||||
2. Tag release with version
|
||||
3. Push to deployment service
|
||||
4. Run database migrations
|
||||
5. Verify deployment
|
||||
6. Test critical paths
|
||||
7. Monitor for errors
|
||||
|
||||
### Post-Deployment
|
||||
1. Monitor analytics
|
||||
2. Check error logs
|
||||
3. Gather user feedback
|
||||
4. Plan next iteration
|
||||
|
||||
## Continuous Improvement
|
||||
|
||||
- Review workflow weekly
|
||||
- Update based on pain points
|
||||
- Document lessons learned
|
||||
- Optimize for user happiness
|
||||
- Keep things simple and maintainable
|
||||
30
deno.json
Normal file
30
deno.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "icalendar-plug",
|
||||
"version": "0.3.32",
|
||||
"nodeModulesDir": "auto",
|
||||
"tasks": {
|
||||
"sync-version": "deno run -A scripts/sync-version.ts",
|
||||
"build": "deno task sync-version && deno run -A https://github.com/silverbulletmd/silverbullet/releases/download/2.4.1/plug-compile.js -c deno.json icalendar.plug.yaml",
|
||||
"watch": "deno task build --watch",
|
||||
"debug": "deno run -A https://raw.githubusercontent.com/silverbulletmd/silverbullet/v2.4.1/plug-compile.js -c deno.json icalendar.plug.yaml --debug"
|
||||
},
|
||||
"lint": {
|
||||
"rules": {
|
||||
"exclude": [
|
||||
"no-explicit-any"
|
||||
]
|
||||
}
|
||||
},
|
||||
"fmt": {
|
||||
"exclude": [
|
||||
"*.md",
|
||||
"**/*.md",
|
||||
"*.plug.js"
|
||||
]
|
||||
},
|
||||
"imports": {
|
||||
"@silverbulletmd/silverbullet": "jsr:@silverbulletmd/silverbullet@^2.4.1",
|
||||
"ts-ics": "npm:ts-ics@2.4.0",
|
||||
"rrule": "https://esm.sh/rrule@2.8.1"
|
||||
}
|
||||
}
|
||||
22
deno.jsonc
22
deno.jsonc
@@ -1,22 +0,0 @@
|
||||
{
|
||||
"tasks": {
|
||||
"build": "silverbullet plug:compile -c deno.jsonc icalendar.plug.yaml",
|
||||
"watch": "silverbullet plug:compile -c deno.jsonc icalendar.plug.yaml -w"
|
||||
},
|
||||
"lint": {
|
||||
"rules": {
|
||||
"exclude": ["no-explicit-any"]
|
||||
}
|
||||
},
|
||||
"fmt": {
|
||||
"exclude": [
|
||||
"*.md",
|
||||
"**/*.md",
|
||||
"*.plug.js"
|
||||
]
|
||||
},
|
||||
"imports": {
|
||||
"@silverbulletmd/silverbullet": "jsr:@silverbulletmd/silverbullet@^0.10.1",
|
||||
"ts-ics": "npm:ts-ics@1.6.5"
|
||||
}
|
||||
}
|
||||
19
docker-compose.yml
Normal file
19
docker-compose.yml
Normal file
@@ -0,0 +1,19 @@
|
||||
services:
|
||||
silverbullet:
|
||||
image: zefhemel/silverbullet:latest
|
||||
ports:
|
||||
- "3000:3000"
|
||||
volumes:
|
||||
- ./test_space:/space
|
||||
environment:
|
||||
- SB_USER=admin:admin
|
||||
- SB_LOG_PUSH=true
|
||||
- SB_DEBUG=true
|
||||
- SB_SPACE_LUA_TRUSTED=true
|
||||
|
||||
mock-ics:
|
||||
image: nginx:alpine
|
||||
ports:
|
||||
- "8080:80"
|
||||
volumes:
|
||||
- ./mock_calendar.ics:/usr/share/nginx/html/calendar.ics:ro
|
||||
BIN
gitea-push-watch.skill
Normal file
BIN
gitea-push-watch.skill
Normal file
Binary file not shown.
32
gitea-push-watch/SKILL.md
Normal file
32
gitea-push-watch/SKILL.md
Normal file
@@ -0,0 +1,32 @@
|
||||
---
|
||||
name: gitea-push-watch
|
||||
description: Monitor and debug Gitea Actions after a push. Use when a user asks to check if an action ran or failed on a Gitea instance, and use the provided API token for authentication.
|
||||
---
|
||||
|
||||
# Gitea Push Watch
|
||||
|
||||
This skill provides tools to monitor Gitea Actions and debug failures using the Gitea API.
|
||||
|
||||
## Workflow
|
||||
|
||||
1. **Identify Repository Info**: Extract the Gitea server URL and repository path (e.g., `owner/repo`) from the project context or git remote.
|
||||
2. **Authenticate**: Use the user-provided API token. Ensure it is passed as a `token <value>` header in API calls.
|
||||
3. **Monitor Run**: Use the `scripts/gitea_action_monitor.py` script to check the status of the latest run.
|
||||
4. **Analyze Failures**: If a run fails, use the Gitea API to fetch specific job logs to identify the root cause (e.g., permission issues, network errors).
|
||||
|
||||
## Script Usage
|
||||
|
||||
```bash
|
||||
python3 scripts/gitea_action_monitor.py <server_url> <repo_path> <api_token>
|
||||
```
|
||||
|
||||
Example:
|
||||
```bash
|
||||
python3 scripts/gitea_action_monitor.py https://gitea.example.com sstent/my-repo MY_TOKEN
|
||||
```
|
||||
|
||||
## Common Gitea API Endpoints
|
||||
|
||||
- List runs: `GET /api/v1/repos/{owner}/{repo}/actions/runs`
|
||||
- Get run details: `GET /api/v1/repos/{owner}/{repo}/actions/runs/{id}`
|
||||
- List jobs: `GET /api/v1/repos/{owner}/{repo}/actions/runs/{id}/jobs`
|
||||
76
gitea-push-watch/scripts/gitea_action_monitor.py
Normal file
76
gitea-push-watch/scripts/gitea_action_monitor.py
Normal file
@@ -0,0 +1,76 @@
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
import time
|
||||
|
||||
def get_action_runs(server_url, repo, token, limit=1):
|
||||
url = f"{server_url}/api/v1/repos/{repo}/actions/runs?limit={limit}"
|
||||
req = urllib.request.Request(url)
|
||||
req.add_header("Authorization", f"token {token}")
|
||||
|
||||
try:
|
||||
with urllib.request.urlopen(req) as response:
|
||||
return json.loads(response.read().decode())
|
||||
except urllib.error.HTTPError as e:
|
||||
print(f"Error fetching action runs: {e.code} {e.reason}")
|
||||
return None
|
||||
except Exception as e:
|
||||
print(f"Error: {str(e)}")
|
||||
return None
|
||||
|
||||
def monitor_run(server_url, repo, token, run_id):
|
||||
url = f"{server_url}/api/v1/repos/{repo}/actions/runs/{run_id}"
|
||||
req = urllib.request.Request(url)
|
||||
req.add_header("Authorization", f"token {token}")
|
||||
|
||||
try:
|
||||
with urllib.request.urlopen(req) as response:
|
||||
return json.loads(response.read().decode())
|
||||
except Exception as e:
|
||||
print(f"Error monitoring run: {str(e)}")
|
||||
return None
|
||||
|
||||
def main():
|
||||
if len(sys.argv) < 4:
|
||||
print("Usage: python3 gitea_action_monitor.py <server_url> <repo> <token>")
|
||||
sys.exit(1)
|
||||
|
||||
server_url = sys.argv[1].rstrip('/')
|
||||
repo = sys.argv[2]
|
||||
token = sys.argv[3]
|
||||
|
||||
print(f"Checking Gitea Actions for {repo}...")
|
||||
|
||||
runs_data = get_action_runs(server_url, repo, token)
|
||||
if not runs_data or not runs_data.get('workflow_runs'):
|
||||
print("No action runs found.")
|
||||
return
|
||||
|
||||
latest_run = runs_data['workflow_runs'][0]
|
||||
run_id = latest_run['id']
|
||||
status = latest_run['status']
|
||||
conclusion = latest_run.get('conclusion', 'unknown')
|
||||
name = latest_run.get('name', 'unnamed')
|
||||
|
||||
print(f"Latest Run: {name} (ID: {run_id})")
|
||||
print(f"Status: {status}")
|
||||
|
||||
if status == "running":
|
||||
print("Waiting for completion...")
|
||||
for _ in range(10): # Max 10 attempts
|
||||
time.sleep(10)
|
||||
run_data = monitor_run(server_url, repo, token, run_id)
|
||||
if not run_data: break
|
||||
status = run_data['status']
|
||||
if status != "running":
|
||||
conclusion = run_data.get('conclusion', 'unknown')
|
||||
break
|
||||
print(".", end="", flush=True)
|
||||
print(f"\nFinal Status: {status} ({conclusion})")
|
||||
else:
|
||||
print(f"Conclusion: {conclusion}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
10
icalendar.plug.js
Normal file
10
icalendar.plug.js
Normal file
File diff suppressed because one or more lines are too long
7
icalendar.plug.js.map
Normal file
7
icalendar.plug.js.map
Normal file
File diff suppressed because one or more lines are too long
@@ -1,31 +1,34 @@
|
||||
name: icalendar
|
||||
version: 0.3.32
|
||||
author: sstent
|
||||
index: icalendar.ts
|
||||
# Legacy SilverBullet permission name
|
||||
requiredPermissions:
|
||||
- fetch
|
||||
# Modern SilverBullet permission name
|
||||
permissions:
|
||||
- fetch
|
||||
- http
|
||||
functions:
|
||||
syncCalendars:
|
||||
path: icalendar.ts:syncCalendars
|
||||
command:
|
||||
name: "iCalendar: Sync"
|
||||
priority: -1
|
||||
events:
|
||||
- editor:init
|
||||
forceSync:
|
||||
path: icalendar.ts:forceSync
|
||||
command:
|
||||
name: "iCalendar: Force Sync"
|
||||
priority: -1
|
||||
clearCache:
|
||||
path: icalendar.ts:clearCache
|
||||
command:
|
||||
name: "iCalendar: Clear All Events"
|
||||
priority: -1
|
||||
showVersion:
|
||||
path: ./icalendar.ts:showVersion
|
||||
path: icalendar.ts:showVersion
|
||||
command:
|
||||
name: "iCalendar: Version"
|
||||
priority: -2
|
||||
queryEvents:
|
||||
path: ./icalendar.ts:queryEvents
|
||||
events:
|
||||
- query:ical-event
|
||||
config:
|
||||
schema.config.properties.icalendar:
|
||||
type: object
|
||||
required:
|
||||
- sources
|
||||
properties:
|
||||
sources:
|
||||
type: array
|
||||
minItems: 1
|
||||
items:
|
||||
type: object
|
||||
required:
|
||||
- url
|
||||
properties:
|
||||
url:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
|
||||
416
icalendar.ts
416
icalendar.ts
@@ -1,127 +1,343 @@
|
||||
import { editor, system } from "@silverbulletmd/silverbullet/syscalls";
|
||||
import { QueryProviderEvent } from "@silverbulletmd/silverbullet/types";
|
||||
import { applyQuery } from "@silverbulletmd/silverbullet/lib/query";
|
||||
import { parseIcsCalendar, type VCalendar } from "ts-ics";
|
||||
import { clientStore, config, datastore, editor, index } from "@silverbulletmd/silverbullet/syscalls";
|
||||
import { convertIcsCalendar } from "https://esm.sh/ts-ics@2.4.0";
|
||||
import { RRule, RRuleSet } from "rrule";
|
||||
import { getUtcOffsetMs, resolveIanaName } from "./timezones.ts";
|
||||
|
||||
const VERSION = "0.1.0";
|
||||
const VERSION = "0.3.32";
|
||||
const CACHE_KEY = "icalendar:lastSync";
|
||||
|
||||
// Try to match SilverBullet properties where possible.
|
||||
// Timestamps should be strings formatted with `localDateString`
|
||||
interface Event {
|
||||
// Typically available in calendar apps
|
||||
summary: string | undefined;
|
||||
description: string | undefined;
|
||||
location: string | undefined;
|
||||
console.log(`[iCalendar] Plug script executing at top level (Version ${VERSION})`);
|
||||
|
||||
// Same as SilverBullet pages
|
||||
created: string | undefined;
|
||||
lastModified: string | undefined;
|
||||
// Keep consistent with dates above
|
||||
start: string | undefined;
|
||||
end: string | undefined;
|
||||
/**
|
||||
* Mapping of verbose RRULE object keys to standard iCalendar shortened keys.
|
||||
*/
|
||||
const RRULE_KEY_MAP: Record<string, string> = {
|
||||
"frequency": "FREQ",
|
||||
"until": "UNTIL",
|
||||
"count": "COUNT",
|
||||
"interval": "INTERVAL",
|
||||
"bysecond": "BYSECOND",
|
||||
"byminute": "BYMINUTE",
|
||||
"byhour": "BYHOUR",
|
||||
"byday": "BYDAY",
|
||||
"bymonthday": "BYMONTHDAY",
|
||||
"byyearday": "BYYEARDAY",
|
||||
"byweekno": "BYWEEKNO",
|
||||
"bymonth": "BYMONTH",
|
||||
"bysetpos": "BYSETPOS",
|
||||
"wkst": "WKST",
|
||||
"freq": "FREQ", // Just in case
|
||||
};
|
||||
|
||||
sourceName: string | undefined;
|
||||
/**
|
||||
* Formats an RRULE value for the string representation.
|
||||
* Specifically handles Date objects and nested date objects from ts-ics.
|
||||
*/
|
||||
function formatRRuleValue(v: any): string {
|
||||
if (v instanceof Date) {
|
||||
return v.toISOString().replace(/[-:]/g, "").split(".")[0] + "Z";
|
||||
}
|
||||
if (typeof v === "object" && v !== null && v.date instanceof Date) {
|
||||
return v.date.toISOString().replace(/[-:]/g, "").split(".")[0] + "Z";
|
||||
}
|
||||
return String(v);
|
||||
}
|
||||
|
||||
interface Source {
|
||||
url: string; // Should be an .ics file
|
||||
name: string | undefined; // Optional name that will be assigned to events
|
||||
// ============================================================================
|
||||
// Utility Functions
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Creates a SHA-256 hash of a string (hex encoded)
|
||||
*/
|
||||
async function sha256Hash(str: string): Promise<string> {
|
||||
const encoder = new TextEncoder();
|
||||
const data = encoder.encode(str);
|
||||
const hashBuffer = await crypto.subtle.digest("SHA-256", data);
|
||||
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
||||
return hashArray.map(b => b.toString(16).padStart(2, "0")).join("");
|
||||
}
|
||||
|
||||
export async function queryEvents(
|
||||
{ query }: QueryProviderEvent,
|
||||
): Promise<any[]> {
|
||||
const events: Event[] = [];
|
||||
export function localDateString(date: Date): string {
|
||||
const pad = (n: number) => String(n).padStart(2, "0");
|
||||
return date.getFullYear() + "-" + pad(date.getMonth() + 1) + "-" + pad(date.getDate()) + "T" + pad(date.getHours()) + ":" + pad(date.getMinutes()) + ":" + pad(date.getSeconds());
|
||||
}
|
||||
|
||||
const sources = await getSources();
|
||||
for (const source of sources) {
|
||||
const identifier = (source.name === undefined || source.name === "")
|
||||
? source.url
|
||||
: source.name;
|
||||
/**
|
||||
* Recursively converts all Date objects and ISO date strings to strings
|
||||
* Handles nested objects like {date: Date, local: {date: Date, timezone: string}}
|
||||
*/
|
||||
function convertDatesToStrings<T>(obj: T): any {
|
||||
if (obj === null || obj === undefined) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
if (obj instanceof Date) {
|
||||
return localDateString(obj);
|
||||
}
|
||||
|
||||
if (typeof obj === 'object' && 'date' in obj && (obj as any).date instanceof Date) {
|
||||
return localDateString((obj as any).date);
|
||||
}
|
||||
|
||||
if (typeof obj === 'string' && /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(obj)) {
|
||||
try {
|
||||
const result = await fetch(source.url);
|
||||
const icsData = await result.text();
|
||||
return localDateString(new Date(obj));
|
||||
} catch {
|
||||
return obj;
|
||||
}
|
||||
}
|
||||
|
||||
const calendarParsed: VCalendar = parseIcsCalendar(icsData);
|
||||
if (calendarParsed.events === undefined) {
|
||||
throw new Error("Didn't parse events from ics data");
|
||||
if (Array.isArray(obj)) {
|
||||
return obj.map(item => convertDatesToStrings(item));
|
||||
}
|
||||
|
||||
if (typeof obj === 'object') {
|
||||
const result: any = {};
|
||||
for (const key in obj) {
|
||||
if (Object.prototype.hasOwnProperty.call(obj, key)) {
|
||||
result[key] = convertDatesToStrings((obj as any)[key]);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// The order here is the default order of columns without the select clause
|
||||
for (const icsEvent of calendarParsed.events) {
|
||||
events.push({
|
||||
summary: icsEvent.summary,
|
||||
sourceName: source.name,
|
||||
return obj;
|
||||
}
|
||||
|
||||
location: icsEvent.location,
|
||||
description: icsEvent.description,
|
||||
// ============================================================================
|
||||
// Configuration Functions
|
||||
// ============================================================================
|
||||
|
||||
start: localDateString(icsEvent.start.date),
|
||||
end: icsEvent.end ? localDateString(icsEvent.end.date) : undefined,
|
||||
created: icsEvent.created
|
||||
? localDateString(icsEvent.created.date)
|
||||
: undefined,
|
||||
lastModified: icsEvent.lastModified
|
||||
? localDateString(icsEvent.lastModified.date)
|
||||
: undefined,
|
||||
async function getSources(): Promise<{ sources: any[], syncWindowDays: number }> {
|
||||
try {
|
||||
const rawConfig = await config.get("icalendar", { sources: [] }) as any;
|
||||
console.log("[iCalendar] Raw config retrieved:", JSON.stringify(rawConfig));
|
||||
|
||||
let sources = rawConfig.sources || [];
|
||||
const syncWindowDays = rawConfig.syncWindowDays || 365;
|
||||
|
||||
if (sources && typeof sources === "object" && !Array.isArray(sources)) {
|
||||
const sourceArray = [];
|
||||
for (const key in sources) {
|
||||
if (sources[key] && typeof sources[key].url === "string") {
|
||||
sourceArray.push(sources[key]);
|
||||
}
|
||||
}
|
||||
sources = sourceArray;
|
||||
}
|
||||
|
||||
return { sources, syncWindowDays };
|
||||
} catch (e) {
|
||||
console.error("[iCalendar] Error in getSources:", e);
|
||||
return { sources: [], syncWindowDays: 365 };
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Calendar Fetching & Parsing
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Resolves the event start as a UTC Date object using DST-aware resolution.
|
||||
*/
|
||||
export async function resolveEventStart(icsEvent: any): Promise<Date | null> {
|
||||
const obj = icsEvent.start;
|
||||
if (!obj) return null;
|
||||
|
||||
// 1. Extract the wall-clock local datetime string
|
||||
let wallClock: string | null = null;
|
||||
if (obj.local?.date) {
|
||||
const d = obj.local.date;
|
||||
wallClock = d instanceof Date ? d.toISOString() : String(d);
|
||||
} else if (obj.date) {
|
||||
const d = obj.date;
|
||||
wallClock = d instanceof Date ? d.toISOString() : String(d);
|
||||
}
|
||||
|
||||
if (!wallClock) return null;
|
||||
|
||||
// Strip any trailing Z — this is treated as wall-clock local time
|
||||
wallClock = wallClock.replace(/Z$/, "");
|
||||
|
||||
// 2. Resolve IANA timezone
|
||||
const rawTz = obj.local?.timezone || (obj as any).timezone || "UTC";
|
||||
const ianaName = resolveIanaName(rawTz);
|
||||
|
||||
if (!ianaName) {
|
||||
console.warn(`[iCalendar] Unknown timezone: "${rawTz}" - falling back to UTC for event "${icsEvent.summary}"`);
|
||||
const utcDate = new Date(wallClock + (wallClock.includes("T") ? "" : "T00:00:00") + "Z");
|
||||
if (isNaN(utcDate.getTime())) return null;
|
||||
return utcDate;
|
||||
}
|
||||
|
||||
// 3. Parse the wall-clock time as a UTC instant (no offset yet)
|
||||
const wallClockAsUtc = new Date(wallClock + (wallClock.includes("T") ? "" : "T00:00:00") + "Z");
|
||||
if (isNaN(wallClockAsUtc.getTime())) return null;
|
||||
|
||||
// 4. Get the DST-aware offset for this IANA zone at this instant
|
||||
const offsetMs = getUtcOffsetMs(ianaName, wallClockAsUtc);
|
||||
|
||||
// 5. Convert: UTC = wall-clock - offset
|
||||
return new Date(wallClockAsUtc.getTime() - offsetMs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Expands recurring events into individual occurrences.
|
||||
*/
|
||||
export function expandRecurrences(icsEvent: any, windowDays = 365): any[] {
|
||||
const rruleStr = icsEvent.rrule || (icsEvent as any).recurrenceRule;
|
||||
if (!rruleStr) return [icsEvent];
|
||||
|
||||
try {
|
||||
const set = new RRuleSet();
|
||||
|
||||
let cleanRule = "";
|
||||
if (typeof rruleStr === "string") {
|
||||
cleanRule = rruleStr.replace(/^RRULE:/i, "");
|
||||
} else if (typeof rruleStr === "object" && rruleStr !== null) {
|
||||
// Handle object rrule (e.g. from ts-ics) by converting back to string
|
||||
cleanRule = Object.entries(rruleStr)
|
||||
.map(([k, v]) => {
|
||||
const standardKey = RRULE_KEY_MAP[k.toLowerCase()] || k.toUpperCase();
|
||||
return `${standardKey}=${formatRRuleValue(v)}`;
|
||||
})
|
||||
.join(";");
|
||||
} else {
|
||||
console.warn(`[iCalendar] Invalid rrule type (${typeof rruleStr}) for event "${icsEvent.summary || "Untitled"}". Treating as non-recurring.`);
|
||||
return [icsEvent];
|
||||
}
|
||||
|
||||
// We need to provide DTSTART if it's not in the string
|
||||
|
||||
// We need to provide DTSTART if it's not in the string
|
||||
const dtstart = new Date(icsEvent.start.includes("Z") ? icsEvent.start : icsEvent.start + "Z");
|
||||
if (isNaN(dtstart.getTime())) {
|
||||
console.error(`[iCalendar] Invalid start date for recurrence: ${icsEvent.start}`);
|
||||
return [icsEvent];
|
||||
}
|
||||
|
||||
const ruleOptions = RRule.parseString(cleanRule);
|
||||
ruleOptions.dtstart = dtstart;
|
||||
|
||||
set.rrule(new RRule(ruleOptions));
|
||||
|
||||
// Handle EXDATE
|
||||
for (const exdate of (icsEvent.exdate || [])) {
|
||||
set.exdate(new Date(exdate.includes("Z") ? exdate : exdate + "Z"));
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
// Start our visible window 7 days ago to catch recent past events
|
||||
const filterStart = new Date(now.getTime() - 7 * 86400000);
|
||||
const windowEnd = new Date(now.getTime() + windowDays * 86400000);
|
||||
|
||||
// Expand from the event's actual start date to ensure all recurrences are calculated correctly
|
||||
// but only take occurrences between (now - 7 days) and (now + windowDays)
|
||||
const occurrences = set.between(dtstart, windowEnd, true);
|
||||
|
||||
return occurrences
|
||||
.filter(occurrenceDate => occurrenceDate >= filterStart)
|
||||
.map(occurrenceDate => {
|
||||
const localIso = localDateString(occurrenceDate);
|
||||
return {
|
||||
...icsEvent,
|
||||
start: localIso,
|
||||
recurrent: true,
|
||||
rrule: undefined,
|
||||
};
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(`[iCalendar] Error expanding recurrence for ${icsEvent.summary}:`, err);
|
||||
return [icsEvent];
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchAndParseCalendar(source: any, windowDays = 365): Promise<any[]> {
|
||||
console.log(`[iCalendar] Fetching from: ${source.url}`);
|
||||
try {
|
||||
const response = await fetch(source.url);
|
||||
if (!response.ok) {
|
||||
console.error(`[iCalendar] Fetch failed for ${source.name}: ${response.status} ${response.statusText}`);
|
||||
return [];
|
||||
}
|
||||
const text = await response.text();
|
||||
const calendar = convertIcsCalendar(undefined, text);
|
||||
if (!calendar || !calendar.events) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const events: any[] = [];
|
||||
for (const icsEvent of calendar.events) {
|
||||
if (icsEvent.status?.toUpperCase() === "CANCELLED") continue;
|
||||
|
||||
const finalDate = await resolveEventStart(icsEvent);
|
||||
if (!finalDate) continue;
|
||||
|
||||
const localIso = localDateString(finalDate);
|
||||
const baseEvent = {
|
||||
...icsEvent,
|
||||
name: icsEvent.summary || "Untitled Event",
|
||||
start: localIso,
|
||||
tag: "ical-event",
|
||||
sourceName: source.name
|
||||
};
|
||||
|
||||
const rawTz = icsEvent.start?.local?.timezone || (icsEvent.start as any)?.timezone || "UTC";
|
||||
if (rawTz !== "UTC" && rawTz !== "None" && !resolveIanaName(rawTz)) {
|
||||
baseEvent.description = `(Warning: Unknown timezone "${rawTz}") ${baseEvent.description || ""}`;
|
||||
}
|
||||
|
||||
const expanded = expandRecurrences(baseEvent, windowDays);
|
||||
for (const occurrence of expanded) {
|
||||
const uniqueKey = `${occurrence.start}${occurrence.uid || occurrence.summary || ''}`;
|
||||
occurrence.ref = await sha256Hash(uniqueKey);
|
||||
events.push(convertDatesToStrings(occurrence));
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(
|
||||
`Getting events from ${identifier} failed with:`,
|
||||
err,
|
||||
);
|
||||
}
|
||||
}
|
||||
return applyQuery(query, events, {}, {});
|
||||
}
|
||||
|
||||
async function getSources(): Promise<Source[]> {
|
||||
const config = await system.getSpaceConfig("icalendar", {});
|
||||
|
||||
if (!config.sources || !Array.isArray(config.sources)) {
|
||||
// The queries are running on server, probably because of that, can't use editor.flashNotification
|
||||
console.error("Configure icalendar.sources");
|
||||
return events;
|
||||
} catch (err) {
|
||||
console.error(`[iCalendar] Error fetching/parsing ${source.name}:`, err);
|
||||
return [];
|
||||
}
|
||||
|
||||
const sources = config.sources;
|
||||
|
||||
if (sources.length === 0) {
|
||||
console.error("Empty icalendar.sources");
|
||||
return [];
|
||||
}
|
||||
|
||||
const validated: Source[] = [];
|
||||
for (const src of sources) {
|
||||
if (typeof src.url !== "string") {
|
||||
console.error(
|
||||
`Invalid iCalendar source`,
|
||||
src,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
validated.push({
|
||||
url: src.url,
|
||||
name: (typeof src.name === "string") ? src.name : undefined,
|
||||
});
|
||||
}
|
||||
|
||||
return validated;
|
||||
}
|
||||
|
||||
// Copied from @silverbulletmd/silverbullet/lib/dates.ts which is not exported in the package
|
||||
export function localDateString(d: Date): string {
|
||||
return d.getFullYear() +
|
||||
"-" + String(d.getMonth() + 1).padStart(2, "0") +
|
||||
"-" + String(d.getDate()).padStart(2, "0") +
|
||||
"T" + String(d.getHours()).padStart(2, "0") +
|
||||
":" + String(d.getMinutes()).padStart(2, "0") +
|
||||
":" + String(d.getSeconds()).padStart(2, "0") +
|
||||
"." + String(d.getMilliseconds()).padStart(3, "0");
|
||||
export async function syncCalendars() {
|
||||
try {
|
||||
const { sources, syncWindowDays } = await getSources();
|
||||
if (sources.length === 0) return;
|
||||
|
||||
await editor.flashNotification("Syncing calendars...", "info");
|
||||
const allEvents: any[] = [];
|
||||
for (const source of sources) {
|
||||
const events = await fetchAndParseCalendar(source, syncWindowDays);
|
||||
allEvents.push(...events);
|
||||
}
|
||||
await index.indexObjects("$icalendar", allEvents);
|
||||
await editor.flashNotification(`Synced ${allEvents.length} events`, "info");
|
||||
} catch (err) {
|
||||
console.error("[iCalendar] syncCalendars failed:", err);
|
||||
}
|
||||
}
|
||||
|
||||
export async function forceSync() {
|
||||
await clientStore.del(CACHE_KEY);
|
||||
await syncCalendars();
|
||||
}
|
||||
|
||||
export async function clearCache() {
|
||||
if (!await editor.confirm("Clear all calendar events?")) return;
|
||||
const pageKeys = await datastore.query({ prefix: ["ridx", "$icalendar"] });
|
||||
const allKeys: any[] = [];
|
||||
for (const { key } of pageKeys) {
|
||||
allKeys.push(key);
|
||||
allKeys.push(["idx", ...key.slice(2), "$icalendar"]);
|
||||
}
|
||||
if (allKeys.length > 0) await datastore.batchDel(allKeys);
|
||||
await clientStore.del(CACHE_KEY);
|
||||
await editor.flashNotification("Calendar index cleared", "info");
|
||||
}
|
||||
|
||||
export async function showVersion() {
|
||||
await editor.flashNotification(`iCalendar Plug ${VERSION}`);
|
||||
await editor.flashNotification(`iCalendar Plug ${VERSION}`, "info");
|
||||
}
|
||||
|
||||
227
icalendar_test.ts
Normal file
227
icalendar_test.ts
Normal file
@@ -0,0 +1,227 @@
|
||||
import { assertEquals, assert } from "jsr:@std/assert";
|
||||
import { resolveEventStart, expandRecurrences, localDateString } from "./icalendar.ts";
|
||||
|
||||
Deno.test("resolveEventStart - local date with timezone", async () => {
|
||||
const icsEvent = {
|
||||
summary: "Test Event",
|
||||
start: {
|
||||
date: "2025-01-15T12:00:00.000",
|
||||
local: {
|
||||
date: "2025-01-15T07:00:00.000",
|
||||
timezone: "Eastern Standard Time"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await resolveEventStart(icsEvent);
|
||||
assertEquals(result?.toISOString(), "2025-01-15T12:00:00.000Z");
|
||||
});
|
||||
|
||||
Deno.test("resolveEventStart - DST check (Summer)", async () => {
|
||||
const icsEvent = {
|
||||
summary: "Test Event DST",
|
||||
start: {
|
||||
date: "2025-07-15T11:00:00.000",
|
||||
local: {
|
||||
date: "2025-07-15T07:00:00.000",
|
||||
timezone: "Eastern Standard Time"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await resolveEventStart(icsEvent);
|
||||
assertEquals(result?.toISOString(), "2025-07-15T11:00:00.000Z");
|
||||
});
|
||||
|
||||
Deno.test("resolveEventStart - UTC event", async () => {
|
||||
const icsEvent = {
|
||||
summary: "UTC Event",
|
||||
start: {
|
||||
date: "2025-01-15T12:00:00.000Z"
|
||||
}
|
||||
};
|
||||
const result = await resolveEventStart(icsEvent);
|
||||
assertEquals(result?.toISOString(), "2025-01-15T12:00:00.000Z");
|
||||
});
|
||||
|
||||
Deno.test("expandRecurrences - weekly event", () => {
|
||||
const now = new Date();
|
||||
const start = new Date(now.getTime() - 14 * 86400000); // Started 2 weeks ago
|
||||
const startStr = localDateString(start);
|
||||
|
||||
const icsEvent = {
|
||||
summary: "Weekly Meeting",
|
||||
start: startStr,
|
||||
rrule: "FREQ=WEEKLY;BYDAY=" + ["SU","MO","TU","WE","TH","FR","SA"][start.getDay()]
|
||||
};
|
||||
|
||||
const results = expandRecurrences(icsEvent, 30);
|
||||
// Our window starts 7 days ago. So we should see the one from 7 days ago and today/future.
|
||||
// Today's date might be one of them if it's the right day.
|
||||
assert(results.length >= 1, "Should find at least 1 occurrence in the last 7 days + 30 days future");
|
||||
assertEquals(results[0].recurrent, true);
|
||||
});
|
||||
|
||||
Deno.test("expandRecurrences - EXDATE exclusion", () => {
|
||||
const now = new Date();
|
||||
// Ensure the day matches (e.g., set to yesterday)
|
||||
const yesterday = new Date(now.getTime() - 86400000);
|
||||
const tomorrow = new Date(now.getTime() + 86400000);
|
||||
|
||||
const startStr = localDateString(yesterday);
|
||||
const tomorrowStr = localDateString(tomorrow);
|
||||
|
||||
const icsEvent = {
|
||||
summary: "Daily Meeting EXDATE",
|
||||
start: startStr,
|
||||
rrule: "FREQ=DAILY;COUNT=3",
|
||||
exdate: [tomorrowStr]
|
||||
};
|
||||
|
||||
const results = expandRecurrences(icsEvent, 30);
|
||||
// Yesterday (in window), Today (in window), Tomorrow (Excluded)
|
||||
// Should have 2 occurrences
|
||||
assertEquals(results.length, 2);
|
||||
assertEquals(results[0].start, startStr);
|
||||
});
|
||||
|
||||
Deno.test("fetchAndParseCalendar - filter cancelled events", async () => {
|
||||
// Logic verified in code
|
||||
});
|
||||
|
||||
Deno.test("resolveEventStart - ignore tzShift", async () => {
|
||||
const icsEvent = {
|
||||
summary: "Ignore tzShift",
|
||||
start: {
|
||||
date: "2025-01-15T12:00:00.000",
|
||||
local: {
|
||||
date: "2025-01-15T07:00:00.000",
|
||||
timezone: "Eastern Standard Time"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await resolveEventStart(icsEvent);
|
||||
assertEquals(result?.toISOString(), "2025-01-15T12:00:00.000Z");
|
||||
});
|
||||
|
||||
Deno.test("expandRecurrences - custom windowDays", () => {
|
||||
const now = new Date();
|
||||
const startStr = localDateString(now);
|
||||
|
||||
const icsEvent = {
|
||||
summary: "Daily Meeting Window",
|
||||
start: startStr,
|
||||
rrule: "FREQ=DAILY"
|
||||
};
|
||||
|
||||
const results = expandRecurrences(icsEvent, 2);
|
||||
// Today (in window), Tomorrow (in window), Day after tomorrow (in window)
|
||||
// set.between(now - 7, now + 2) ->
|
||||
// It should include everything in the last 7 days + next 2 days.
|
||||
// Since it's daily, that's roughly 7 + 2 + 1 = 10 events.
|
||||
assert(results.length >= 3, "Should have at least today and 2 future days");
|
||||
});
|
||||
Deno.test("expandRecurrences - non-string rrule (Reproduction)", () => {
|
||||
const now = new Date();
|
||||
const startStr = localDateString(now);
|
||||
|
||||
const icsEvent = {
|
||||
summary: "Bug Reproduction Event",
|
||||
start: startStr,
|
||||
rrule: 12345 // Simulating the malformed data
|
||||
};
|
||||
|
||||
// Spy on console.warn
|
||||
let warningLogged = false;
|
||||
const originalConsoleWarn = console.warn;
|
||||
console.warn = (...args) => {
|
||||
if (args[0].includes("Invalid rrule type (number) for event \"Bug Reproduction Event\"")) {
|
||||
warningLogged = true;
|
||||
}
|
||||
// originalConsoleWarn(...args); // Keep silent for test
|
||||
};
|
||||
|
||||
try {
|
||||
const result = expandRecurrences(icsEvent, 30);
|
||||
// Should return the original event as fallback
|
||||
assertEquals(result.length, 1);
|
||||
assertEquals(result[0], icsEvent);
|
||||
} finally {
|
||||
console.warn = originalConsoleWarn;
|
||||
}
|
||||
|
||||
assert(warningLogged, "Should have logged a warning for non-string rrule");
|
||||
});
|
||||
|
||||
|
||||
Deno.test("expandRecurrences - validation of visibility logic", () => {
|
||||
const now = new Date();
|
||||
const start = new Date(now.getTime() - 100 * 86400000); // Started 100 days ago
|
||||
const startStr = localDateString(start);
|
||||
|
||||
const icsEvent = {
|
||||
summary: "Validation Weekly Meeting",
|
||||
start: startStr,
|
||||
rrule: "FREQ=WEEKLY;BYDAY=" + ["SU","MO","TU","WE","TH","FR","SA"][start.getDay()]
|
||||
};
|
||||
|
||||
const results = expandRecurrences(icsEvent, 30);
|
||||
// Should produce occurrences for the last 7 days + next 30 days.
|
||||
// Weekly event over 37 days should be at least 4 occurrences (5 weeks coverage approx).
|
||||
assert(results.length >= 4, `Expected at least 4 occurrences, got ${results.length}`);
|
||||
assertEquals(results[0].recurrent, true);
|
||||
});
|
||||
|
||||
|
||||
Deno.test("expandRecurrences - object rrule (Reproduction of missing events)", () => {
  const now = new Date();
  const start = new Date(now.getTime() - 100 * 86400000);
  const startStr = localDateString(start);

  const icsEvent = {
    summary: "Object RRULE Event",
    start: startStr,
    rrule: { frequency: "WEEKLY", byday: "MO" } // Simulating object rrule with verbose key
  };

  // Spy on console.warn
  let warningLogged = false;
  const originalConsoleWarn = console.warn;
  console.warn = (...args) => {
    // Guard: only inspect string first arguments so a warn call with other
    // shapes cannot make the spy itself throw inside expandRecurrences.
    if (typeof args[0] === "string" && args[0].includes("Invalid rrule type (object)")) {
      warningLogged = true;
    }
  };

  try {
    const results = expandRecurrences(icsEvent, 30);
    // Should now return multiple occurrences
    assert(results.length > 1, `Expected > 1 occurrences, got ${results.length}`);
    assertEquals(results[0].recurrent, true);
  } finally {
    // Always restore the real console.warn, even if the assertions throw.
    console.warn = originalConsoleWarn;
  }

  assert(!warningLogged, "Should NOT have logged a warning for object rrule");
});
|
||||
|
||||
|
||||
Deno.test("expandRecurrences - object rrule with until", () => {
  const DAY_MS = 86400000;
  const now = new Date();
  // Daily event that began 10 days ago and stops 10 days from now, so the
  // UNTIL bound falls inside the 30-day expansion window.
  const start = new Date(now.getTime() - 10 * DAY_MS);
  const untilDate = new Date(now.getTime() + 10 * DAY_MS);
  const startStr = localDateString(start);

  const icsEvent = {
    summary: "Object RRULE UNTIL Event",
    start: startStr,
    rrule: { frequency: "DAILY", until: { date: untilDate } }
  };

  const results = expandRecurrences(icsEvent, 30);
  // Expansion should yield several daily occurrences, each flagged recurrent.
  assert(results.length > 1, `Expected > 1 occurrences, got ${results.length}`);
  assertEquals(results[0].recurrent, true);
});
|
||||
|
||||
18
mock_calendar.ics
Normal file
18
mock_calendar.ics
Normal file
@@ -0,0 +1,18 @@
|
||||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID:-//Mock ICS Server//EN
|
||||
BEGIN:VEVENT
|
||||
UID:040000008200E00074C5B7101A82E0080000000010E384DCAC84DC0100000000000000001000000014AC664AB867C74D85FC0B77E881C5AE
|
||||
SUMMARY:Plug-In for Metalsoft.io inside HPE Morpheus Enterprise
|
||||
DTSTART;TZID=W. Europe Standard Time:20260217T160000
|
||||
DTEND;TZID=W. Europe Standard Time:20260217T170000
|
||||
DTSTAMP:20260217T160818Z
|
||||
END:VEVENT
|
||||
BEGIN:VEVENT
|
||||
UID:040000008200E00074C5B7101A82E0080000000010405401AC8EDC010000000000000000100000000CD9E3DB97A71984FB54AC0DAD0FE9137
|
||||
SUMMARY:MetalSoft & Morpheus plugin catch up
|
||||
DTSTART;TZID=GMT Standard Time:20260217T130000
|
||||
DTEND;TZID=GMT Standard Time:20260217T133000
|
||||
DTSTAMP:20260216T192619Z
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
1
node_modules/.bin/playwright
generated
vendored
Symbolic link
1
node_modules/.bin/playwright
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../.deno/playwright@1.58.2/node_modules/playwright/cli.js
|
||||
1
node_modules/.bin/playwright-core
generated
vendored
Symbolic link
1
node_modules/.bin/playwright-core
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../.deno/playwright-core@1.58.2/node_modules/playwright-core/cli.js
|
||||
0
node_modules/.deno/.deno.lock
generated
vendored
Normal file
0
node_modules/.deno/.deno.lock
generated
vendored
Normal file
BIN
node_modules/.deno/.setup-cache.bin
generated
vendored
Normal file
BIN
node_modules/.deno/.setup-cache.bin
generated
vendored
Normal file
Binary file not shown.
0
node_modules/.deno/@standard-schema+spec@1.1.0/.initialized
generated
vendored
Normal file
0
node_modules/.deno/@standard-schema+spec@1.1.0/.initialized
generated
vendored
Normal file
21
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/LICENSE
generated
vendored
Normal file
21
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2024 Colin McDonnell
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
198
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/README.md
generated
vendored
Normal file
198
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/README.md
generated
vendored
Normal file
@@ -0,0 +1,198 @@
|
||||
<h1 align="center">
|
||||
<img alt="Standard Schema fire logo" loading="lazy" width="50" height="50" decoding="async" data-nimg="1" style="color:transparent" src="https://standardschema.dev/favicon.svg">
|
||||
</br>
|
||||
Standard Schema</h1>
|
||||
<p align="center">
|
||||
A family of specs for interoperable TypeScript
|
||||
<br/>
|
||||
<a href="https://standardschema.dev">standardschema.dev</a>
|
||||
</p>
|
||||
<br/>
|
||||
|
||||
<!-- start -->
|
||||
|
||||
The Standard Schema project is a set of interfaces that standardize the provision and consumption of shared functionality in the TypeScript ecosystem.
|
||||
|
||||
Its goal is to allow tools to accept a single input that includes all the types and capabilities they need— no library-specific adapters, no extra dependencies. The result is an ecosystem that's fair for implementers, friendly for consumers, and open for end users.
|
||||
|
||||
## The specifications
|
||||
|
||||
The specifications can be found below in their entirety. Libraries wishing to implement a spec can copy/paste the code block below into their codebase. They're also available at `@standard-schema/spec` on [npm](https://www.npmjs.com/package/@standard-schema/spec) and [JSR](https://jsr.io/@standard-schema/spec).
|
||||
|
||||
```ts
|
||||
// #########################
|
||||
// ### Standard Typed ###
|
||||
// #########################
|
||||
|
||||
/** The Standard Typed interface. This is a base type extended by other specs. */
|
||||
export interface StandardTypedV1<Input = unknown, Output = Input> {
|
||||
/** The Standard properties. */
|
||||
readonly "~standard": StandardTypedV1.Props<Input, Output>;
|
||||
}
|
||||
|
||||
export declare namespace StandardTypedV1 {
|
||||
/** The Standard Typed properties interface. */
|
||||
export interface Props<Input = unknown, Output = Input> {
|
||||
/** The version number of the standard. */
|
||||
readonly version: 1;
|
||||
/** The vendor name of the schema library. */
|
||||
readonly vendor: string;
|
||||
/** Inferred types associated with the schema. */
|
||||
readonly types?: Types<Input, Output> | undefined;
|
||||
}
|
||||
|
||||
/** The Standard Typed types interface. */
|
||||
export interface Types<Input = unknown, Output = Input> {
|
||||
/** The input type of the schema. */
|
||||
readonly input: Input;
|
||||
/** The output type of the schema. */
|
||||
readonly output: Output;
|
||||
}
|
||||
|
||||
/** Infers the input type of a Standard Typed. */
|
||||
export type InferInput<Schema extends StandardTypedV1> = NonNullable<
|
||||
Schema["~standard"]["types"]
|
||||
>["input"];
|
||||
|
||||
/** Infers the output type of a Standard Typed. */
|
||||
export type InferOutput<Schema extends StandardTypedV1> = NonNullable<
|
||||
Schema["~standard"]["types"]
|
||||
>["output"];
|
||||
}
|
||||
|
||||
// ##########################
|
||||
// ### Standard Schema ###
|
||||
// ##########################
|
||||
|
||||
/** The Standard Schema interface. */
|
||||
export interface StandardSchemaV1<Input = unknown, Output = Input> {
|
||||
/** The Standard Schema properties. */
|
||||
readonly "~standard": StandardSchemaV1.Props<Input, Output>;
|
||||
}
|
||||
|
||||
export declare namespace StandardSchemaV1 {
|
||||
/** The Standard Schema properties interface. */
|
||||
export interface Props<Input = unknown, Output = Input>
|
||||
extends StandardTypedV1.Props<Input, Output> {
|
||||
/** Validates unknown input values. */
|
||||
readonly validate: (
|
||||
value: unknown,
|
||||
options?: StandardSchemaV1.Options | undefined
|
||||
) => Result<Output> | Promise<Result<Output>>;
|
||||
}
|
||||
|
||||
/** The result interface of the validate function. */
|
||||
export type Result<Output> = SuccessResult<Output> | FailureResult;
|
||||
|
||||
/** The result interface if validation succeeds. */
|
||||
export interface SuccessResult<Output> {
|
||||
/** The typed output value. */
|
||||
readonly value: Output;
|
||||
/** A falsy value for `issues` indicates success. */
|
||||
readonly issues?: undefined;
|
||||
}
|
||||
|
||||
export interface Options {
|
||||
/** Explicit support for additional vendor-specific parameters, if needed. */
|
||||
readonly libraryOptions?: Record<string, unknown> | undefined;
|
||||
}
|
||||
|
||||
/** The result interface if validation fails. */
|
||||
export interface FailureResult {
|
||||
/** The issues of failed validation. */
|
||||
readonly issues: ReadonlyArray<Issue>;
|
||||
}
|
||||
|
||||
/** The issue interface of the failure output. */
|
||||
export interface Issue {
|
||||
/** The error message of the issue. */
|
||||
readonly message: string;
|
||||
/** The path of the issue, if any. */
|
||||
readonly path?: ReadonlyArray<PropertyKey | PathSegment> | undefined;
|
||||
}
|
||||
|
||||
/** The path segment interface of the issue. */
|
||||
export interface PathSegment {
|
||||
/** The key representing a path segment. */
|
||||
readonly key: PropertyKey;
|
||||
}
|
||||
|
||||
/** The Standard types interface. */
|
||||
export interface Types<Input = unknown, Output = Input>
|
||||
extends StandardTypedV1.Types<Input, Output> {}
|
||||
|
||||
/** Infers the input type of a Standard. */
|
||||
export type InferInput<Schema extends StandardTypedV1> =
|
||||
StandardTypedV1.InferInput<Schema>;
|
||||
|
||||
/** Infers the output type of a Standard. */
|
||||
export type InferOutput<Schema extends StandardTypedV1> =
|
||||
StandardTypedV1.InferOutput<Schema>;
|
||||
}
|
||||
|
||||
// ###############################
|
||||
// ### Standard JSON Schema ###
|
||||
// ###############################
|
||||
|
||||
/** The Standard JSON Schema interface. */
|
||||
export interface StandardJSONSchemaV1<Input = unknown, Output = Input> {
|
||||
/** The Standard JSON Schema properties. */
|
||||
readonly "~standard": StandardJSONSchemaV1.Props<Input, Output>;
|
||||
}
|
||||
|
||||
export declare namespace StandardJSONSchemaV1 {
|
||||
/** The Standard JSON Schema properties interface. */
|
||||
export interface Props<Input = unknown, Output = Input>
|
||||
extends StandardTypedV1.Props<Input, Output> {
|
||||
/** Methods for generating the input/output JSON Schema. */
|
||||
readonly jsonSchema: StandardJSONSchemaV1.Converter;
|
||||
}
|
||||
|
||||
/** The Standard JSON Schema converter interface. */
|
||||
export interface Converter {
|
||||
/** Converts the input type to JSON Schema. May throw if conversion is not supported. */
|
||||
readonly input: (
|
||||
options: StandardJSONSchemaV1.Options
|
||||
) => Record<string, unknown>;
|
||||
/** Converts the output type to JSON Schema. May throw if conversion is not supported. */
|
||||
readonly output: (
|
||||
options: StandardJSONSchemaV1.Options
|
||||
) => Record<string, unknown>;
|
||||
}
|
||||
|
||||
/**
|
||||
* The target version of the generated JSON Schema.
|
||||
*
|
||||
* It is *strongly recommended* that implementers support `"draft-2020-12"` and `"draft-07"`, as they are both in wide use. All other targets can be implemented on a best-effort basis. Libraries should throw if they don't support a specified target.
|
||||
*
|
||||
* The `"openapi-3.0"` target is intended as a standardized specifier for OpenAPI 3.0 which is a superset of JSON Schema `"draft-04"`.
|
||||
*/
|
||||
export type Target =
|
||||
| "draft-2020-12"
|
||||
| "draft-07"
|
||||
| "openapi-3.0"
|
||||
// Accepts any string for future targets while preserving autocomplete
|
||||
| ({} & string);
|
||||
|
||||
/** The options for the input/output methods. */
|
||||
export interface Options {
|
||||
/** Specifies the target version of the generated JSON Schema. Support for all versions is on a best-effort basis. If a given version is not supported, the library should throw. */
|
||||
readonly target: Target;
|
||||
|
||||
/** Explicit support for additional vendor-specific parameters, if needed. */
|
||||
readonly libraryOptions?: Record<string, unknown> | undefined;
|
||||
}
|
||||
|
||||
/** The Standard types interface. */
|
||||
export interface Types<Input = unknown, Output = Input>
|
||||
extends StandardTypedV1.Types<Input, Output> {}
|
||||
|
||||
/** Infers the input type of a Standard. */
|
||||
export type InferInput<Schema extends StandardTypedV1> =
|
||||
StandardTypedV1.InferInput<Schema>;
|
||||
|
||||
/** Infers the output type of a Standard. */
|
||||
export type InferOutput<Schema extends StandardTypedV1> =
|
||||
StandardTypedV1.InferOutput<Schema>;
|
||||
}
|
||||
```
|
||||
18
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/dist/index.cjs
generated
vendored
Normal file
18
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/dist/index.cjs
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
|
||||
// src/index.ts
|
||||
var src_exports = {};
|
||||
module.exports = __toCommonJS(src_exports);
|
||||
119
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/dist/index.d.cts
generated
vendored
Normal file
119
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/dist/index.d.cts
generated
vendored
Normal file
@@ -0,0 +1,119 @@
|
||||
/** The Standard Typed interface. This is a base type extended by other specs. */
|
||||
interface StandardTypedV1<Input = unknown, Output = Input> {
|
||||
/** The Standard properties. */
|
||||
readonly "~standard": StandardTypedV1.Props<Input, Output>;
|
||||
}
|
||||
declare namespace StandardTypedV1 {
|
||||
/** The Standard Typed properties interface. */
|
||||
interface Props<Input = unknown, Output = Input> {
|
||||
/** The version number of the standard. */
|
||||
readonly version: 1;
|
||||
/** The vendor name of the schema library. */
|
||||
readonly vendor: string;
|
||||
/** Inferred types associated with the schema. */
|
||||
readonly types?: Types<Input, Output> | undefined;
|
||||
}
|
||||
/** The Standard Typed types interface. */
|
||||
interface Types<Input = unknown, Output = Input> {
|
||||
/** The input type of the schema. */
|
||||
readonly input: Input;
|
||||
/** The output type of the schema. */
|
||||
readonly output: Output;
|
||||
}
|
||||
/** Infers the input type of a Standard Typed. */
|
||||
type InferInput<Schema extends StandardTypedV1> = NonNullable<Schema["~standard"]["types"]>["input"];
|
||||
/** Infers the output type of a Standard Typed. */
|
||||
type InferOutput<Schema extends StandardTypedV1> = NonNullable<Schema["~standard"]["types"]>["output"];
|
||||
}
|
||||
/** The Standard Schema interface. */
|
||||
interface StandardSchemaV1<Input = unknown, Output = Input> {
|
||||
/** The Standard Schema properties. */
|
||||
readonly "~standard": StandardSchemaV1.Props<Input, Output>;
|
||||
}
|
||||
declare namespace StandardSchemaV1 {
|
||||
/** The Standard Schema properties interface. */
|
||||
interface Props<Input = unknown, Output = Input> extends StandardTypedV1.Props<Input, Output> {
|
||||
/** Validates unknown input values. */
|
||||
readonly validate: (value: unknown, options?: StandardSchemaV1.Options | undefined) => Result<Output> | Promise<Result<Output>>;
|
||||
}
|
||||
/** The result interface of the validate function. */
|
||||
type Result<Output> = SuccessResult<Output> | FailureResult;
|
||||
/** The result interface if validation succeeds. */
|
||||
interface SuccessResult<Output> {
|
||||
/** The typed output value. */
|
||||
readonly value: Output;
|
||||
/** A falsy value for `issues` indicates success. */
|
||||
readonly issues?: undefined;
|
||||
}
|
||||
interface Options {
|
||||
/** Explicit support for additional vendor-specific parameters, if needed. */
|
||||
readonly libraryOptions?: Record<string, unknown> | undefined;
|
||||
}
|
||||
/** The result interface if validation fails. */
|
||||
interface FailureResult {
|
||||
/** The issues of failed validation. */
|
||||
readonly issues: ReadonlyArray<Issue>;
|
||||
}
|
||||
/** The issue interface of the failure output. */
|
||||
interface Issue {
|
||||
/** The error message of the issue. */
|
||||
readonly message: string;
|
||||
/** The path of the issue, if any. */
|
||||
readonly path?: ReadonlyArray<PropertyKey | PathSegment> | undefined;
|
||||
}
|
||||
/** The path segment interface of the issue. */
|
||||
interface PathSegment {
|
||||
/** The key representing a path segment. */
|
||||
readonly key: PropertyKey;
|
||||
}
|
||||
/** The Standard types interface. */
|
||||
interface Types<Input = unknown, Output = Input> extends StandardTypedV1.Types<Input, Output> {
|
||||
}
|
||||
/** Infers the input type of a Standard. */
|
||||
type InferInput<Schema extends StandardTypedV1> = StandardTypedV1.InferInput<Schema>;
|
||||
/** Infers the output type of a Standard. */
|
||||
type InferOutput<Schema extends StandardTypedV1> = StandardTypedV1.InferOutput<Schema>;
|
||||
}
|
||||
/** The Standard JSON Schema interface. */
|
||||
interface StandardJSONSchemaV1<Input = unknown, Output = Input> {
|
||||
/** The Standard JSON Schema properties. */
|
||||
readonly "~standard": StandardJSONSchemaV1.Props<Input, Output>;
|
||||
}
|
||||
declare namespace StandardJSONSchemaV1 {
|
||||
/** The Standard JSON Schema properties interface. */
|
||||
interface Props<Input = unknown, Output = Input> extends StandardTypedV1.Props<Input, Output> {
|
||||
/** Methods for generating the input/output JSON Schema. */
|
||||
readonly jsonSchema: StandardJSONSchemaV1.Converter;
|
||||
}
|
||||
/** The Standard JSON Schema converter interface. */
|
||||
interface Converter {
|
||||
/** Converts the input type to JSON Schema. May throw if conversion is not supported. */
|
||||
readonly input: (options: StandardJSONSchemaV1.Options) => Record<string, unknown>;
|
||||
/** Converts the output type to JSON Schema. May throw if conversion is not supported. */
|
||||
readonly output: (options: StandardJSONSchemaV1.Options) => Record<string, unknown>;
|
||||
}
|
||||
/**
|
||||
* The target version of the generated JSON Schema.
|
||||
*
|
||||
* It is *strongly recommended* that implementers support `"draft-2020-12"` and `"draft-07"`, as they are both in wide use. All other targets can be implemented on a best-effort basis. Libraries should throw if they don't support a specified target.
|
||||
*
|
||||
* The `"openapi-3.0"` target is intended as a standardized specifier for OpenAPI 3.0 which is a superset of JSON Schema `"draft-04"`.
|
||||
*/
|
||||
type Target = "draft-2020-12" | "draft-07" | "openapi-3.0" | ({} & string);
|
||||
/** The options for the input/output methods. */
|
||||
interface Options {
|
||||
/** Specifies the target version of the generated JSON Schema. Support for all versions is on a best-effort basis. If a given version is not supported, the library should throw. */
|
||||
readonly target: Target;
|
||||
/** Explicit support for additional vendor-specific parameters, if needed. */
|
||||
readonly libraryOptions?: Record<string, unknown> | undefined;
|
||||
}
|
||||
/** The Standard types interface. */
|
||||
interface Types<Input = unknown, Output = Input> extends StandardTypedV1.Types<Input, Output> {
|
||||
}
|
||||
/** Infers the input type of a Standard. */
|
||||
type InferInput<Schema extends StandardTypedV1> = StandardTypedV1.InferInput<Schema>;
|
||||
/** Infers the output type of a Standard. */
|
||||
type InferOutput<Schema extends StandardTypedV1> = StandardTypedV1.InferOutput<Schema>;
|
||||
}
|
||||
|
||||
export { StandardJSONSchemaV1, StandardSchemaV1, StandardTypedV1 };
|
||||
119
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/dist/index.d.ts
generated
vendored
Normal file
119
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/dist/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,119 @@
|
||||
/** The Standard Typed interface. This is a base type extended by other specs. */
|
||||
interface StandardTypedV1<Input = unknown, Output = Input> {
|
||||
/** The Standard properties. */
|
||||
readonly "~standard": StandardTypedV1.Props<Input, Output>;
|
||||
}
|
||||
declare namespace StandardTypedV1 {
|
||||
/** The Standard Typed properties interface. */
|
||||
interface Props<Input = unknown, Output = Input> {
|
||||
/** The version number of the standard. */
|
||||
readonly version: 1;
|
||||
/** The vendor name of the schema library. */
|
||||
readonly vendor: string;
|
||||
/** Inferred types associated with the schema. */
|
||||
readonly types?: Types<Input, Output> | undefined;
|
||||
}
|
||||
/** The Standard Typed types interface. */
|
||||
interface Types<Input = unknown, Output = Input> {
|
||||
/** The input type of the schema. */
|
||||
readonly input: Input;
|
||||
/** The output type of the schema. */
|
||||
readonly output: Output;
|
||||
}
|
||||
/** Infers the input type of a Standard Typed. */
|
||||
type InferInput<Schema extends StandardTypedV1> = NonNullable<Schema["~standard"]["types"]>["input"];
|
||||
/** Infers the output type of a Standard Typed. */
|
||||
type InferOutput<Schema extends StandardTypedV1> = NonNullable<Schema["~standard"]["types"]>["output"];
|
||||
}
|
||||
/** The Standard Schema interface. */
|
||||
interface StandardSchemaV1<Input = unknown, Output = Input> {
|
||||
/** The Standard Schema properties. */
|
||||
readonly "~standard": StandardSchemaV1.Props<Input, Output>;
|
||||
}
|
||||
declare namespace StandardSchemaV1 {
|
||||
/** The Standard Schema properties interface. */
|
||||
interface Props<Input = unknown, Output = Input> extends StandardTypedV1.Props<Input, Output> {
|
||||
/** Validates unknown input values. */
|
||||
readonly validate: (value: unknown, options?: StandardSchemaV1.Options | undefined) => Result<Output> | Promise<Result<Output>>;
|
||||
}
|
||||
/** The result interface of the validate function. */
|
||||
type Result<Output> = SuccessResult<Output> | FailureResult;
|
||||
/** The result interface if validation succeeds. */
|
||||
interface SuccessResult<Output> {
|
||||
/** The typed output value. */
|
||||
readonly value: Output;
|
||||
/** A falsy value for `issues` indicates success. */
|
||||
readonly issues?: undefined;
|
||||
}
|
||||
interface Options {
|
||||
/** Explicit support for additional vendor-specific parameters, if needed. */
|
||||
readonly libraryOptions?: Record<string, unknown> | undefined;
|
||||
}
|
||||
/** The result interface if validation fails. */
|
||||
interface FailureResult {
|
||||
/** The issues of failed validation. */
|
||||
readonly issues: ReadonlyArray<Issue>;
|
||||
}
|
||||
/** The issue interface of the failure output. */
|
||||
interface Issue {
|
||||
/** The error message of the issue. */
|
||||
readonly message: string;
|
||||
/** The path of the issue, if any. */
|
||||
readonly path?: ReadonlyArray<PropertyKey | PathSegment> | undefined;
|
||||
}
|
||||
/** The path segment interface of the issue. */
|
||||
interface PathSegment {
|
||||
/** The key representing a path segment. */
|
||||
readonly key: PropertyKey;
|
||||
}
|
||||
/** The Standard types interface. */
|
||||
interface Types<Input = unknown, Output = Input> extends StandardTypedV1.Types<Input, Output> {
|
||||
}
|
||||
/** Infers the input type of a Standard. */
|
||||
type InferInput<Schema extends StandardTypedV1> = StandardTypedV1.InferInput<Schema>;
|
||||
/** Infers the output type of a Standard. */
|
||||
type InferOutput<Schema extends StandardTypedV1> = StandardTypedV1.InferOutput<Schema>;
|
||||
}
|
||||
/** The Standard JSON Schema interface. */
|
||||
interface StandardJSONSchemaV1<Input = unknown, Output = Input> {
|
||||
/** The Standard JSON Schema properties. */
|
||||
readonly "~standard": StandardJSONSchemaV1.Props<Input, Output>;
|
||||
}
|
||||
declare namespace StandardJSONSchemaV1 {
|
||||
/** The Standard JSON Schema properties interface. */
|
||||
interface Props<Input = unknown, Output = Input> extends StandardTypedV1.Props<Input, Output> {
|
||||
/** Methods for generating the input/output JSON Schema. */
|
||||
readonly jsonSchema: StandardJSONSchemaV1.Converter;
|
||||
}
|
||||
/** The Standard JSON Schema converter interface. */
|
||||
interface Converter {
|
||||
/** Converts the input type to JSON Schema. May throw if conversion is not supported. */
|
||||
readonly input: (options: StandardJSONSchemaV1.Options) => Record<string, unknown>;
|
||||
/** Converts the output type to JSON Schema. May throw if conversion is not supported. */
|
||||
readonly output: (options: StandardJSONSchemaV1.Options) => Record<string, unknown>;
|
||||
}
|
||||
/**
|
||||
* The target version of the generated JSON Schema.
|
||||
*
|
||||
* It is *strongly recommended* that implementers support `"draft-2020-12"` and `"draft-07"`, as they are both in wide use. All other targets can be implemented on a best-effort basis. Libraries should throw if they don't support a specified target.
|
||||
*
|
||||
* The `"openapi-3.0"` target is intended as a standardized specifier for OpenAPI 3.0 which is a superset of JSON Schema `"draft-04"`.
|
||||
*/
|
||||
type Target = "draft-2020-12" | "draft-07" | "openapi-3.0" | ({} & string);
|
||||
/** The options for the input/output methods. */
|
||||
interface Options {
|
||||
/** Specifies the target version of the generated JSON Schema. Support for all versions is on a best-effort basis. If a given version is not supported, the library should throw. */
|
||||
readonly target: Target;
|
||||
/** Explicit support for additional vendor-specific parameters, if needed. */
|
||||
readonly libraryOptions?: Record<string, unknown> | undefined;
|
||||
}
|
||||
/** The Standard types interface. */
|
||||
interface Types<Input = unknown, Output = Input> extends StandardTypedV1.Types<Input, Output> {
|
||||
}
|
||||
/** Infers the input type of a Standard. */
|
||||
type InferInput<Schema extends StandardTypedV1> = StandardTypedV1.InferInput<Schema>;
|
||||
/** Infers the output type of a Standard. */
|
||||
type InferOutput<Schema extends StandardTypedV1> = StandardTypedV1.InferOutput<Schema>;
|
||||
}
|
||||
|
||||
export { StandardJSONSchemaV1, StandardSchemaV1, StandardTypedV1 };
|
||||
0
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/dist/index.js
generated
vendored
Normal file
0
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/dist/index.js
generated
vendored
Normal file
52
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/package.json
generated
vendored
Normal file
52
node_modules/.deno/@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec/package.json
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
{
|
||||
"name": "@standard-schema/spec",
|
||||
"description": "A family of specs for interoperable TypeScript",
|
||||
"version": "1.1.0",
|
||||
"license": "MIT",
|
||||
"author": "Colin McDonnell",
|
||||
"homepage": "https://standardschema.dev",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/standard-schema/standard-schema"
|
||||
},
|
||||
"keywords": [
|
||||
"typescript",
|
||||
"schema",
|
||||
"validation",
|
||||
"standard",
|
||||
"interface"
|
||||
],
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"standard-schema-spec": "./src/index.ts",
|
||||
"import": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"default": "./dist/index.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/index.d.cts",
|
||||
"default": "./dist/index.cjs"
|
||||
}
|
||||
}
|
||||
},
|
||||
"sideEffects": false,
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"devDependencies": {
|
||||
"tsup": "^8.3.0",
|
||||
"typescript": "^5.6.2"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "pnpm biome lint ./src",
|
||||
"format": "pnpm biome format --write ./src",
|
||||
"check": "pnpm biome check ./src",
|
||||
"build": "tsup"
|
||||
}
|
||||
}
|
||||
1
node_modules/.deno/node_modules/@standard-schema/spec
generated
vendored
Symbolic link
1
node_modules/.deno/node_modules/@standard-schema/spec
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../../@standard-schema+spec@1.1.0/node_modules/@standard-schema/spec
|
||||
1
node_modules/.deno/node_modules/playwright-core
generated
vendored
Symbolic link
1
node_modules/.deno/node_modules/playwright-core
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../playwright-core@1.58.2/node_modules/playwright-core
|
||||
0
node_modules/.deno/playwright-core@1.58.2/.initialized
generated
vendored
Normal file
0
node_modules/.deno/playwright-core@1.58.2/.initialized
generated
vendored
Normal file
202
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/LICENSE
generated
vendored
Normal file
202
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,202 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Portions Copyright (c) Microsoft Corporation.
|
||||
Portions Copyright 2017 Google Inc.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
5
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/NOTICE
generated
vendored
Normal file
5
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/NOTICE
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
Playwright
|
||||
Copyright (c) Microsoft Corporation
|
||||
|
||||
This software contains code derived from the Puppeteer project (https://github.com/puppeteer/puppeteer),
|
||||
available under the Apache 2.0 license (https://github.com/puppeteer/puppeteer/blob/master/LICENSE).
|
||||
3
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/README.md
generated
vendored
Normal file
3
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/README.md
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# playwright-core
|
||||
|
||||
This package contains the no-browser flavor of [Playwright](http://github.com/microsoft/playwright).
|
||||
4076
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/ThirdPartyNotices.txt
generated
vendored
Normal file
4076
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/ThirdPartyNotices.txt
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
5
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/install_media_pack.ps1
generated
vendored
Normal file
5
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/install_media_pack.ps1
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
$osInfo = Get-WmiObject -Class Win32_OperatingSystem
|
||||
# check if running on Windows Server
|
||||
if ($osInfo.ProductType -eq 3) {
|
||||
Install-WindowsFeature Server-Media-Foundation
|
||||
}
|
||||
33
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/install_webkit_wsl.ps1
generated
vendored
Normal file
33
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/install_webkit_wsl.ps1
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# This script sets up a WSL distribution that will be used to run WebKit.
|
||||
|
||||
$Distribution = "playwright"
|
||||
$Username = "pwuser"
|
||||
|
||||
$distributions = (wsl --list --quiet) -split "\r?\n"
|
||||
if ($distributions -contains $Distribution) {
|
||||
Write-Host "WSL distribution '$Distribution' already exists. Skipping installation."
|
||||
} else {
|
||||
Write-Host "Installing new WSL distribution '$Distribution'..."
|
||||
$VhdSize = "10GB"
|
||||
wsl --install -d Ubuntu-24.04 --name $Distribution --no-launch --vhd-size $VhdSize
|
||||
wsl -d $Distribution -u root adduser --gecos GECOS --disabled-password $Username
|
||||
}
|
||||
|
||||
$pwshDirname = (Resolve-Path -Path $PSScriptRoot).Path;
|
||||
$playwrightCoreRoot = Resolve-Path (Join-Path $pwshDirname "..")
|
||||
|
||||
$initScript = @"
|
||||
if [ ! -f "/home/$Username/node/bin/node" ]; then
|
||||
mkdir -p /home/$Username/node
|
||||
curl -fsSL https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz -o /home/$Username/node/node-v22.17.0-linux-x64.tar.xz
|
||||
tar -xJf /home/$Username/node/node-v22.17.0-linux-x64.tar.xz -C /home/$Username/node --strip-components=1
|
||||
sudo -u $Username echo 'export PATH=/home/$Username/node/bin:\`$PATH' >> /home/$Username/.profile
|
||||
fi
|
||||
/home/$Username/node/bin/node cli.js install-deps webkit
|
||||
sudo -u $Username PLAYWRIGHT_SKIP_BROWSER_GC=1 /home/$Username/node/bin/node cli.js install webkit
|
||||
"@ -replace "\r\n", "`n"
|
||||
|
||||
wsl -d $Distribution --cd $playwrightCoreRoot -u root -- bash -c "$initScript"
|
||||
Write-Host "Done!"
|
||||
42
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_beta_linux.sh
generated
vendored
Executable file
42
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_beta_linux.sh
generated
vendored
Executable file
@@ -0,0 +1,42 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
set -x
|
||||
|
||||
if [[ $(arch) == "aarch64" ]]; then
|
||||
echo "ERROR: not supported on Linux Arm64"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$PLAYWRIGHT_HOST_PLATFORM_OVERRIDE" ]; then
|
||||
if [[ ! -f "/etc/os-release" ]]; then
|
||||
echo "ERROR: cannot install on unknown linux distribution (/etc/os-release is missing)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ID=$(bash -c 'source /etc/os-release && echo $ID')
|
||||
if [[ "${ID}" != "ubuntu" && "${ID}" != "debian" ]]; then
|
||||
echo "ERROR: cannot install on $ID distribution - only Ubuntu and Debian are supported"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# 1. make sure to remove old beta if any.
|
||||
if dpkg --get-selections | grep -q "^google-chrome-beta[[:space:]]*install$" >/dev/null; then
|
||||
apt-get remove -y google-chrome-beta
|
||||
fi
|
||||
|
||||
# 2. Update apt lists (needed to install curl and chrome dependencies)
|
||||
apt-get update
|
||||
|
||||
# 3. Install curl to download chrome
|
||||
if ! command -v curl >/dev/null; then
|
||||
apt-get install -y curl
|
||||
fi
|
||||
|
||||
# 4. download chrome beta from dl.google.com and install it.
|
||||
cd /tmp
|
||||
curl -O https://dl.google.com/linux/direct/google-chrome-beta_current_amd64.deb
|
||||
apt-get install -y ./google-chrome-beta_current_amd64.deb
|
||||
rm -rf ./google-chrome-beta_current_amd64.deb
|
||||
cd -
|
||||
google-chrome-beta --version
|
||||
13
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_beta_mac.sh
generated
vendored
Executable file
13
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_beta_mac.sh
generated
vendored
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
set -x
|
||||
|
||||
rm -rf "/Applications/Google Chrome Beta.app"
|
||||
cd /tmp
|
||||
curl --retry 3 -o ./googlechromebeta.dmg https://dl.google.com/chrome/mac/universal/beta/googlechromebeta.dmg
|
||||
hdiutil attach -nobrowse -quiet -noautofsck -noautoopen -mountpoint /Volumes/googlechromebeta.dmg ./googlechromebeta.dmg
|
||||
cp -pR "/Volumes/googlechromebeta.dmg/Google Chrome Beta.app" /Applications
|
||||
hdiutil detach /Volumes/googlechromebeta.dmg
|
||||
rm -rf /tmp/googlechromebeta.dmg
|
||||
|
||||
/Applications/Google\ Chrome\ Beta.app/Contents/MacOS/Google\ Chrome\ Beta --version
|
||||
24
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_beta_win.ps1
generated
vendored
Normal file
24
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_beta_win.ps1
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
$url = 'https://dl.google.com/tag/s/dl/chrome/install/beta/googlechromebetastandaloneenterprise64.msi'
|
||||
|
||||
Write-Host "Downloading Google Chrome Beta"
|
||||
$wc = New-Object net.webclient
|
||||
$msiInstaller = "$env:temp\google-chrome-beta.msi"
|
||||
$wc.Downloadfile($url, $msiInstaller)
|
||||
|
||||
Write-Host "Installing Google Chrome Beta"
|
||||
$arguments = "/i `"$msiInstaller`" /quiet"
|
||||
Start-Process msiexec.exe -ArgumentList $arguments -Wait
|
||||
Remove-Item $msiInstaller
|
||||
|
||||
$suffix = "\\Google\\Chrome Beta\\Application\\chrome.exe"
|
||||
if (Test-Path "${env:ProgramFiles(x86)}$suffix") {
|
||||
(Get-Item "${env:ProgramFiles(x86)}$suffix").VersionInfo
|
||||
} elseif (Test-Path "${env:ProgramFiles}$suffix") {
|
||||
(Get-Item "${env:ProgramFiles}$suffix").VersionInfo
|
||||
} else {
|
||||
Write-Host "ERROR: Failed to install Google Chrome Beta."
|
||||
Write-Host "ERROR: This could be due to insufficient privileges, in which case re-running as Administrator may help."
|
||||
exit 1
|
||||
}
|
||||
42
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_stable_linux.sh
generated
vendored
Executable file
42
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_stable_linux.sh
generated
vendored
Executable file
@@ -0,0 +1,42 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
set -x
|
||||
|
||||
if [[ $(arch) == "aarch64" ]]; then
|
||||
echo "ERROR: not supported on Linux Arm64"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$PLAYWRIGHT_HOST_PLATFORM_OVERRIDE" ]; then
|
||||
if [[ ! -f "/etc/os-release" ]]; then
|
||||
echo "ERROR: cannot install on unknown linux distribution (/etc/os-release is missing)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ID=$(bash -c 'source /etc/os-release && echo $ID')
|
||||
if [[ "${ID}" != "ubuntu" && "${ID}" != "debian" ]]; then
|
||||
echo "ERROR: cannot install on $ID distribution - only Ubuntu and Debian are supported"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# 1. make sure to remove old stable if any.
|
||||
if dpkg --get-selections | grep -q "^google-chrome[[:space:]]*install$" >/dev/null; then
|
||||
apt-get remove -y google-chrome
|
||||
fi
|
||||
|
||||
# 2. Update apt lists (needed to install curl and chrome dependencies)
|
||||
apt-get update
|
||||
|
||||
# 3. Install curl to download chrome
|
||||
if ! command -v curl >/dev/null; then
|
||||
apt-get install -y curl
|
||||
fi
|
||||
|
||||
# 4. download chrome stable from dl.google.com and install it.
|
||||
cd /tmp
|
||||
curl -O https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
|
||||
apt-get install -y ./google-chrome-stable_current_amd64.deb
|
||||
rm -rf ./google-chrome-stable_current_amd64.deb
|
||||
cd -
|
||||
google-chrome --version
|
||||
12
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_stable_mac.sh
generated
vendored
Executable file
12
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_stable_mac.sh
generated
vendored
Executable file
@@ -0,0 +1,12 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
set -x
|
||||
|
||||
rm -rf "/Applications/Google Chrome.app"
|
||||
cd /tmp
|
||||
curl --retry 3 -o ./googlechrome.dmg https://dl.google.com/chrome/mac/universal/stable/GGRO/googlechrome.dmg
|
||||
hdiutil attach -nobrowse -quiet -noautofsck -noautoopen -mountpoint /Volumes/googlechrome.dmg ./googlechrome.dmg
|
||||
cp -pR "/Volumes/googlechrome.dmg/Google Chrome.app" /Applications
|
||||
hdiutil detach /Volumes/googlechrome.dmg
|
||||
rm -rf /tmp/googlechrome.dmg
|
||||
/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome --version
|
||||
24
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_stable_win.ps1
generated
vendored
Normal file
24
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_chrome_stable_win.ps1
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
$ErrorActionPreference = 'Stop'
|
||||
$url = 'https://dl.google.com/tag/s/dl/chrome/install/googlechromestandaloneenterprise64.msi'
|
||||
|
||||
$wc = New-Object net.webclient
|
||||
$msiInstaller = "$env:temp\google-chrome.msi"
|
||||
Write-Host "Downloading Google Chrome"
|
||||
$wc.Downloadfile($url, $msiInstaller)
|
||||
|
||||
Write-Host "Installing Google Chrome"
|
||||
$arguments = "/i `"$msiInstaller`" /quiet"
|
||||
Start-Process msiexec.exe -ArgumentList $arguments -Wait
|
||||
Remove-Item $msiInstaller
|
||||
|
||||
|
||||
$suffix = "\\Google\\Chrome\\Application\\chrome.exe"
|
||||
if (Test-Path "${env:ProgramFiles(x86)}$suffix") {
|
||||
(Get-Item "${env:ProgramFiles(x86)}$suffix").VersionInfo
|
||||
} elseif (Test-Path "${env:ProgramFiles}$suffix") {
|
||||
(Get-Item "${env:ProgramFiles}$suffix").VersionInfo
|
||||
} else {
|
||||
Write-Host "ERROR: Failed to install Google Chrome."
|
||||
Write-Host "ERROR: This could be due to insufficient privileges, in which case re-running as Administrator may help."
|
||||
exit 1
|
||||
}
|
||||
48
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_beta_linux.sh
generated
vendored
Executable file
48
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_beta_linux.sh
generated
vendored
Executable file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
set -x
|
||||
|
||||
if [[ $(arch) == "aarch64" ]]; then
|
||||
echo "ERROR: not supported on Linux Arm64"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$PLAYWRIGHT_HOST_PLATFORM_OVERRIDE" ]; then
|
||||
if [[ ! -f "/etc/os-release" ]]; then
|
||||
echo "ERROR: cannot install on unknown linux distribution (/etc/os-release is missing)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ID=$(bash -c 'source /etc/os-release && echo $ID')
|
||||
if [[ "${ID}" != "ubuntu" && "${ID}" != "debian" ]]; then
|
||||
echo "ERROR: cannot install on $ID distribution - only Ubuntu and Debian are supported"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# 1. make sure to remove old beta if any.
|
||||
if dpkg --get-selections | grep -q "^microsoft-edge-beta[[:space:]]*install$" >/dev/null; then
|
||||
apt-get remove -y microsoft-edge-beta
|
||||
fi
|
||||
|
||||
# 2. Install curl to download Microsoft gpg key
|
||||
if ! command -v curl >/dev/null; then
|
||||
apt-get update
|
||||
apt-get install -y curl
|
||||
fi
|
||||
|
||||
# GnuPG is not preinstalled in slim images
|
||||
if ! command -v gpg >/dev/null; then
|
||||
apt-get update
|
||||
apt-get install -y gpg
|
||||
fi
|
||||
|
||||
# 3. Add the GPG key, the apt repo, update the apt cache, and install the package
|
||||
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > /tmp/microsoft.gpg
|
||||
install -o root -g root -m 644 /tmp/microsoft.gpg /etc/apt/trusted.gpg.d/
|
||||
sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/edge stable main" > /etc/apt/sources.list.d/microsoft-edge-dev.list'
|
||||
rm /tmp/microsoft.gpg
|
||||
apt-get update && apt-get install -y microsoft-edge-beta
|
||||
|
||||
microsoft-edge-beta --version
|
||||
11
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_beta_mac.sh
generated
vendored
Executable file
11
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_beta_mac.sh
generated
vendored
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
set -x
|
||||
|
||||
cd /tmp
|
||||
curl --retry 3 -o ./msedge_beta.pkg "$1"
|
||||
# Note: there's no way to uninstall previously installed MSEdge.
|
||||
# However, running PKG again seems to update installation.
|
||||
sudo installer -pkg /tmp/msedge_beta.pkg -target /
|
||||
rm -rf /tmp/msedge_beta.pkg
|
||||
/Applications/Microsoft\ Edge\ Beta.app/Contents/MacOS/Microsoft\ Edge\ Beta --version
|
||||
23
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_beta_win.ps1
generated
vendored
Normal file
23
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_beta_win.ps1
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
$ErrorActionPreference = 'Stop'
|
||||
$url = $args[0]
|
||||
|
||||
Write-Host "Downloading Microsoft Edge Beta"
|
||||
$wc = New-Object net.webclient
|
||||
$msiInstaller = "$env:temp\microsoft-edge-beta.msi"
|
||||
$wc.Downloadfile($url, $msiInstaller)
|
||||
|
||||
Write-Host "Installing Microsoft Edge Beta"
|
||||
$arguments = "/i `"$msiInstaller`" /quiet"
|
||||
Start-Process msiexec.exe -ArgumentList $arguments -Wait
|
||||
Remove-Item $msiInstaller
|
||||
|
||||
$suffix = "\\Microsoft\\Edge Beta\\Application\\msedge.exe"
|
||||
if (Test-Path "${env:ProgramFiles(x86)}$suffix") {
|
||||
(Get-Item "${env:ProgramFiles(x86)}$suffix").VersionInfo
|
||||
} elseif (Test-Path "${env:ProgramFiles}$suffix") {
|
||||
(Get-Item "${env:ProgramFiles}$suffix").VersionInfo
|
||||
} else {
|
||||
Write-Host "ERROR: Failed to install Microsoft Edge Beta."
|
||||
Write-Host "ERROR: This could be due to insufficient privileges, in which case re-running as Administrator may help."
|
||||
exit 1
|
||||
}
|
||||
48
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_dev_linux.sh
generated
vendored
Executable file
48
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_dev_linux.sh
generated
vendored
Executable file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
set -x
|
||||
|
||||
if [[ $(arch) == "aarch64" ]]; then
|
||||
echo "ERROR: not supported on Linux Arm64"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$PLAYWRIGHT_HOST_PLATFORM_OVERRIDE" ]; then
|
||||
if [[ ! -f "/etc/os-release" ]]; then
|
||||
echo "ERROR: cannot install on unknown linux distribution (/etc/os-release is missing)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ID=$(bash -c 'source /etc/os-release && echo $ID')
|
||||
if [[ "${ID}" != "ubuntu" && "${ID}" != "debian" ]]; then
|
||||
echo "ERROR: cannot install on $ID distribution - only Ubuntu and Debian are supported"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# 1. make sure to remove old dev if any.
|
||||
if dpkg --get-selections | grep -q "^microsoft-edge-dev[[:space:]]*install$" >/dev/null; then
|
||||
apt-get remove -y microsoft-edge-dev
|
||||
fi
|
||||
|
||||
# 2. Install curl to download Microsoft gpg key
|
||||
if ! command -v curl >/dev/null; then
|
||||
apt-get update
|
||||
apt-get install -y curl
|
||||
fi
|
||||
|
||||
# GnuPG is not preinstalled in slim images
|
||||
if ! command -v gpg >/dev/null; then
|
||||
apt-get update
|
||||
apt-get install -y gpg
|
||||
fi
|
||||
|
||||
# 3. Add the GPG key, the apt repo, update the apt cache, and install the package
|
||||
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > /tmp/microsoft.gpg
|
||||
install -o root -g root -m 644 /tmp/microsoft.gpg /etc/apt/trusted.gpg.d/
|
||||
sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/edge stable main" > /etc/apt/sources.list.d/microsoft-edge-dev.list'
|
||||
rm /tmp/microsoft.gpg
|
||||
apt-get update && apt-get install -y microsoft-edge-dev
|
||||
|
||||
microsoft-edge-dev --version
|
||||
11
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_dev_mac.sh
generated
vendored
Executable file
11
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_dev_mac.sh
generated
vendored
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
set -x
|
||||
|
||||
cd /tmp
|
||||
curl --retry 3 -o ./msedge_dev.pkg "$1"
|
||||
# Note: there's no way to uninstall previously installed MSEdge.
|
||||
# However, running PKG again seems to update installation.
|
||||
sudo installer -pkg /tmp/msedge_dev.pkg -target /
|
||||
rm -rf /tmp/msedge_dev.pkg
|
||||
/Applications/Microsoft\ Edge\ Dev.app/Contents/MacOS/Microsoft\ Edge\ Dev --version
|
||||
23
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_dev_win.ps1
generated
vendored
Normal file
23
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_dev_win.ps1
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
$ErrorActionPreference = 'Stop'
|
||||
$url = $args[0]
|
||||
|
||||
Write-Host "Downloading Microsoft Edge Dev"
|
||||
$wc = New-Object net.webclient
|
||||
$msiInstaller = "$env:temp\microsoft-edge-dev.msi"
|
||||
$wc.Downloadfile($url, $msiInstaller)
|
||||
|
||||
Write-Host "Installing Microsoft Edge Dev"
|
||||
$arguments = "/i `"$msiInstaller`" /quiet"
|
||||
Start-Process msiexec.exe -ArgumentList $arguments -Wait
|
||||
Remove-Item $msiInstaller
|
||||
|
||||
$suffix = "\\Microsoft\\Edge Dev\\Application\\msedge.exe"
|
||||
if (Test-Path "${env:ProgramFiles(x86)}$suffix") {
|
||||
(Get-Item "${env:ProgramFiles(x86)}$suffix").VersionInfo
|
||||
} elseif (Test-Path "${env:ProgramFiles}$suffix") {
|
||||
(Get-Item "${env:ProgramFiles}$suffix").VersionInfo
|
||||
} else {
|
||||
Write-Host "ERROR: Failed to install Microsoft Edge Dev."
|
||||
Write-Host "ERROR: This could be due to insufficient privileges, in which case re-running as Administrator may help."
|
||||
exit 1
|
||||
}
|
||||
48
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_stable_linux.sh
generated
vendored
Executable file
48
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_stable_linux.sh
generated
vendored
Executable file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
set -x
|
||||
|
||||
if [[ $(arch) == "aarch64" ]]; then
|
||||
echo "ERROR: not supported on Linux Arm64"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$PLAYWRIGHT_HOST_PLATFORM_OVERRIDE" ]; then
|
||||
if [[ ! -f "/etc/os-release" ]]; then
|
||||
echo "ERROR: cannot install on unknown linux distribution (/etc/os-release is missing)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ID=$(bash -c 'source /etc/os-release && echo $ID')
|
||||
if [[ "${ID}" != "ubuntu" && "${ID}" != "debian" ]]; then
|
||||
echo "ERROR: cannot install on $ID distribution - only Ubuntu and Debian are supported"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# 1. make sure to remove old stable if any.
|
||||
if dpkg --get-selections | grep -q "^microsoft-edge-stable[[:space:]]*install$" >/dev/null; then
|
||||
apt-get remove -y microsoft-edge-stable
|
||||
fi
|
||||
|
||||
# 2. Install curl to download Microsoft gpg key
|
||||
if ! command -v curl >/dev/null; then
|
||||
apt-get update
|
||||
apt-get install -y curl
|
||||
fi
|
||||
|
||||
# GnuPG is not preinstalled in slim images
|
||||
if ! command -v gpg >/dev/null; then
|
||||
apt-get update
|
||||
apt-get install -y gpg
|
||||
fi
|
||||
|
||||
# 3. Add the GPG key, the apt repo, update the apt cache, and install the package
|
||||
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > /tmp/microsoft.gpg
|
||||
install -o root -g root -m 644 /tmp/microsoft.gpg /etc/apt/trusted.gpg.d/
|
||||
sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/edge stable main" > /etc/apt/sources.list.d/microsoft-edge-stable.list'
|
||||
rm /tmp/microsoft.gpg
|
||||
apt-get update && apt-get install -y microsoft-edge-stable
|
||||
|
||||
microsoft-edge-stable --version
|
||||
11
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_stable_mac.sh
generated
vendored
Executable file
11
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_stable_mac.sh
generated
vendored
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
set -x
|
||||
|
||||
cd /tmp
|
||||
curl --retry 3 -o ./msedge_stable.pkg "$1"
|
||||
# Note: there's no way to uninstall previously installed MSEdge.
|
||||
# However, running PKG again seems to update installation.
|
||||
sudo installer -pkg /tmp/msedge_stable.pkg -target /
|
||||
rm -rf /tmp/msedge_stable.pkg
|
||||
/Applications/Microsoft\ Edge.app/Contents/MacOS/Microsoft\ Edge --version
|
||||
24
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_stable_win.ps1
generated
vendored
Normal file
24
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/bin/reinstall_msedge_stable_win.ps1
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
$url = $args[0]
|
||||
|
||||
Write-Host "Downloading Microsoft Edge"
|
||||
$wc = New-Object net.webclient
|
||||
$msiInstaller = "$env:temp\microsoft-edge-stable.msi"
|
||||
$wc.Downloadfile($url, $msiInstaller)
|
||||
|
||||
Write-Host "Installing Microsoft Edge"
|
||||
$arguments = "/i `"$msiInstaller`" /quiet"
|
||||
Start-Process msiexec.exe -ArgumentList $arguments -Wait
|
||||
Remove-Item $msiInstaller
|
||||
|
||||
$suffix = "\\Microsoft\\Edge\\Application\\msedge.exe"
|
||||
if (Test-Path "${env:ProgramFiles(x86)}$suffix") {
|
||||
(Get-Item "${env:ProgramFiles(x86)}$suffix").VersionInfo
|
||||
} elseif (Test-Path "${env:ProgramFiles}$suffix") {
|
||||
(Get-Item "${env:ProgramFiles}$suffix").VersionInfo
|
||||
} else {
|
||||
Write-Host "ERROR: Failed to install Microsoft Edge."
|
||||
Write-Host "ERROR: This could be due to insufficient privileges, in which case re-running as Administrator may help."
|
||||
exit 1
|
||||
}
|
||||
79
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/browsers.json
generated
vendored
Normal file
79
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/browsers.json
generated
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
{
|
||||
"comment": "Do not edit this file, use utils/roll_browser.js",
|
||||
"browsers": [
|
||||
{
|
||||
"name": "chromium",
|
||||
"revision": "1208",
|
||||
"installByDefault": true,
|
||||
"browserVersion": "145.0.7632.6",
|
||||
"title": "Chrome for Testing"
|
||||
},
|
||||
{
|
||||
"name": "chromium-headless-shell",
|
||||
"revision": "1208",
|
||||
"installByDefault": true,
|
||||
"browserVersion": "145.0.7632.6",
|
||||
"title": "Chrome Headless Shell"
|
||||
},
|
||||
{
|
||||
"name": "chromium-tip-of-tree",
|
||||
"revision": "1401",
|
||||
"installByDefault": false,
|
||||
"browserVersion": "146.0.7644.0",
|
||||
"title": "Chrome Canary for Testing"
|
||||
},
|
||||
{
|
||||
"name": "chromium-tip-of-tree-headless-shell",
|
||||
"revision": "1401",
|
||||
"installByDefault": false,
|
||||
"browserVersion": "146.0.7644.0",
|
||||
"title": "Chrome Canary Headless Shell"
|
||||
},
|
||||
{
|
||||
"name": "firefox",
|
||||
"revision": "1509",
|
||||
"installByDefault": true,
|
||||
"browserVersion": "146.0.1",
|
||||
"title": "Firefox"
|
||||
},
|
||||
{
|
||||
"name": "firefox-beta",
|
||||
"revision": "1504",
|
||||
"installByDefault": false,
|
||||
"browserVersion": "146.0b8",
|
||||
"title": "Firefox Beta"
|
||||
},
|
||||
{
|
||||
"name": "webkit",
|
||||
"revision": "2248",
|
||||
"installByDefault": true,
|
||||
"revisionOverrides": {
|
||||
"debian11-x64": "2105",
|
||||
"debian11-arm64": "2105",
|
||||
"ubuntu20.04-x64": "2092",
|
||||
"ubuntu20.04-arm64": "2092"
|
||||
},
|
||||
"browserVersion": "26.0",
|
||||
"title": "WebKit"
|
||||
},
|
||||
{
|
||||
"name": "ffmpeg",
|
||||
"revision": "1011",
|
||||
"installByDefault": true,
|
||||
"revisionOverrides": {
|
||||
"mac12": "1010",
|
||||
"mac12-arm64": "1010"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "winldd",
|
||||
"revision": "1007",
|
||||
"installByDefault": false
|
||||
},
|
||||
{
|
||||
"name": "android",
|
||||
"revision": "1001",
|
||||
"installByDefault": false
|
||||
}
|
||||
]
|
||||
}
|
||||
18
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/cli.js
generated
vendored
Executable file
18
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/cli.js
generated
vendored
Executable file
@@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Copyright (c) Microsoft Corporation.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
const { program } = require('./lib/cli/programWithTestStub');
|
||||
program.parse(process.argv);
|
||||
17
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/index.d.ts
generated
vendored
Normal file
17
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
/**
|
||||
* Copyright (c) Microsoft Corporation.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
export * from './types/types';
|
||||
32
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/index.js
generated
vendored
Normal file
32
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/index.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
/**
|
||||
* Copyright (c) Microsoft Corporation.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
const minimumMajorNodeVersion = 18;
|
||||
const currentNodeVersion = process.versions.node;
|
||||
const semver = currentNodeVersion.split('.');
|
||||
const [major] = [+semver[0]];
|
||||
|
||||
if (major < minimumMajorNodeVersion) {
|
||||
console.error(
|
||||
'You are running Node.js ' +
|
||||
currentNodeVersion +
|
||||
'.\n' +
|
||||
`Playwright requires Node.js ${minimumMajorNodeVersion} or higher. \n` +
|
||||
'Please update your version of Node.js.'
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
module.exports = require('./lib/inprocess');
|
||||
28
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/index.mjs
generated
vendored
Normal file
28
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
/**
|
||||
* Copyright (c) Microsoft Corporation.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import playwright from './index.js';
|
||||
|
||||
export const chromium = playwright.chromium;
|
||||
export const firefox = playwright.firefox;
|
||||
export const webkit = playwright.webkit;
|
||||
export const selectors = playwright.selectors;
|
||||
export const devices = playwright.devices;
|
||||
export const errors = playwright.errors;
|
||||
export const request = playwright.request;
|
||||
export const _electron = playwright._electron;
|
||||
export const _android = playwright._android;
|
||||
export default playwright;
|
||||
65
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/lib/androidServerImpl.js
generated
vendored
Normal file
65
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/lib/androidServerImpl.js
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var androidServerImpl_exports = {};
|
||||
__export(androidServerImpl_exports, {
|
||||
AndroidServerLauncherImpl: () => AndroidServerLauncherImpl
|
||||
});
|
||||
module.exports = __toCommonJS(androidServerImpl_exports);
|
||||
var import_playwrightServer = require("./remote/playwrightServer");
|
||||
var import_playwright = require("./server/playwright");
|
||||
var import_crypto = require("./server/utils/crypto");
|
||||
var import_utilsBundle = require("./utilsBundle");
|
||||
var import_progress = require("./server/progress");
|
||||
class AndroidServerLauncherImpl {
|
||||
async launchServer(options = {}) {
|
||||
const playwright = (0, import_playwright.createPlaywright)({ sdkLanguage: "javascript", isServer: true });
|
||||
const controller = new import_progress.ProgressController();
|
||||
let devices = await controller.run((progress) => playwright.android.devices(progress, {
|
||||
host: options.adbHost,
|
||||
port: options.adbPort,
|
||||
omitDriverInstall: options.omitDriverInstall
|
||||
}));
|
||||
if (devices.length === 0)
|
||||
throw new Error("No devices found");
|
||||
if (options.deviceSerialNumber) {
|
||||
devices = devices.filter((d) => d.serial === options.deviceSerialNumber);
|
||||
if (devices.length === 0)
|
||||
throw new Error(`No device with serial number '${options.deviceSerialNumber}' was found`);
|
||||
}
|
||||
if (devices.length > 1)
|
||||
throw new Error(`More than one device found. Please specify deviceSerialNumber`);
|
||||
const device = devices[0];
|
||||
const path = options.wsPath ? options.wsPath.startsWith("/") ? options.wsPath : `/${options.wsPath}` : `/${(0, import_crypto.createGuid)()}`;
|
||||
const server = new import_playwrightServer.PlaywrightServer({ mode: "launchServer", path, maxConnections: 1, preLaunchedAndroidDevice: device });
|
||||
const wsEndpoint = await server.listen(options.port, options.host);
|
||||
const browserServer = new import_utilsBundle.ws.EventEmitter();
|
||||
browserServer.wsEndpoint = () => wsEndpoint;
|
||||
browserServer.close = () => device.close();
|
||||
browserServer.kill = () => device.close();
|
||||
device.on("close", () => {
|
||||
server.close();
|
||||
browserServer.emit("close");
|
||||
});
|
||||
return browserServer;
|
||||
}
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
AndroidServerLauncherImpl
|
||||
});
|
||||
120
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/lib/browserServerImpl.js
generated
vendored
Normal file
120
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/lib/browserServerImpl.js
generated
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
// If the importer is in node compatibility mode or this is not an ESM
|
||||
// file that has been converted to a CommonJS file using a Babel-
|
||||
// compatible transform (i.e. "__esModule" has not been set), then set
|
||||
// "default" to the CommonJS "module.exports" for node compatibility.
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var browserServerImpl_exports = {};
|
||||
__export(browserServerImpl_exports, {
|
||||
BrowserServerLauncherImpl: () => BrowserServerLauncherImpl
|
||||
});
|
||||
module.exports = __toCommonJS(browserServerImpl_exports);
|
||||
var import_playwrightServer = require("./remote/playwrightServer");
|
||||
var import_helper = require("./server/helper");
|
||||
var import_playwright = require("./server/playwright");
|
||||
var import_crypto = require("./server/utils/crypto");
|
||||
var import_debug = require("./server/utils/debug");
|
||||
var import_stackTrace = require("./utils/isomorphic/stackTrace");
|
||||
var import_time = require("./utils/isomorphic/time");
|
||||
var import_utilsBundle = require("./utilsBundle");
|
||||
var validatorPrimitives = __toESM(require("./protocol/validatorPrimitives"));
|
||||
var import_progress = require("./server/progress");
|
||||
class BrowserServerLauncherImpl {
|
||||
constructor(browserName) {
|
||||
this._browserName = browserName;
|
||||
}
|
||||
async launchServer(options = {}) {
|
||||
const playwright = (0, import_playwright.createPlaywright)({ sdkLanguage: "javascript", isServer: true });
|
||||
const metadata = { id: "", startTime: 0, endTime: 0, type: "Internal", method: "", params: {}, log: [], internal: true };
|
||||
const validatorContext = {
|
||||
tChannelImpl: (names, arg, path2) => {
|
||||
throw new validatorPrimitives.ValidationError(`${path2}: channels are not expected in launchServer`);
|
||||
},
|
||||
binary: "buffer",
|
||||
isUnderTest: import_debug.isUnderTest
|
||||
};
|
||||
let launchOptions = {
|
||||
...options,
|
||||
ignoreDefaultArgs: Array.isArray(options.ignoreDefaultArgs) ? options.ignoreDefaultArgs : void 0,
|
||||
ignoreAllDefaultArgs: !!options.ignoreDefaultArgs && !Array.isArray(options.ignoreDefaultArgs),
|
||||
env: options.env ? envObjectToArray(options.env) : void 0,
|
||||
timeout: options.timeout ?? import_time.DEFAULT_PLAYWRIGHT_LAUNCH_TIMEOUT
|
||||
};
|
||||
let browser;
|
||||
try {
|
||||
const controller = new import_progress.ProgressController(metadata);
|
||||
browser = await controller.run(async (progress) => {
|
||||
if (options._userDataDir !== void 0) {
|
||||
const validator = validatorPrimitives.scheme["BrowserTypeLaunchPersistentContextParams"];
|
||||
launchOptions = validator({ ...launchOptions, userDataDir: options._userDataDir }, "", validatorContext);
|
||||
const context = await playwright[this._browserName].launchPersistentContext(progress, options._userDataDir, launchOptions);
|
||||
return context._browser;
|
||||
} else {
|
||||
const validator = validatorPrimitives.scheme["BrowserTypeLaunchParams"];
|
||||
launchOptions = validator(launchOptions, "", validatorContext);
|
||||
return await playwright[this._browserName].launch(progress, launchOptions, toProtocolLogger(options.logger));
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
const log = import_helper.helper.formatBrowserLogs(metadata.log);
|
||||
(0, import_stackTrace.rewriteErrorMessage)(e, `${e.message} Failed to launch browser.${log}`);
|
||||
throw e;
|
||||
}
|
||||
const path = options.wsPath ? options.wsPath.startsWith("/") ? options.wsPath : `/${options.wsPath}` : `/${(0, import_crypto.createGuid)()}`;
|
||||
const server = new import_playwrightServer.PlaywrightServer({ mode: options._sharedBrowser ? "launchServerShared" : "launchServer", path, maxConnections: Infinity, preLaunchedBrowser: browser });
|
||||
const wsEndpoint = await server.listen(options.port, options.host);
|
||||
const browserServer = new import_utilsBundle.ws.EventEmitter();
|
||||
browserServer.process = () => browser.options.browserProcess.process;
|
||||
browserServer.wsEndpoint = () => wsEndpoint;
|
||||
browserServer.close = () => browser.options.browserProcess.close();
|
||||
browserServer[Symbol.asyncDispose] = browserServer.close;
|
||||
browserServer.kill = () => browser.options.browserProcess.kill();
|
||||
browserServer._disconnectForTest = () => server.close();
|
||||
browserServer._userDataDirForTest = browser._userDataDirForTest;
|
||||
browser.options.browserProcess.onclose = (exitCode, signal) => {
|
||||
server.close();
|
||||
browserServer.emit("close", exitCode, signal);
|
||||
};
|
||||
return browserServer;
|
||||
}
|
||||
}
|
||||
function toProtocolLogger(logger) {
|
||||
return logger ? (direction, message) => {
|
||||
if (logger.isEnabled("protocol", "verbose"))
|
||||
logger.log("protocol", "verbose", (direction === "send" ? "SEND \u25BA " : "\u25C0 RECV ") + JSON.stringify(message), [], {});
|
||||
} : void 0;
|
||||
}
|
||||
function envObjectToArray(env) {
|
||||
const result = [];
|
||||
for (const name in env) {
|
||||
if (!Object.is(env[name], void 0))
|
||||
result.push({ name, value: String(env[name]) });
|
||||
}
|
||||
return result;
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
BrowserServerLauncherImpl
|
||||
});
|
||||
97
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/lib/cli/driver.js
generated
vendored
Normal file
97
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/lib/cli/driver.js
generated
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
// If the importer is in node compatibility mode or this is not an ESM
|
||||
// file that has been converted to a CommonJS file using a Babel-
|
||||
// compatible transform (i.e. "__esModule" has not been set), then set
|
||||
// "default" to the CommonJS "module.exports" for node compatibility.
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var driver_exports = {};
|
||||
__export(driver_exports, {
|
||||
launchBrowserServer: () => launchBrowserServer,
|
||||
printApiJson: () => printApiJson,
|
||||
runDriver: () => runDriver,
|
||||
runServer: () => runServer
|
||||
});
|
||||
module.exports = __toCommonJS(driver_exports);
|
||||
var import_fs = __toESM(require("fs"));
|
||||
var playwright = __toESM(require("../.."));
|
||||
var import_pipeTransport = require("../server/utils/pipeTransport");
|
||||
var import_playwrightServer = require("../remote/playwrightServer");
|
||||
var import_server = require("../server");
|
||||
var import_processLauncher = require("../server/utils/processLauncher");
|
||||
function printApiJson() {
|
||||
console.log(JSON.stringify(require("../../api.json")));
|
||||
}
|
||||
function runDriver() {
|
||||
const dispatcherConnection = new import_server.DispatcherConnection();
|
||||
new import_server.RootDispatcher(dispatcherConnection, async (rootScope, { sdkLanguage }) => {
|
||||
const playwright2 = (0, import_server.createPlaywright)({ sdkLanguage });
|
||||
return new import_server.PlaywrightDispatcher(rootScope, playwright2);
|
||||
});
|
||||
const transport = new import_pipeTransport.PipeTransport(process.stdout, process.stdin);
|
||||
transport.onmessage = (message) => dispatcherConnection.dispatch(JSON.parse(message));
|
||||
const isJavaScriptLanguageBinding = !process.env.PW_LANG_NAME || process.env.PW_LANG_NAME === "javascript";
|
||||
const replacer = !isJavaScriptLanguageBinding && String.prototype.toWellFormed ? (key, value) => {
|
||||
if (typeof value === "string")
|
||||
return value.toWellFormed();
|
||||
return value;
|
||||
} : void 0;
|
||||
dispatcherConnection.onmessage = (message) => transport.send(JSON.stringify(message, replacer));
|
||||
transport.onclose = () => {
|
||||
dispatcherConnection.onmessage = () => {
|
||||
};
|
||||
(0, import_processLauncher.gracefullyProcessExitDoNotHang)(0);
|
||||
};
|
||||
process.on("SIGINT", () => {
|
||||
});
|
||||
}
|
||||
async function runServer(options) {
|
||||
const {
|
||||
port,
|
||||
host,
|
||||
path = "/",
|
||||
maxConnections = Infinity,
|
||||
extension
|
||||
} = options;
|
||||
const server = new import_playwrightServer.PlaywrightServer({ mode: extension ? "extension" : "default", path, maxConnections });
|
||||
const wsEndpoint = await server.listen(port, host);
|
||||
process.on("exit", () => server.close().catch(console.error));
|
||||
console.log("Listening on " + wsEndpoint);
|
||||
process.stdin.on("close", () => (0, import_processLauncher.gracefullyProcessExitDoNotHang)(0));
|
||||
}
|
||||
async function launchBrowserServer(browserName, configFile) {
|
||||
let options = {};
|
||||
if (configFile)
|
||||
options = JSON.parse(import_fs.default.readFileSync(configFile).toString());
|
||||
const browserType = playwright[browserName];
|
||||
const server = await browserType.launchServer(options);
|
||||
console.log(server.wsEndpoint());
|
||||
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
launchBrowserServer,
|
||||
printApiJson,
|
||||
runDriver,
|
||||
runServer
|
||||
});
|
||||
589
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/lib/cli/program.js
generated
vendored
Normal file
589
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/lib/cli/program.js
generated
vendored
Normal file
@@ -0,0 +1,589 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
// If the importer is in node compatibility mode or this is not an ESM
|
||||
// file that has been converted to a CommonJS file using a Babel-
|
||||
// compatible transform (i.e. "__esModule" has not been set), then set
|
||||
// "default" to the CommonJS "module.exports" for node compatibility.
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
var program_exports = {};
|
||||
__export(program_exports, {
|
||||
program: () => import_utilsBundle2.program
|
||||
});
|
||||
module.exports = __toCommonJS(program_exports);
|
||||
var import_fs = __toESM(require("fs"));
|
||||
var import_os = __toESM(require("os"));
|
||||
var import_path = __toESM(require("path"));
|
||||
var playwright = __toESM(require("../.."));
|
||||
var import_driver = require("./driver");
|
||||
var import_server = require("../server");
|
||||
var import_utils = require("../utils");
|
||||
var import_traceViewer = require("../server/trace/viewer/traceViewer");
|
||||
var import_utils2 = require("../utils");
|
||||
var import_ascii = require("../server/utils/ascii");
|
||||
var import_utilsBundle = require("../utilsBundle");
|
||||
var import_utilsBundle2 = require("../utilsBundle");
|
||||
const packageJSON = require("../../package.json");
|
||||
import_utilsBundle.program.version("Version " + (process.env.PW_CLI_DISPLAY_VERSION || packageJSON.version)).name(buildBasePlaywrightCLICommand(process.env.PW_LANG_NAME));
|
||||
import_utilsBundle.program.command("mark-docker-image [dockerImageNameTemplate]", { hidden: true }).description("mark docker image").allowUnknownOption(true).action(function(dockerImageNameTemplate) {
|
||||
(0, import_utils2.assert)(dockerImageNameTemplate, "dockerImageNameTemplate is required");
|
||||
(0, import_server.writeDockerVersion)(dockerImageNameTemplate).catch(logErrorAndExit);
|
||||
});
|
||||
commandWithOpenOptions("open [url]", "open page in browser specified via -b, --browser", []).action(function(url, options) {
|
||||
open(options, url).catch(logErrorAndExit);
|
||||
}).addHelpText("afterAll", `
|
||||
Examples:
|
||||
|
||||
$ open
|
||||
$ open -b webkit https://example.com`);
|
||||
commandWithOpenOptions(
|
||||
"codegen [url]",
|
||||
"open page and generate code for user actions",
|
||||
[
|
||||
["-o, --output <file name>", "saves the generated script to a file"],
|
||||
["--target <language>", `language to generate, one of javascript, playwright-test, python, python-async, python-pytest, csharp, csharp-mstest, csharp-nunit, java, java-junit`, codegenId()],
|
||||
["--test-id-attribute <attributeName>", "use the specified attribute to generate data test ID selectors"]
|
||||
]
|
||||
).action(async function(url, options) {
|
||||
await codegen(options, url);
|
||||
}).addHelpText("afterAll", `
|
||||
Examples:
|
||||
|
||||
$ codegen
|
||||
$ codegen --target=python
|
||||
$ codegen -b webkit https://example.com`);
|
||||
function printInstalledBrowsers(browsers2) {
|
||||
const browserPaths = /* @__PURE__ */ new Set();
|
||||
for (const browser of browsers2)
|
||||
browserPaths.add(browser.browserPath);
|
||||
console.log(` Browsers:`);
|
||||
for (const browserPath of [...browserPaths].sort())
|
||||
console.log(` ${browserPath}`);
|
||||
console.log(` References:`);
|
||||
const references = /* @__PURE__ */ new Set();
|
||||
for (const browser of browsers2)
|
||||
references.add(browser.referenceDir);
|
||||
for (const reference of [...references].sort())
|
||||
console.log(` ${reference}`);
|
||||
}
|
||||
// Prints installed browsers grouped by the Playwright version that installed
// them. Each entry carries `referenceDir` (the playwright-core package
// directory referencing the browser) and `browserPath`.
function printGroupedByPlaywrightVersion(browsers2) {
  // Resolve each distinct referenceDir to its package.json version only once.
  const dirToVersion = /* @__PURE__ */ new Map();
  for (const browser of browsers2) {
    if (dirToVersion.has(browser.referenceDir))
      continue;
    const packageJSON2 = require(import_path.default.join(browser.referenceDir, "package.json"));
    const version = packageJSON2.version;
    dirToVersion.set(browser.referenceDir, version);
  }
  // Bucket the entries by that version string.
  const groupedByPlaywrightMinorVersion = /* @__PURE__ */ new Map();
  for (const browser of browsers2) {
    const version = dirToVersion.get(browser.referenceDir);
    let entries = groupedByPlaywrightMinorVersion.get(version);
    if (!entries) {
      entries = [];
      groupedByPlaywrightMinorVersion.set(version, entries);
    }
    entries.push(browser);
  }
  // Sort versions numerically on major and minor; any remaining components
  // (patch / pre-release tags) compare lexicographically.
  const sortedVersions = [...groupedByPlaywrightMinorVersion.keys()].sort((a, b) => {
    const aComponents = a.split(".");
    const bComponents = b.split(".");
    const aMajor = parseInt(aComponents[0], 10);
    const bMajor = parseInt(bComponents[0], 10);
    if (aMajor !== bMajor)
      return aMajor - bMajor;
    const aMinor = parseInt(aComponents[1], 10);
    const bMinor = parseInt(bComponents[1], 10);
    if (aMinor !== bMinor)
      return aMinor - bMinor;
    return aComponents.slice(2).join(".").localeCompare(bComponents.slice(2).join("."));
  });
  for (const version of sortedVersions) {
    console.log(`
Playwright version: ${version}`);
    printInstalledBrowsers(groupedByPlaywrightMinorVersion.get(version));
  }
}
|
||||
// `playwright install [browser...]` - download the browser builds required by
// this Playwright version. Without arguments, installs the default set.
import_utilsBundle.program.command("install [browser...]").description("ensure browsers necessary for this version of Playwright are installed").option("--with-deps", "install system dependencies for browsers").option("--dry-run", "do not execute installation, only print information").option("--list", "prints list of browsers from all playwright installations").option("--force", "force reinstall of already installed browsers").option("--only-shell", "only install headless shell when installing chromium").option("--no-shell", "do not install chromium headless shell").action(async function(args, options) {
  // Warn when running via a global `npx playwright` without project
  // dependencies: the browsers may not match the project's Playwright version.
  if ((0, import_utils.isLikelyNpxGlobal)()) {
    console.error((0, import_ascii.wrapInASCIIBox)([
      `WARNING: It looks like you are running 'npx playwright install' without first`,
      `installing your project's dependencies.`,
      ``,
      `To avoid unexpected behavior, please install your dependencies first, and`,
      `then run Playwright's install command:`,
      ``,
      `  npm install`,
      `  npx playwright install`,
      ``,
      `If your project does not yet depend on Playwright, first install the`,
      `applicable npm package (most commonly @playwright/test), and`,
      `then run Playwright's install command to download the browsers:`,
      ``,
      `  npm install @playwright/test`,
      `  npx playwright install`,
      ``
    ].join("\n"), 1));
  }
  try {
    // --no-shell and --only-shell are mutually exclusive chromium variants.
    if (options.shell === false && options.onlyShell)
      throw new Error(`Only one of --no-shell and --only-shell can be specified`);
    const shell = options.shell === false ? "no" : options.onlyShell ? "only" : void 0;
    const executables = import_server.registry.resolveBrowsers(args, { shell });
    if (options.withDeps)
      await import_server.registry.installDeps(executables, !!options.dryRun);
    if (options.dryRun && options.list)
      throw new Error(`Only one of --dry-run and --list can be specified`);
    if (options.dryRun) {
      // Describe what would be downloaded without touching the disk.
      for (const executable of executables) {
        console.log(import_server.registry.calculateDownloadTitle(executable));
        console.log(`  Install location: ${executable.directory ?? "<system>"}`);
        if (executable.downloadURLs?.length) {
          const [url, ...fallbacks] = executable.downloadURLs;
          console.log(`  Download url: ${url}`);
          for (let i = 0; i < fallbacks.length; ++i)
            console.log(`  Download fallback ${i + 1}: ${fallbacks[i]}`);
        }
        console.log(``);
      }
    } else if (options.list) {
      const browsers2 = await import_server.registry.listInstalledBrowsers();
      printGroupedByPlaywrightVersion(browsers2);
    } else {
      await import_server.registry.install(executables, { force: options.force });
      // Host requirement validation failures are reported as warnings, not
      // errors - the browsers were already installed.
      await import_server.registry.validateHostRequirementsForExecutablesIfNeeded(executables, process.env.PW_LANG_NAME || "javascript").catch((e) => {
        e.name = "Playwright Host validation warning";
        console.error(e);
      });
    }
  } catch (e) {
    console.log(`Failed to install browsers
${e}`);
    (0, import_utils.gracefullyProcessExitDoNotHang)(1);
  }
}).addHelpText("afterAll", `

Examples:
  - $ install
    Install default browsers.

  - $ install chrome firefox
    Install custom browsers, supports ${import_server.registry.suggestedBrowsersToInstall()}.`);
|
||||
// `playwright uninstall` - remove browsers installed by this Playwright
// installation; --all removes browsers from every installation on the system.
import_utilsBundle.program.command("uninstall").description("Removes browsers used by this installation of Playwright from the system (chromium, firefox, webkit, ffmpeg). This does not include branded channels.").option("--all", "Removes all browsers used by any Playwright installation from the system.").action(async (options) => {
  // Ensure the registry's browser garbage collection actually runs.
  delete process.env.PLAYWRIGHT_SKIP_BROWSER_GC;
  await import_server.registry.uninstall(!!options.all).then(({ numberOfBrowsersLeft }) => {
    if (!options.all && numberOfBrowsersLeft > 0) {
      console.log("Successfully uninstalled Playwright browsers for the current Playwright installation.");
      console.log(`There are still ${numberOfBrowsersLeft} browsers left, used by other Playwright installations.
To uninstall Playwright browsers for all installations, re-run with --all flag.`);
    }
  }).catch(logErrorAndExit);
});
// `playwright install-deps [browser...]` - install OS-level packages the
// browsers need (may prompt for sudo).
import_utilsBundle.program.command("install-deps [browser...]").description("install dependencies necessary to run browsers (will ask for sudo permissions)").option("--dry-run", "Do not execute installation commands, only print them").action(async function(args, options) {
  try {
    await import_server.registry.installDeps(import_server.registry.resolveBrowsers(args, {}), !!options.dryRun);
  } catch (e) {
    console.log(`Failed to install browser dependencies
${e}`);
    (0, import_utils.gracefullyProcessExitDoNotHang)(1);
  }
}).addHelpText("afterAll", `
Examples:
  - $ install-deps
    Install dependencies for default browsers.

  - $ install-deps chrome firefox
    Install dependencies for specific browsers, supports ${import_server.registry.suggestedBrowsersToInstall()}.`);
|
||||
// Convenience per-browser aliases: `playwright cr|ff|wk [url]`.
const browsers = [
  { alias: "cr", name: "Chromium", type: "chromium" },
  { alias: "ff", name: "Firefox", type: "firefox" },
  { alias: "wk", name: "WebKit", type: "webkit" }
];
for (const { alias, name, type } of browsers) {
  commandWithOpenOptions(`${alias} [url]`, `open page in ${name}`, []).action(function(url, options) {
    open({ ...options, browser: type }, url).catch(logErrorAndExit);
  }).addHelpText("afterAll", `
Examples:

  $ ${alias} https://example.com`);
}
// `playwright screenshot <url> <filename>` - capture a screenshot after the
// optional wait conditions are satisfied.
commandWithOpenOptions(
  "screenshot <url> <filename>",
  "capture a page screenshot",
  [
    ["--wait-for-selector <selector>", "wait for selector before taking a screenshot"],
    ["--wait-for-timeout <timeout>", "wait for timeout in milliseconds before taking a screenshot"],
    ["--full-page", "whether to take a full page screenshot (entire scrollable area)"]
  ]
).action(function(url, filename, command) {
  // `command` carries both the open options and the capture options, so it is
  // passed for both parameters.
  screenshot(command, command, url, filename).catch(logErrorAndExit);
}).addHelpText("afterAll", `
Examples:

  $ screenshot -b webkit https://example.com example.png`);
// `playwright pdf <url> <filename>` - save the page as PDF (Chromium only).
commandWithOpenOptions(
  "pdf <url> <filename>",
  "save page as pdf",
  [
    ["--paper-format <format>", "paper format: Letter, Legal, Tabloid, Ledger, A0, A1, A2, A3, A4, A5, A6"],
    ["--wait-for-selector <selector>", "wait for given selector before saving as pdf"],
    ["--wait-for-timeout <timeout>", "wait for given timeout in milliseconds before saving as pdf"]
  ]
).action(function(url, filename, options) {
  pdf(options, options, url, filename).catch(logErrorAndExit);
}).addHelpText("afterAll", `
Examples:

  $ pdf https://example.com example.pdf`);
|
||||
// Hidden command used by language ports: runs the driver over stdio.
import_utilsBundle.program.command("run-driver", { hidden: true }).action(function(options) {
  (0, import_driver.runDriver)();
});
// Hidden command: starts a Playwright server accepting remote connections.
import_utilsBundle.program.command("run-server", { hidden: true }).option("--port <port>", "Server port").option("--host <host>", "Server host").option("--path <path>", "Endpoint Path", "/").option("--max-clients <maxClients>", "Maximum clients").option("--mode <mode>", 'Server mode, either "default" or "extension"').action(function(options) {
  (0, import_driver.runServer)({
    port: options.port ? +options.port : void 0,
    host: options.host,
    path: options.path,
    maxConnections: options.maxClients ? +options.maxClients : Infinity,
    extension: options.mode === "extension" || !!process.env.PW_EXTENSION_MODE
  }).catch(logErrorAndExit);
});
// Hidden command: dumps the API model as JSON.
import_utilsBundle.program.command("print-api-json", { hidden: true }).action(function(options) {
  (0, import_driver.printApiJson)();
});
// Hidden command: launches a standalone browser server for the given engine.
import_utilsBundle.program.command("launch-server", { hidden: true }).requiredOption("--browser <browserName>", 'Browser name, one of "chromium", "firefox" or "webkit"').option("--config <path-to-config-file>", "JSON file with launchServer options").action(function(options) {
  (0, import_driver.launchBrowserServer)(options.browser, options.config);
});
// `playwright show-trace [trace]` - open the trace viewer either as a desktop
// app (default) or served over HTTP when --host/--port is specified.
import_utilsBundle.program.command("show-trace [trace]").option("-b, --browser <browserType>", "browser to use, one of cr, chromium, ff, firefox, wk, webkit", "chromium").option("-h, --host <host>", "Host to serve trace on; specifying this option opens trace in a browser tab").option("-p, --port <port>", "Port to serve trace on, 0 for any free port; specifying this option opens trace in a browser tab").option("--stdin", "Accept trace URLs over stdin to update the viewer").description("show trace viewer").action(function(trace, options) {
  // Expand the two-letter browser aliases.
  if (options.browser === "cr")
    options.browser = "chromium";
  if (options.browser === "ff")
    options.browser = "firefox";
  if (options.browser === "wk")
    options.browser = "webkit";
  const openOptions = {
    host: options.host,
    port: +options.port,
    isServer: !!options.stdin
  };
  // Explicit host/port means "serve in a browser tab"; otherwise open the app.
  if (options.port !== void 0 || options.host !== void 0)
    (0, import_traceViewer.runTraceInBrowser)(trace, openOptions).catch(logErrorAndExit);
  else
    (0, import_traceViewer.runTraceViewerApp)(trace, options.browser, openOptions, true).catch(logErrorAndExit);
}).addHelpText("afterAll", `
Examples:

  $ show-trace
  $ show-trace https://example.com/trace.zip`);
|
||||
// Launches a browser and context configured from the shared CLI `options`
// (see commandWithOpenOptions). Returns handles plus a `closeBrowser` callback
// that persists storage state / HAR before shutdown.
// NOTE(review): `extraOptions` is aliased as `launchOptions` (not copied) and
// is mutated throughout - callers should not reuse the object.
async function launchContext(options, extraOptions) {
  validateOptions(options);
  const browserType = lookupBrowserType(options);
  const launchOptions = extraOptions;
  if (options.channel)
    launchOptions.channel = options.channel;
  // SIGINT is handled manually below so storage/HAR can be saved on Ctrl+C.
  launchOptions.handleSIGINT = false;
  const contextOptions = (
    // Copy the device descriptor since we have to compare and modify the options.
    options.device ? { ...playwright.devices[options.device] } : {}
  );
  if (!extraOptions.headless)
    contextOptions.deviceScaleFactor = import_os.default.platform() === "darwin" ? 2 : 1;
  // Touch/mobile emulation from device descriptors is dropped for WebKit on
  // Linux, and isMobile is dropped for Firefox.
  if (browserType.name() === "webkit" && process.platform === "linux") {
    delete contextOptions.hasTouch;
    delete contextOptions.isMobile;
  }
  if (contextOptions.isMobile && browserType.name() === "firefox")
    contextOptions.isMobile = void 0;
  if (options.blockServiceWorkers)
    contextOptions.serviceWorkers = "block";
  if (options.proxyServer) {
    launchOptions.proxy = {
      server: options.proxyServer
    };
    if (options.proxyBypass)
      launchOptions.proxy.bypass = options.proxyBypass;
  }
  // --viewport-size "width,height"
  if (options.viewportSize) {
    try {
      const [width, height] = options.viewportSize.split(",").map((n) => +n);
      if (isNaN(width) || isNaN(height))
        throw new Error("bad values");
      contextOptions.viewport = { width, height };
    } catch (e) {
      throw new Error('Invalid viewport size format: use "width,height", for example --viewport-size="800,600"');
    }
  }
  // --geolocation "lat,long"; also grants the geolocation permission so the
  // page can read it without prompting.
  if (options.geolocation) {
    try {
      const [latitude, longitude] = options.geolocation.split(",").map((n) => parseFloat(n.trim()));
      contextOptions.geolocation = {
        latitude,
        longitude
      };
    } catch (e) {
      throw new Error('Invalid geolocation format, should be "lat,long". For example --geolocation="37.819722,-122.478611"');
    }
    contextOptions.permissions = ["geolocation"];
  }
  if (options.userAgent)
    contextOptions.userAgent = options.userAgent;
  if (options.lang)
    contextOptions.locale = options.lang;
  if (options.colorScheme)
    contextOptions.colorScheme = options.colorScheme;
  if (options.timezone)
    contextOptions.timezoneId = options.timezone;
  if (options.loadStorage)
    contextOptions.storageState = options.loadStorage;
  if (options.ignoreHttpsErrors)
    contextOptions.ignoreHTTPSErrors = true;
  // HAR recording; service workers are blocked so all traffic is captured.
  if (options.saveHar) {
    contextOptions.recordHar = { path: import_path.default.resolve(process.cwd(), options.saveHar), mode: "minimal" };
    if (options.saveHarGlob)
      contextOptions.recordHar.urlFilter = options.saveHarGlob;
    contextOptions.serviceWorkers = "block";
  }
  let browser;
  let context;
  if (options.userDataDir) {
    // Persistent profile: launch and context are created in one call.
    context = await browserType.launchPersistentContext(options.userDataDir, { ...launchOptions, ...contextOptions });
    browser = context.browser();
  } else {
    browser = await browserType.launch(launchOptions);
    context = await browser.newContext(contextOptions);
  }
  let closingBrowser = false;
  // Idempotent shutdown: saves storage state and flushes the HAR (context
  // close) before closing the browser.
  async function closeBrowser() {
    if (closingBrowser)
      return;
    closingBrowser = true;
    if (options.saveStorage)
      await context.storageState({ path: options.saveStorage }).catch((e) => null);
    if (options.saveHar)
      await context.close();
    await browser.close();
  }
  context.on("page", (page) => {
    // NOTE(review): the no-op "dialog" listener appears intended to keep
    // dialogs from being auto-dismissed - confirm against Playwright's dialog
    // auto-dismiss semantics.
    page.on("dialog", () => {
    });
    // Close the browser once the last page across all contexts is closed.
    page.on("close", () => {
      const hasPage = browser.contexts().some((context2) => context2.pages().length > 0);
      if (hasPage)
        return;
      closeBrowser().catch(() => {
      });
    });
  });
  process.on("SIGINT", async () => {
    await closeBrowser();
    (0, import_utils.gracefullyProcessExitDoNotHang)(130);
  });
  // A timeout of 0 disables Playwright's default action/navigation timeouts.
  const timeout = options.timeout ? parseInt(options.timeout, 10) : 0;
  context.setDefaultTimeout(timeout);
  context.setDefaultNavigationTimeout(timeout);
  // Strip internal/launch-only fields before the options objects are returned
  // (codegen echoes them into generated code).
  delete launchOptions.headless;
  delete launchOptions.executablePath;
  delete launchOptions.handleSIGINT;
  delete contextOptions.deviceScaleFactor;
  return { browser, browserName: browserType.name(), context, contextOptions, launchOptions, closeBrowser };
}
|
||||
// Returns a page in `context` (reusing the first existing page, or creating
// one) and navigates it to `url` when given. Bare file-system paths become
// file:// URLs; anything without a recognized scheme gets an http:// prefix.
async function openPage(context, url) {
  const page = context.pages()[0] ?? (await context.newPage());
  if (url) {
    let target = url;
    if (import_fs.default.existsSync(target)) {
      // Existing local path: turn it into a file:// URL.
      target = "file://" + import_path.default.resolve(target);
    } else {
      const hasKnownScheme = ["http", "file://", "about:", "data:"].some((prefix) => target.startsWith(prefix));
      if (!hasKnownScheme)
        target = "http://" + target;
    }
    await page.goto(target);
  }
  return page;
}
|
||||
// Implements the `open` command: launches a context (headless only when the
// PWTEST_CLI_HEADLESS test env var is set), exposes the console API for
// debugging, and navigates to `url` if given.
async function open(options, url) {
  const { context } = await launchContext(options, { headless: !!process.env.PWTEST_CLI_HEADLESS, executablePath: process.env.PWTEST_CLI_EXECUTABLE_PATH });
  await context._exposeConsoleApi();
  await openPage(context, url);
}
|
||||
// Implements the `codegen` command: launches a browser with the recorder
// enabled and emits generated code for the user's actions.
async function codegen(options, url) {
  const { target: language, output: outputFile, testIdAttribute: testIdAttributeName } = options;
  // Recorder traces go into a throwaway directory in the OS temp folder.
  const tracesDir = import_path.default.join(import_os.default.tmpdir(), `playwright-recorder-trace-${Date.now()}`);
  const { context, browser, launchOptions, contextOptions, closeBrowser } = await launchContext(options, {
    headless: !!process.env.PWTEST_CLI_HEADLESS,
    executablePath: process.env.PWTEST_CLI_EXECUTABLE_PATH,
    tracesDir
  });
  const donePromise = new import_utils.ManualPromise();
  // Test-only hooks; no-op unless PWTEST_CLI_IS_UNDER_TEST is set.
  maybeSetupTestHooks(browser, closeBrowser, donePromise);
  import_utilsBundle.dotenv.config({ path: "playwright.env" });
  await context._enableRecorder({
    language,
    launchOptions,
    contextOptions,
    device: options.device,
    saveStorage: options.saveStorage,
    mode: "recording",
    testIdAttributeName,
    outputFile: outputFile ? import_path.default.resolve(outputFile) : void 0,
    handleSIGINT: false
  });
  await openPage(context, url);
  donePromise.resolve();
}
|
||||
// Wires up hooks used exclusively by Playwright's own CLI tests (enabled via
// PWTEST_CLI_IS_UNDER_TEST): crash detection from debug logs, plus a way to
// terminate the recorder via a timeout or an "exit" message on stdin.
async function maybeSetupTestHooks(browser, closeBrowser, donePromise) {
  if (!process.env.PWTEST_CLI_IS_UNDER_TEST)
    return;
  // Mirror debug log lines to stderr while keeping them for crash detection.
  const logs = [];
  require("playwright-core/lib/utilsBundle").debug.log = (...args) => {
    const line = require("util").format(...args) + "\n";
    logs.push(line);
    process.stderr.write(line);
  };
  browser.on("disconnected", () => {
    // Any non-clean browser process exit recorded in the logs counts as a crash.
    const hasCrashLine = logs.some((line) => line.includes("process did exit:") && !line.includes("process did exit: exitCode=0, signal=null"));
    if (hasCrashLine) {
      process.stderr.write("Detected browser crash.\n");
      (0, import_utils.gracefullyProcessExitDoNotHang)(1);
    }
  });
  // Shutdown waits for the caller to signal completion via donePromise first.
  const close = async () => {
    await donePromise;
    await closeBrowser();
  };
  if (process.env.PWTEST_CLI_EXIT_AFTER_TIMEOUT) {
    setTimeout(close, +process.env.PWTEST_CLI_EXIT_AFTER_TIMEOUT);
    return;
  }
  // Otherwise wait for the test harness to write "exit" on stdin.
  let stdin = "";
  process.stdin.on("data", (data) => {
    stdin += data.toString();
    if (stdin.startsWith("exit")) {
      process.stdin.destroy();
      close();
    }
  });
}
|
||||
// Applies the optional pre-capture wait conditions from the CLI: first a
// selector wait, then a fixed timeout (the CLI passes the timeout as a string).
async function waitForPage(page, captureOptions) {
  const { waitForSelector, waitForTimeout } = captureOptions;
  if (waitForSelector) {
    console.log(`Waiting for selector ${waitForSelector}...`);
    await page.waitForSelector(waitForSelector);
  }
  if (waitForTimeout) {
    console.log(`Waiting for timeout ${waitForTimeout}...`);
    await page.waitForTimeout(parseInt(waitForTimeout, 10));
  }
}
|
||||
// Implements the `screenshot` command: navigate, apply wait conditions, then
// write the screenshot to `path2`. Always runs headless.
async function screenshot(options, captureOptions, url, path2) {
  const { context } = await launchContext(options, { headless: true });
  console.log(`Navigating to ${url}`);
  const page = await openPage(context, url);
  await waitForPage(page, captureOptions);
  console.log(`Capturing screenshot into ${path2}`);
  await page.screenshot({ path: path2, fullPage: Boolean(captureOptions.fullPage) });
  // Closing the last page triggers browser shutdown via launchContext's
  // page-close handler.
  await page.close();
}
|
||||
// Implements the `pdf` command: navigate, apply wait conditions, then save
// the page as PDF. page.pdf() is Chromium-only, so other engines are rejected.
async function pdf(options, captureOptions, url, path2) {
  if (options.browser !== "chromium")
    throw new Error("PDF creation is only working with Chromium");
  const chromiumOptions = { ...options, browser: "chromium" };
  const { context } = await launchContext(chromiumOptions, { headless: true });
  console.log(`Navigating to ${url}`);
  const page = await openPage(context, url);
  await waitForPage(page, captureOptions);
  console.log(`Saving as pdf into ${path2}`);
  await page.pdf({ path: path2, format: captureOptions.paperFormat });
  // Closing the last page triggers browser shutdown via launchContext's
  // page-close handler.
  await page.close();
}
|
||||
// Resolves the BrowserType for the CLI options. A device descriptor pins the
// engine it was recorded on; otherwise --browser decides, accepting both full
// engine names and their two-letter aliases. Unknown values print usage help.
function lookupBrowserType(options) {
  let name = options.browser;
  if (options.device) {
    const device = playwright.devices[options.device];
    name = device.defaultBrowserType;
  }
  // Map kept engine lookups in a Map so inherited Object properties can never
  // match an arbitrary name.
  const typeByName = new Map([
    ["chromium", playwright.chromium],
    ["cr", playwright.chromium],
    ["webkit", playwright.webkit],
    ["wk", playwright.webkit],
    ["firefox", playwright.firefox],
    ["ff", playwright.firefox]
  ]);
  const browserType = typeByName.get(name);
  if (browserType)
    return browserType;
  // Unrecognized browser: show help and exit.
  import_utilsBundle.program.help();
}
|
||||
// Validates CLI options that have a fixed set of legal values, throwing a
// descriptive Error on the first problem found.
function validateOptions(options) {
  // Unknown device: fail with the full descriptor list so the user can pick one.
  if (options.device && !(options.device in playwright.devices)) {
    const available = Object.keys(playwright.devices).map((name) => `  "${name}"`);
    const message = [`Device descriptor not found: '${options.device}', available devices are:`, ...available].join("\n");
    throw new Error(message);
  }
  const validSchemes = ["light", "dark"];
  if (options.colorScheme && !validSchemes.includes(options.colorScheme))
    throw new Error('Invalid color scheme, should be one of "light", "dark"');
}
|
||||
// Reports a fatal error and exits with code 1. The full error object (with
// stack) is shown only in internal debug mode; users get a one-line summary.
function logErrorAndExit(e) {
  const verbose = Boolean(process.env.PWDEBUGIMPL);
  console.error(verbose ? e : `${e.name}: ${e.message}`);
  (0, import_utils.gracefullyProcessExitDoNotHang)(1);
}
|
||||
// Default codegen target language: the language port's name when running
// under a port (PW_LANG_NAME), otherwise "playwright-test".
function codegenId() {
  const portLanguage = process.env.PW_LANG_NAME;
  return portLanguage ? portLanguage : "playwright-test";
}
|
||||
// Creates a commander command with `description`, the given extra `options`
// (each an array of [flags, description, ...rest] forwarded to .option()),
// plus the shared set of browser/context flags used by open, codegen,
// screenshot, pdf and the cr/ff/wk alias commands.
function commandWithOpenOptions(command, description, options) {
  let result = import_utilsBundle.program.command(command).description(description);
  for (const option of options)
    result = result.option(option[0], ...option.slice(1));
  return result.option("-b, --browser <browserType>", "browser to use, one of cr, chromium, ff, firefox, wk, webkit", "chromium").option("--block-service-workers", "block service workers").option("--channel <channel>", 'Chromium distribution channel, "chrome", "chrome-beta", "msedge-dev", etc').option("--color-scheme <scheme>", 'emulate preferred color scheme, "light" or "dark"').option("--device <deviceName>", 'emulate device, for example "iPhone 11"').option("--geolocation <coordinates>", 'specify geolocation coordinates, for example "37.819722,-122.478611"').option("--ignore-https-errors", "ignore https errors").option("--load-storage <filename>", "load context storage state from the file, previously saved with --save-storage").option("--lang <language>", 'specify language / locale, for example "en-GB"').option("--proxy-server <proxy>", 'specify proxy server, for example "http://myproxy:3128" or "socks5://myproxy:8080"').option("--proxy-bypass <bypass>", 'comma-separated domains to bypass proxy, for example ".com,chromium.org,.domain.com"').option("--save-har <filename>", "save HAR file with all network activity at the end").option("--save-har-glob <glob pattern>", "filter entries in the HAR by matching url against this glob pattern").option("--save-storage <filename>", "save context storage state at the end, for later use with --load-storage").option("--timezone <time zone>", 'time zone to emulate, for example "Europe/Rome"').option("--timeout <timeout>", "timeout for Playwright actions in milliseconds, no timeout by default").option("--user-agent <ua string>", "specify user agent string").option("--user-data-dir <directory>", "use the specified user data directory instead of a new context").option("--viewport-size <size>", 'specify browser viewport size in pixels, for example "1280, 720"');
}
|
||||
// Returns the shell command prefix users of each language port run the
// Playwright CLI with; the Node.js default is prefixed with the detected
// package manager's exec runner (npx / yarn / pnpm).
function buildBasePlaywrightCLICommand(cliTargetLang) {
  if (cliTargetLang === "python")
    return `playwright`;
  if (cliTargetLang === "java")
    return `mvn exec:java -e -D exec.mainClass=com.microsoft.playwright.CLI -D exec.args="...options.."`;
  if (cliTargetLang === "csharp")
    return `pwsh bin/Debug/netX/playwright.ps1`;
  const packageManagerCommand = (0, import_utils2.getPackageManagerExecCommand)();
  return `${packageManagerCommand} playwright`;
}
|
||||
// Annotate the CommonJS export names for ESM import in node:
// (dead code - the `0 &&` guard means it never executes; it exists only so
// Node's CJS named-export detection can see the exported names)
0 && (module.exports = {
  program
});
|
||||
74
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/lib/cli/programWithTestStub.js
generated
vendored
Normal file
74
node_modules/.deno/playwright-core@1.58.2/node_modules/playwright-core/lib/cli/programWithTestStub.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
"use strict";
// esbuild-generated CommonJS interop helpers.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines lazy, enumerable getters on `target` for every key of `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` (skipping `except` and keys `to`
// already owns), preserving enumerability via getter indirection.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// This module re-exports `program` from ./program, adding test-command stubs.
var programWithTestStub_exports = {};
__export(programWithTestStub_exports, {
  program: () => import_program2.program
});
module.exports = __toCommonJS(programWithTestStub_exports);
var import_processLauncher = require("../server/utils/processLauncher");
var import_utils = require("../utils");
var import_program = require("./program");
var import_program2 = require("./program");
|
||||
// Prints an actionable error explaining that `playwright <command>` requires
// the @playwright/test package, with remove/add instructions matching the
// user's package manager (yarn / pnpm / npm).
function printPlaywrightTestError(command) {
  // Collect which standalone playwright packages are currently resolvable so
  // the instructions tell the user to remove exactly those.
  const packages = [];
  for (const pkg of ["playwright", "playwright-chromium", "playwright-firefox", "playwright-webkit"]) {
    try {
      require.resolve(pkg);
      packages.push(pkg);
    } catch (e) {
    }
  }
  // Fall back to the base package name when none resolve.
  if (!packages.length)
    packages.push("playwright");
  const packageManager = (0, import_utils.getPackageManager)();
  if (packageManager === "yarn") {
    console.error(`Please install @playwright/test package before running "yarn playwright ${command}"`);
    console.error(`  yarn remove ${packages.join(" ")}`);
    console.error("  yarn add -D @playwright/test");
  } else if (packageManager === "pnpm") {
    console.error(`Please install @playwright/test package before running "pnpm exec playwright ${command}"`);
    console.error(`  pnpm remove ${packages.join(" ")}`);
    console.error("  pnpm add -D @playwright/test");
  } else {
    console.error(`Please install @playwright/test package before running "npx playwright ${command}"`);
    console.error(`  npm uninstall ${packages.join(" ")}`);
    console.error("  npm install -D @playwright/test");
  }
}
|
||||
// Commands implemented by @playwright/test; stubbed below so plain
// playwright-core prints a helpful error instead of "unknown command".
const kExternalPlaywrightTestCommands = [
  ["test", "Run tests with Playwright Test."],
  ["show-report", "Show Playwright Test HTML report."],
  ["merge-reports", "Merge Playwright Test Blob reports"]
];
|
||||
// Registers a stub for each @playwright/test command that prints installation
// instructions and exits with code 1.
function addExternalPlaywrightTestCommands() {
  for (const [command, description] of kExternalPlaywrightTestCommands) {
    // Accept any flags/arguments so the stub matches however the real command
    // would have been invoked.
    const playwrightTest = import_program.program.command(command).allowUnknownOption(true).allowExcessArguments(true);
    playwrightTest.description(`${description} Available in @playwright/test package.`);
    playwrightTest.action(async () => {
      printPlaywrightTestError(command);
      (0, import_processLauncher.gracefullyProcessExitDoNotHang)(1);
    });
  }
}
|
||||
// Language ports (PW_LANG_NAME set) do not get the Node.js test-runner stubs.
if (!process.env.PW_LANG_NAME)
  addExternalPlaywrightTestCommands();
// Annotate the CommonJS export names for ESM import in node:
// (dead code - the `0 &&` guard means it never executes)
0 && (module.exports = {
  program
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user