Compare commits
3 commits: 30838a1523 ... 0d14ef6142

| Author | SHA1 | Date |
| --- | --- | --- |
| | 0d14ef6142 | |
| | 23b8101426 | |
| | d0cdbf55a3 | |

18 changed files with 1730 additions and 106 deletions
Cargo.lock (generated): 54 changes

@@ -172,6 +172,7 @@ dependencies = [
"reqwest",
"rstest",
"scraper",
"secrecy",
"serde",
"serde-env",
"serde-hex",
@@ -180,11 +181,13 @@ dependencies = [
"syntect",
"temp_testdir",
"thiserror",
"time",
"tokio",
"tokio-util",
"tower-http",
"tracing",
"tracing-subscriber",
"unic-emoji-char",
"url",
"yarte",
"yarte_helpers",
@@ -2468,6 +2471,16 @@ dependencies = [
"tendril",
]

[[package]]
name = "secrecy"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bd1c54ea06cfd2f6b63219704de0b9b4f72dcc2b8fdef820be6cd799780e91e"
dependencies = [
"serde",
"zeroize",
]

[[package]]
name = "security-framework"
version = "2.11.0"
@@ -3132,6 +3145,47 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"

[[package]]
name = "unic-char-property"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8c57a407d9b6fa02b4795eb81c5b6652060a15a7903ea981f3d723e6c0be221"
dependencies = [
"unic-char-range",
]

[[package]]
name = "unic-char-range"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0398022d5f700414f6b899e10b8348231abf9173fa93144cbc1a43b9793c1fbc"

[[package]]
name = "unic-common"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc"

[[package]]
name = "unic-emoji-char"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b07221e68897210270a38bde4babb655869637af0f69407f96053a34f76494d"
dependencies = [
"unic-char-property",
"unic-char-range",
"unic-ucd-version",
]

[[package]]
name = "unic-ucd-version"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96bd2f2237fe450fcd0a1d2f5f4e91711124f7857ba2e964247776ebeeb7b0c4"
dependencies = [
"unic-common",
]

[[package]]
name = "unicase"
version = "2.7.0"
Cargo.toml

@@ -25,6 +25,7 @@ axum = { version = "0.7.5", default-features = false, features = [
"http1",
"http2",
"json",
"query",
"tokio",
"tracing",
] }
@@ -52,6 +53,7 @@ reqwest = { version = "0.12.4", default-features = false, features = [
"json",
"stream",
] }
secrecy = { version = "0.8.0", features = ["serde"] }
serde = { version = "1.0.203", features = ["derive"] }
serde-env = "0.1.1"
serde-hex = "0.1.0"
@@ -65,11 +67,13 @@ syntect = { version = "5.2.0", default-features = false, features = [
"regex-onig",
] }
thiserror = "1.0.61"
time = { version = "0.3.36", features = ["serde-human-readable", "macros"] }
tokio = { version = "1.37.0", features = ["macros", "fs", "rt-multi-thread"] }
tokio-util = { version = "0.7.11", features = ["io"] }
tower-http = { version = "0.5.2", features = ["trace", "set-header"] }
tracing = "0.1.40"
tracing-subscriber = "0.3.18"
unic-emoji-char = "0.9.0"
url = "2.5.0"
yarte = { version = "0.15.7", features = ["json"] }
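The new `secrecy` dependency (with its `serde` feature) is what lets the configuration hold API tokens without leaking them through `Debug` output. A minimal sketch of the pattern, assuming the secrecy 0.8 API pinned above (the token string is a made-up placeholder):

```rust
use secrecy::{ExposeSecret, Secret};

fn main() {
    // Wrapping the token hides it from Debug/log output ...
    let token = Secret::new("ghp_example123".to_string());
    println!("{token:?}"); // prints: Secret([REDACTED alloc::string::String])

    // ... and the raw value has to be requested explicitly.
    let auth_header = format!("Bearer {}", token.expose_secret());
    assert!(auth_header.ends_with("ghp_example123"));
}
```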
README.md: 212 changes

@@ -1,23 +1,28 @@
# Artifactview

View CI build artifacts from Forgejo/Github using your web browser.
View CI build artifacts from Forgejo/GitHub using your web browser!

Forgejo and GitHub's CI systems allow you to upload files and directories as
[artifacts](https://github.com/actions/upload-artifact). These can be downloaded as zip
files. However there is no simple way to view individual files of an artifact.

Artifactview is a small web application that fetches these CI artifacts and displays
their contents.
That's why I developed Artifactview. It is a small web application that fetches these CI
artifacts and serves their contents.

It offers full support for single page applications and custom 404 error pages.
Single-page applications require a file named `200.html` placed in the root directory,
which will be served in case no file exists for the requested path. A custom 404 error
page is defined using a file named `404.html` in the root directory.
It is a valuable tool in open source software development: you can quickly look at test
reports or coverage data or showcase your single page web applications to your
teammates.

Artifactview displays a file listing if there is no `index.html` or fallback page
present, so you can browse artifacts that dont contain websites.
## Features

![Artifact file listing](resources/screenshotFiles.png)
- 📦 Quickly view CI artifacts in your browser without messing with zip files
- 📂 File listing for directories without index page
- 🏠 Every artifact has a unique subdomain to support pages with absolute paths
- 🌎 Full SPA support with `200.html` and `404.html` fallback pages
- 👁️ Viewer for Markdown, syntax-highlighted code and JUnit test reports
- 🐵 Greasemonkey userscript to automatically add a "View artifact" button to
  GitHub/Gitea/Forgejo
- 🦀 Fast and efficient, only extracts files from zip archive if necessary

## How to use
@@ -27,6 +32,151 @@ box on the main page. You can also pass the run URL with the `?url=` parameter.
Artifactview will show you a selection page where you will be able to choose the
artifact you want to browse.

If there is no `index.html` or fallback page present, a file listing will be shown so
you can browse the contents of the artifact.

![Artifact file listing](resources/screenshotFiles.png)

If you want to use Artifactview to showcase a static website, you can make use of
fallback pages. If a file named `200.html` is placed in the root directory, it will be
served in case no file exists for the requested path. This allows serving single-page
applications with custom routing. A custom 404 error page is defined using a file named
`404.html` in the root directory.

The behavior is the same as with other web hosts like surge.sh, so a lot of website
build tools already follow that convention.

Artifactview includes different viewers to better display files of certain types that
browsers cannot handle by default. There is a renderer for markdown files as well as a
syntax highlighter for source code files. The viewers are only shown if the files are
accessed with the `?viewer=` URL parameter which is automatically set when opening a
file from a directory listing. You can always download the raw version of the file via
the link in the top right corner.

![Code viewer](resources/screenshotCode.png)

Artifactview even includes an interactive viewer for JUnit test reports (XML files with
`junit` in their filename). The application has been designed to be easily extendable,
so if you have suggestions on other viewers that should be added, feel free to create an
issue or a PR.

![JUnit report viewer](resources/screenshotJUnit.png)

Accessing Artifactview by copying the CI run URL into its homepage may be a little bit
tedious. That's why there are some convenient alternatives available.

You can install the Greasemonkey userscript from the link at the bottom of the homepage.
The script adds a "View artifact" link with an eye icon next to every CI artifact on
both GitHub and Forgejo.

If you want to give every collaborator to your project easy access to previews, you can
use Artifactview to automatically create pull request comments with links to the
artifacts.

![Pull request comment](./resources/screenshotPrComment.png)

To accomplish that, simply add this step to your CI workflow (after uploading the
artifacts).

```yaml
- name: 🔗 Artifactview PR comment
  if: ${{ always() && github.event_name == 'pull_request' }}
  run: |
    curl -X POST https://av.thetadev.de/.well-known/api/prComment -H "Content-Type: application/json" --data "{\"url\": \"$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID\", \"pr\": ${{ github.event.number }}}"
```

## API

Artifactview has an HTTP API to access data about the CI artifacts. To make the API
available to every site without interfering with any paths from the artifacts, the
endpoints are located within the reserved `/.well-known/api` directory.

### Get list of artifacts of a CI run

`GET /.well-known/api/artifacts?url=<RUN_URL>`

`GET <HOST>--<USER>--<REPO>--<RUN>-<ARTIFACT>.example.com/.well-known/api/artifacts`

**Response**

**Note:** the difference between `download_url` and `user_download_url` is that the
first one is used by the API client and the second one is shown to the user.
`user_download_url` is only set for GitHub artifacts. Forgejo does not have different
download URLs since it does not require authentication to download artifacts.

```json
[
  {
    "id": 1,
    "name": "Example",
    "size": 1523222,
    "expired": false,
    "download_url": "https://codeberg.org/thetadev/artifactview/actions/runs/28/artifacts/Example",
    "user_download_url": null
  }
]
```

### Get metadata of the current artifact

`GET <HOST>--<USER>--<REPO>--<RUN>-<ARTIFACT>.example.com/.well-known/api/artifact`

**Response**

```json
{
  "id": 1,
  "name": "Example",
  "size": 1523222,
  "expired": false,
  "download_url": "https://codeberg.org/thetadev/artifactview/actions/runs/28/artifacts/Example",
  "user_download_url": null
}
```

### Get all files from the artifact

`GET <HOST>--<USER>--<REPO>--<RUN>-<ARTIFACT>.example.com/.well-known/api/files`

**Response**

```json
[
  { "name": "example.rs", "size": 406, "crc32": "2013120c" },
  { "name": "README.md", "size": 13060, "crc32": "61c692f0" }
]
```

### Create a pull request comment

`POST /.well-known/api/prComment`

Artifactview can create a comment under a pull request containing links to view the
artifacts. This way everyone looking at a project can easily access the artifact
previews.

To use this feature, you need to set up an access token with the permission to create
comments for every code forge you want to use (more details in the section
[Access tokens](#access-tokens)).

To prevent abuse and spamming, this endpoint is rate-limited and Artifactview will only
create comments after it verified that the workflow matches the given pull request and
the workflow is still running.

| JSON parameter | Description |
| ----------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `url` (string) ❕ | CI workflow URL<br />Example: https://codeberg.org/ThetaDev/artifactview/actions/runs/31 |
| `pr` (int) ❕ | Pull request number |
| `recreate` (bool) | If set to true, the pull request comment will be deleted and recreated if it already exists. If set to false or omitted, the comment will be edited instead. |
| `title` (string) | Comment title (default: "Latest build artifacts") |
| `artifact_titles` (map) | Set custom titles for your artifacts.<br />Example: `{"Hello": "🏠 Hello World ;-)"}` |

**Response**

```json
{ "status": 200, "msg": "created comment #2183634497" }
```

## Setup

You can run artifactview using the docker image provided under
@@ -70,8 +220,14 @@ networks:

Artifactview is configured using environment variables.

Note that some variables contain lists and maps of values. Lists need to have their
values separated with semicolons. Maps use an arrow `=>` between key and value, with
pairs separated by semicolons.

Example list: `foo;bar`, example map: `foo=>f1;bar=>b1`

| Variable | Default | Description |
| ------------------------- | ----------------------------------- | ----------- |
| --------------------------- | ----------------------------------- | ----------- |
| `PORT` | 3000 | HTTP port |
| `CACHE_DIR` | /tmp/artifactview | Temporary directory where to store the artifacts |
| `ROOT_DOMAIN` | localhost:3000 | Public hostname+port number under which artifactview is accessible. If this is configured incorrectly, artifactview will show the error message "host does not end with configured ROOT_DOMAIN" |
@@ -82,16 +238,38 @@ Artifactview is configured using environment variables.
| `MAX_FILE_COUNT` | 10000 | Maximum amount of files within a zip file |
| `MAX_AGE_H` | 12 | Maximum age in hours after which cached artifacts are deleted |
| `ZIP_TIMEOUT_MS` | 1000 | Maximum time in milliseconds for reading the index of a zip file. If this takes too long, the zip file is most likely excessively large or malicious (zip bomb) |
| `GITHUB_TOKEN` | - | GitHub API token for downloading artifacts. Using a fine-grained token with public read permissions is recommended |
| `GITHUB_TOKEN` | - | GitHub API token for downloading artifacts and creating PR comments. Using a fine-grained token with public read permissions is recommended |
| `FORGEJO_TOKENS` | - | Forgejo API tokens for creating PR comments<br />Example: `codeberg.org=>fc010f65348468d05e570806275528c936ce93a4` |
| `MEM_CACHE_SIZE` | 50 | Artifactview keeps artifact metadata as well as the zip file indexes in memory to improve performance. The amount of cached items is adjustable. |
| `REAL_IP_HEADER` | - | Get the client IP address from a HTTP request header<br />If Artifactview is exposed to the network directly, this option has to be unset. If you are using a reverse proxy the proxy needs to be configured to send the actual client IP as a request header.<br />For most proxies this header is `x-forwarded-for`. |
| `LIMIT_ARTIFACTS_PER_MIN` | 5 | Limit the amount of downloaded artifacts per IP address and minute |
| `LIMIT_ARTIFACTS_PER_MIN` | 5 | Limit the amount of downloaded artifacts per IP address and minute to prevent excessive resource usage. |
| `LIMIT_PR_COMMENTS_PER_MIN` | 5 | Limit the amount of pull request comment requests per IP address and minute to prevent spamming. |
| `REPO_BLACKLIST` | - | List of sites/users/repos that can NOT be accessed. The blacklist takes precedence over the whitelist (repos included in both lists cannot be accessed)<br />Example: `github.com/evil-corp/world-destruction;codeberg.org/blackhat;example.org` |
| `REPO_WHITELIST` | - | List of sites/users/repos that can ONLY be accessed. If the whitelist is empty, it will be ignored and any repository can be accessed. Uses the same syntax as `REPO_BLACKLIST`. |
| `SITE_ALIASES` | - | Aliases for sites to make URLs shorter<br />Example: `gh => github.com;cb => codeberg.org` |
| `SUGGESTED_SITES` | codeberg.org; github.com; gitea.com | List of suggested code forges (host only, without https://, separated by `;`). If repo_whitelist is empty, this value is used for the matched sites in the userscript. The first value is used in the placeholder URL on the home page. |
| `VIEWER_MAX_SIZE` | 500000 | Maximum file size to be displayed using the viewer |

### Access tokens

GitHub does not allow downloading artifacts for public repositories for unauthenticated
users. So you need to set up an access token to use Artifactview with GitHub. These are
the permissions that need to be enabled:

- Repository access: All repositories
- Repository permissions: Pull requests (Read and write)

Forgejo does not require access tokens to download artifacts on public repositories, so
you only need to create a token if you want to use the `prComment`-API. In this case,
the token needs the following permissions:

- Repository and Organization Access: Public only
- issue: Read and write
- user: Read (for determining own user ID)

Note that if you are using Artifactview to create pull request comments, it is
recommended to create a second bot account instead of using your main account.

## Technical details

### URL format
@@ -104,8 +282,8 @@ Example: `https://github-com--theta-dev--example-project--4-11.example.com`
The reason for using subdomains instead of URL paths is that many websites expect to be
served from a separate subdomain and access resources using absolute paths. Using URLs
like `example.com/github.com/theta-dev/example-project/4/11/path/to/file` would make the
application easier to host, but it would not be possible to simply preview a
React/Vue/Svelte web project.
application easier to host, but it would not be possible to preview a React/Vue/Svelte
web project.

Since domains only allow letters, numbers and dashes but repository names allow dots and
underscores, these escape sequences are used to access repositories with special
@@ -139,5 +317,5 @@ will serve no files from the `.well-known` folder.
There is a configurable limit for both the maximum downloaded artifact size and the
maximum size of individual files to be served (100 MB by default). Additionally there is
a configurable timeout for the zip file indexing operation. These measures should
protect the server againt denial-of-service attacks like overfilling the server drive or
uploading zip bombs.
protect the server against denial-of-service attacks like overfilling the server drive
or uploading zip bombs.
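The subdomain scheme shown in the README hunk above (`github-com--theta-dev--example-project--4-11`) can be sketched as a simple string transformation. This is an illustrative helper only, assuming names that need no further escaping; the project's real escaping rules for dots and underscores live in `src/query.rs`:

```rust
/// Illustrative only: build an Artifactview-style subdomain from run coordinates.
fn subdomain(host: &str, user: &str, repo: &str, run: u64, artifact: u64) -> String {
    format!(
        "{}--{}--{}--{}-{}",
        host.replace('.', "-"), // dots in the host become dashes
        user,
        repo,
        run,
        artifact
    )
}

fn main() {
    assert_eq!(
        subdomain("github.com", "theta-dev", "example-project", 4, 11),
        "github-com--theta-dev--example-project--4-11"
    );
}
```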
New binary files (not shown):

- resources/screenshotCode.png (43 KiB)
- resources/screenshotJUnit.png (47 KiB)
- resources/screenshotPrComment.png (20 KiB)
src/app.rs: 316 changes

@@ -1,17 +1,24 @@
use std::{
collections::BTreeMap, net::SocketAddr, ops::Bound, path::Path, str::FromStr, sync::Arc,
collections::{BTreeMap, HashMap},
fmt::Write,
net::{IpAddr, SocketAddr},
ops::Bound,
path::Path,
str::FromStr,
sync::Arc,
};

use async_zip::tokio::read::ZipEntryReader;
use axum::{
body::Body,
extract::{Host, Request, State},
extract::{Host, Query as XQuery, Request, State},
http::{Response, Uri},
response::{IntoResponse, Redirect},
routing::{any, get},
Router,
routing::{any, get, post},
Json, RequestExt, Router,
};
use futures_lite::AsyncReadExt as LiteAsyncReadExt;
use governor::{Quota, RateLimiter};
use headers::{ContentType, HeaderMapExt};
use http::{HeaderMap, StatusCode};
use serde::Deserialize;
@@ -29,7 +36,7 @@ use tower_http::{
};

use crate::{
artifact_api::ArtifactApi,
artifact_api::{Artifact, ArtifactApi, WorkflowRun},
cache::{Cache, CacheEntry, GetFileResult, GetFileResultFile},
config::Config,
error::Error,
@@ -52,6 +59,12 @@ struct AppInner {
cache: Cache,
api: ArtifactApi,
viewers: Viewers,
lim_pr_comment: Option<
governor::DefaultKeyedRateLimiter<
IpAddr,
governor::middleware::NoOpMiddleware<governor::clock::QuantaInstant>,
>,
>,
}

impl Default for App {
@@ -65,6 +78,25 @@ struct FileQparams {
viewer: Option<String>,
}

#[derive(Deserialize)]
struct UrlQuery {
url: Option<String>,
}

#[derive(Deserialize)]
struct PrCommentReq {
url: String,
pr: u64,
#[serde(default)]
recreate: bool,
title: Option<String>,
#[serde(default)]
artifact_titles: HashMap<String, String>,
}

const DATE_FORMAT: &[time::format_description::FormatItem] =
time::macros::format_description!("[day].[month].[year] [hour]:[minute]:[second]");

const FAVICON_PATH: &str = "/favicon.ico";
pub(crate) const VERSION: &str = env!("CARGO_PKG_VERSION");

@@ -126,6 +158,7 @@ impl App {
.route("/.well-known/api/artifacts", get(Self::get_artifacts))
.route("/.well-known/api/artifact", get(Self::get_artifact))
.route("/.well-known/api/files", get(Self::get_files))
.route("/.well-known/api/prComment", post(Self::pr_comment))
// Prevent access to the .well-known folder since it enables abuse
// (e.g. SSL certificate registration by an attacker)
.route("/.well-known/*path", any(|| async { Error::Inaccessible }))
@@ -331,8 +364,9 @@ impl App {
.query()
.and_then(|q| serde_urlencoded::from_str::<Params>(q).ok())
{
let query = RunQuery::from_forge_url(&params.url, &state.i.cfg.load().site_aliases)?;
let artifacts = state.i.api.list(&query).await?;
let query =
RunQuery::from_forge_url_alias(&params.url, &state.i.cfg.load().site_aliases)?;
let artifacts = state.i.api.list(&query, true).await?;

if artifacts.is_empty() {
Err(Error::NotFound("artifacts".into()))
@@ -545,7 +579,7 @@ impl App {
.typed_header(headers::ContentLength(content_length))
.typed_header(
headers::ContentRange::bytes(range, total_len)
.map_err(|e| Error::Internal(e.to_string().into()))?,
.map_err(|e| Error::Other(e.to_string().into()))?,
)
.body(Body::from_stream(ReaderStream::new(
bufreader.take(content_length),
@@ -562,11 +596,18 @@ impl App {
async fn get_artifacts(
State(state): State<AppState>,
Host(host): Host,
url_query: XQuery<UrlQuery>,
) -> Result<Response<Body>, ErrorJson> {
let query = match &url_query.url {
Some(url) => RunQuery::from_forge_url(url)?,
None => {
let subdomain = util::get_subdomain(&host, &state.i.cfg.load().root_domain)?;
let query = ArtifactQuery::from_subdomain(subdomain, &state.i.cfg.load().site_aliases)?;
ArtifactQuery::from_subdomain(subdomain, &state.i.cfg.load().site_aliases)?.into()
}
};

state.i.cfg.check_filterlist(&query)?;
let artifacts = state.i.api.list(&query.into()).await?;
let artifacts = state.i.api.list(&query, true).await?;
Ok(Response::builder().cache().json(&artifacts)?)
}
@@ -603,6 +644,83 @@ impl App {
.json(&files)?)
}

/// Create a comment under a workflow's pull request with links to view the artifacts
///
/// To prevent abuse/spamming, Artifactview will only create a comment if
/// - The workflow is still running
/// - The workflow was triggered by the given pull request
async fn pr_comment(
State(state): State<AppState>,
request: Request,
) -> Result<ErrorJson, ErrorJson> {
let ip = util::get_ip_address(&request, state.i.cfg.load().real_ip_header.as_deref())?;
let req = request
.extract::<Json<PrCommentReq>, _>()
.await
.map_err(|e| Error::BadRequest(e.body_text().into()))?;
let query = RunQuery::from_forge_url(&req.url)?;

if let Some(limiter) = &state.i.lim_pr_comment {
limiter.check_key(&ip).map_err(Error::from)?;
}

let run = state.i.api.workflow_run(&query).await?;
if !run.from_pr {
return Err(
Error::BadRequest("workflow run not triggered by pull request".into()).into(),
);
}
if run.done {
return Err(Error::BadRequest("workflow is not running".into()).into());
}
if let Some(pr_number) = run.pr_number {
if pr_number != req.pr {
return Err(Error::BadRequest(
format!(
"workflow was triggered by pr#{}, expected: {}",
pr_number, req.pr
)
.into(),
)
.into());
}
} else {
let pr = state.i.api.get_pr(query.as_ref(), req.pr).await?;
if run.head_sha != pr.head.sha {
return Ok(ErrorJson::ok("head of pr does not match workflow run"));
}
}

let artifacts = match state.i.api.list(&query, false).await {
Ok(a) => a,
Err(Error::NotFound(_)) => return Ok(ErrorJson::ok("no artifacts")),
Err(e) => return Err(e.into()),
};
let old_comment = state.i.api.find_comment(query.as_ref(), req.pr).await?;
let content = pr_comment_text(
&query,
old_comment.as_ref().map(|c| c.body.as_str()),
&run,
&artifacts,
req.title.as_deref(),
&req.artifact_titles,
&state.i.cfg,
);

let c_id = state
.i
.api
.add_comment(
query.as_ref(),
req.pr,
&content,
old_comment.map(|c| c.id),
req.recreate,
)
.await?;
Ok(ErrorJson::ok(format!("created comment #{c_id}")))
}

fn favicon() -> Result<Response<Body>, Error> {
Ok(Response::builder()
.typed_header(headers::ContentType::from_str("image/x-icon").unwrap())
@@ -642,10 +760,14 @@ impl AppState {
let api = ArtifactApi::new(cfg.clone());
Self {
i: Arc::new(AppInner {
cfg,
cache,
api,
viewers: Viewers::new(),
lim_pr_comment: cfg
.load()
.limit_artifacts_per_min
.map(|lim| RateLimiter::keyed(Quota::per_minute(lim))),
cfg,
}),
}
}
@@ -689,3 +811,175 @@ fn path_components(
}
path_components
}

/// Build pull request comment text
#[allow(clippy::assigning_clones)]
fn pr_comment_text(
query: &RunQuery,
old_comment: Option<&str>,
run: &WorkflowRun,
artifacts: &[Artifact],
title: Option<&str>,
artifact_titles: &HashMap<String, String>,
cfg: &Config,
) -> String {
let mut content = format!("### {} ", title.unwrap_or("Latest build artifacts"));
let mut prevln = "- ".to_owned();

let mut prev_builds = None;
let mut np_content = None;
if let Some(old_comment) = old_comment {
prev_builds = util::extract_delim(old_comment, "</summary>", "<!--NEXT_PREV");
np_content = util::extract_delim(old_comment, "<!--NEXT_PREV", "-->");
}

let write_commit = |s: &mut String, sha: &str| {
_ = write!(
s,
"[[{}](https://{}/{}/{}/commit/{})]",
&sha[..10],
query.host,
query.user,
query.repo,
sha
);
};

write_commit(&mut content, &run.head_sha);
write_commit(&mut prevln, &run.head_sha);
_ = content.write_str("\n\n");

for a in artifacts.iter().filter(|a| !a.expired) {
// Move leading emoji into a prefix variable since including them in the link does not look good
let mut name_pfx = String::new();
let mut name = artifact_titles.get(&a.name).unwrap_or(&a.name).to_owned();
if let Some((i, c)) = name
.char_indices()
.find(|(_, c)| !unic_emoji_char::is_emoji(*c))
{
if i > 0 && c == ' ' {
name[..i + 1].clone_into(&mut name_pfx);
name = name[i + 1..].to_owned();
}
}

let url = cfg.url_with_subdomain(&query.subdomain_with_artifact(a.id));
// Do not process the same run twice
if np_content.as_ref().is_some_and(|c| c.contains(&url)) {
np_content = None;
}

_ = writeln!(
&mut content,
r#"{}<a href="{}" target="_blank" rel="noopener noreferrer">{}</a><br>"#,
name_pfx, url, name,
);
_ = write!(
&mut prevln,
r#" <a href="{}" target="_blank" rel="noopener noreferrer">{}</a>,"#,
url, a.name
);
}

prevln = prevln.trim_matches([' ', ',']).to_owned();
if let Some(date_started) = &run.date_started {
_ = write!(
&mut prevln,
" ({} UTC)",
date_started
.to_offset(time::UtcOffset::UTC)
.format(&DATE_FORMAT)
.unwrap_or_default()
);
}

if np_content.is_some() || prev_builds.is_some() {
_ = write!(
&mut content,
"<details>\n<summary>Previous builds</summary>\n\n"
);
if let Some(prev_builds) = prev_builds {
_ = writeln!(&mut content, "{prev_builds}");
}
if let Some(np_content) = np_content {
_ = writeln!(&mut content, "{np_content}");
}
_ = writeln!(&mut content, "<!--NEXT_PREV {prevln} -->\n</details>");
} else {
_ = writeln!(&mut content, "<!--NEXT_PREV {prevln} -->");
}

_ = write!(&mut content, "\n<sup>generated by [Artifactview {VERSION}](https://codeberg.org/ThetaDev/artifactview)</sup>");
content
}

#[cfg(test)]
mod tests {
use time::macros::datetime;

use super::*;

#[test]
fn pr_comment() {
let mut query = RunQuery::from_forge_url(
"https://code.thetadev.de/ThetaDev/test-actions/actions/runs/104",
)
.unwrap();
let artifacts: [Artifact; 3] = [
Artifact {
id: 1,
name: "Hello".to_owned(),
size: 0,
expired: false,
download_url: String::new(),
user_download_url: None,
},
Artifact {
id: 2,
name: "Test".to_owned(),
size: 0,
expired: false,
download_url: String::new(),
user_download_url: None,
},
Artifact {
id: 3,
name: "Expired".to_owned(),
size: 0,
expired: true,
download_url: String::new(),
user_download_url: None,
},
];
let mut artifact_titles = HashMap::new();
artifact_titles.insert("Hello".to_owned(), "🏠 Hello World ;-)".to_owned());
let cfg = Config::default();

let footer = format!("<sup>generated by [Artifactview {VERSION}](https://codeberg.org/ThetaDev/artifactview)</sup>");

let mut old_comment = None;
for i in 1..=3 {
query.run = i.into();
let run = WorkflowRun {
head_sha: format!("{i}5eed48a8382513147a949117ef4aa659989d397"),
from_pr: true,
pr_number: None,
date_started: Some(datetime!(2024-06-15 15:30 UTC).replace_hour(i).unwrap()),
done: false,
};
let comment = pr_comment_text(
&query,
old_comment.as_deref(),
&run,
&artifacts,
None,
&artifact_titles,
&cfg,
);
let res = comment.replace(&footer, ""); // Remove footer since it depends on the version
insta::assert_snapshot!(format!("pr_comment_{i}"), res);

old_comment = Some(comment);
}
}
}
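`pr_comment_text` keeps the "Previous builds" section of an existing comment up to date by slicing marker-delimited regions out of the old comment body via `util::extract_delim`. That helper is not part of this diff; the following is a plausible sketch of such a function (an assumption, not the project's actual code):

```rust
/// Hypothetical stand-in for util::extract_delim: return the text between the
/// first occurrence of `start` and the next occurrence of `end` after it.
fn extract_delim<'a>(s: &'a str, start: &str, end: &str) -> Option<&'a str> {
    let begin = s.find(start)? + start.len();
    let len = s[begin..].find(end)?;
    Some(&s[begin..begin + len])
}

fn main() {
    let old = "<summary>Previous builds</summary>\nold content\n<!--NEXT_PREV prevline -->";
    assert_eq!(
        extract_delim(old, "</summary>", "<!--NEXT_PREV"),
        Some("\nold content\n")
    );
    assert_eq!(extract_delim(old, "<!--NEXT_PREV", "-->"), Some(" prevline "));
}
```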
src/artifact_api.rs

@@ -1,12 +1,15 @@
//! API-Client to fetch CI artifacts from Github and Forgejo

use std::path::Path;

use futures_lite::StreamExt;
use http::header;
use http::{header, Method};
use once_cell::sync::Lazy;
use quick_cache::sync::Cache as QuickCache;
use regex::Regex;
use reqwest::{Client, ClientBuilder, IntoUrl, RequestBuilder, Response, Url};
use serde::{Deserialize, Serialize};
use secrecy::ExposeSecret;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use time::OffsetDateTime;
use tokio::{fs::File, io::AsyncWriteExt};

use crate::{
@@ -19,9 +22,10 @@ pub struct ArtifactApi {
http: Client,
cfg: Config,
qc: QuickCache<String, Vec<Artifact>>,
user_ids: QuickCache<String, u64>,
}

#[derive(Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Artifact {
pub id: u64,
pub name: String,
@@ -35,7 +39,7 @@ pub struct Artifact {
pub user_download_url: Option<String>,
}

#[derive(Deserialize)]
#[derive(Debug, Deserialize)]
struct GithubArtifact {
id: u64,
name: String,
@@ -44,24 +48,24 @@ struct GithubArtifact {
archive_download_url: String,
}

#[derive(Deserialize)]
#[derive(Debug, Deserialize)]
struct ForgejoArtifact {
name: String,
size: u64,
status: ForgejoArtifactStatus,
}

#[derive(Deserialize)]
#[derive(Debug, Deserialize)]
struct ApiError {
message: String,
}

#[derive(Deserialize)]
#[derive(Debug, Deserialize)]
struct ArtifactsWrap<T> {
artifacts: Vec<T>,
}

#[derive(Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
enum ForgejoArtifactStatus {
Completed,
@@ -100,6 +104,154 @@ impl ForgejoArtifact {
}
}

#[derive(Debug)]
pub struct WorkflowRun {
pub head_sha: String,
pub from_pr: bool,
pub pr_number: Option<u64>,
pub date_started: Option<OffsetDateTime>,
pub done: bool,
}

#[derive(Debug, Deserialize)]
struct ForgejoWorkflowRun {
state: ForgejoWorkflowState,
logs: ForgejoWorkflowLogs,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgejoWorkflowState {
run: ForgejoWorkflowStateRun,
}

#[derive(Debug, Deserialize)]
struct ForgejoWorkflowStateRun {
done: bool,
commit: ForgejoWorkflowCommit,
}

#[derive(Debug, Deserialize)]
struct ForgejoWorkflowCommit {
link: String,
branch: ForgejoWorkflowBranch,
}

#[derive(Debug, Deserialize)]
struct ForgejoWorkflowBranch {
link: String,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgejoWorkflowLogs {
steps_log: Vec<ForgejoWorkflowLogStep>,
}

#[derive(Debug, Deserialize)]
struct ForgejoWorkflowLogStep {
started: i64,
lines: Vec<LogMessage>,
}

#[derive(Debug, Deserialize)]
struct LogMessage {
message: String,
}

#[derive(Debug, Deserialize)]
struct IdEntity {
id: u64,
}

#[derive(Debug, Deserialize)]
pub struct Comment {
pub id: u64,
pub body: String,
user: IdEntity,
}

#[derive(Debug, Serialize)]
struct CommentBody<'a> {
body: &'a str,
}

#[derive(Debug, Deserialize)]
pub struct PullRequest {
pub head: Commit,
}

#[derive(Debug, Deserialize)]
pub struct Commit {
pub sha: String,
}

const GITHUB_ACCEPT: &str = "application/vnd.github+json";
const COMMENT_TAG_PATTERN: &str = "<!-- Artifactview -->";

impl TryFrom<ForgejoWorkflowRun> for WorkflowRun {
type Error = Error;

fn try_from(value: ForgejoWorkflowRun) -> Result<Self> {
static RE_COMMIT_SHA: Lazy<Regex> =
Lazy::new(|| Regex::new(r#"^/[\w\-\.]+/[\w\-\.]+/commit/([a-f\d]+)$"#).unwrap());
static RE_PULL_ID: Lazy<Regex> =
Lazy::new(|| Regex::new(r#"^/[\w\-\.]+/[\w\-\.]+/pulls/(\d+)$"#).unwrap());

let from_pr = value
.logs
.steps_log
.first()
.and_then(|l| l.lines.first())
.map(|l| l.message.contains("be triggered by event: pull_request"))
.unwrap_or(true);

Ok(Self {
head_sha: RE_COMMIT_SHA
.captures(&value.state.run.commit.link)
.map(|cap| cap[1].to_string())
.ok_or(Error::Other(
"could not parse workflow run commit sha".into(),
))?,
from_pr,
pr_number: if from_pr {
RE_PULL_ID
.captures(&value.state.run.commit.branch.link)
.and_then(|cap| cap[1].parse().ok())
} else {
None
},
date_started: value
.logs
.steps_log
.first()
.and_then(|l| OffsetDateTime::from_unix_timestamp(l.started).ok()),
done: value.state.run.done,
})
}
}

#[derive(Deserialize)]
struct GitHubWorkflowRun {
head_sha: String,
event: String,
conclusion: Option<String>,
#[serde(with = "time::serde::rfc3339::option")]
run_started_at: Option<OffsetDateTime>,
}

impl From<GitHubWorkflowRun> for WorkflowRun {
fn from(value: GitHubWorkflowRun) -> Self {
Self {
head_sha: value.head_sha,
from_pr: value.event == "pull_request",
pr_number: None,
date_started: value.run_started_at,
done: value.conclusion.is_some(),
}
}
}

impl ArtifactApi {
pub fn new(cfg: Config) -> Self {
Self {
@@ -112,14 +264,14 @@ impl ArtifactApi {
.build()
.unwrap(),
qc: QuickCache::new(cfg.load().mem_cache_size),
user_ids: QuickCache::new(50),
cfg,
}
}

pub async fn list(&self, query: &RunQuery) -> Result<Vec<Artifact>> {
pub async fn list(&self, query: &RunQuery, cached: bool) -> Result<Vec<Artifact>> {
let cache_key = query.cache_key();
self.qc
.get_or_insert_async(&cache_key, async {
let fut = async {
let res = if query.is_github() {
self.list_github(query.as_ref()).await
} else {
@@ -130,8 +282,12 @@ impl ArtifactApi {
} else {
res
}
})
.await
};
if cached {
self.qc.get_or_insert_async(&cache_key, fut).await
} else {
fut.await
}
}

pub async fn fetch(&self, query: &ArtifactQuery) -> Result<Artifact> {
@@ -177,7 +333,7 @@ impl ArtifactApi {

let url = Url::parse(&artifact.download_url)?;
let req = if url.domain() == Some("api.github.com") {
self.get_github(url)
self.get_github_any(url)
} else {
self.http.get(url)
};
@@ -212,8 +368,7 @@ impl ArtifactApi {
);

let resp = self
.http
.get(url)
.get_forgejo(url)
.send()
.await?
.error_for_status()?
@@ -236,10 +391,8 @@ impl ArtifactApi {
query.user, query.repo, query.run
);

let resp = Self::handle_github_error(self.get_github(url).send().await?)
.await?
.json::<ArtifactsWrap<GithubArtifact>>()
.await?;
let resp =
Self::send_api_req::<ArtifactsWrap<GithubArtifact>>(self.get_github(url)).await?;

Ok(resp
.artifacts
@@ -254,14 +407,12 @@ impl ArtifactApi {
query.user, query.repo, query.artifact
);

let artifact = Self::handle_github_error(self.get_github(url).send().await?)
.await?
.json::<GithubArtifact>()
.await?;
let artifact = Self::send_api_req::<GithubArtifact>(self.get_github(url)).await?;
Ok(artifact.into_artifact(query.as_ref()))
}

async fn handle_github_error(resp: Response) -> Result<Response> {
async fn send_api_req_empty(req: RequestBuilder) -> Result<Response> {
let resp = req.send().await?;
if let Err(e) = resp.error_for_status_ref() {
let status = resp.status();
let msg = resp.json::<ApiError>().await.ok();
@@ -274,21 +425,330 @@ impl ArtifactApi {
}
}

fn get_github<U: IntoUrl>(&self, url: U) -> RequestBuilder {
async fn send_api_req<T: DeserializeOwned>(req: RequestBuilder) -> Result<T> {
Ok(Self::send_api_req_empty(req).await?.json().await?)
}

fn get_github_any<U: IntoUrl>(&self, url: U) -> RequestBuilder {
let mut builder = self.http.get(url);

if let Some(github_token) = &self.cfg.load().github_token {
builder = builder.header(header::AUTHORIZATION, format!("Bearer {github_token}"));
builder = builder.header(
header::AUTHORIZATION,
format!("Bearer {}", github_token.expose_secret()),
);
}
builder
}

fn get_github<U: IntoUrl>(&self, url: U) -> RequestBuilder {
self.get_github_any(url)
.header(header::ACCEPT, GITHUB_ACCEPT)
}

/// Authorized GitHub request
fn req_github<U: IntoUrl>(&self, method: Method, url: U) -> Result<RequestBuilder> {
Ok(self
.http
.request(method, url)
.header(header::ACCEPT, GITHUB_ACCEPT)
.header(header::CONTENT_TYPE, GITHUB_ACCEPT)
.header(
header::AUTHORIZATION,
format!(
"Bearer {}",
self.cfg
.load()
.github_token
.as_ref()
.map(ExposeSecret::expose_secret)
.ok_or(Error::Other("GitHub token required".into()))?
),
))
}

fn get_forgejo<U: IntoUrl>(&self, url: U) -> RequestBuilder {
self.http
.get(url)
.header(header::ACCEPT, mime::APPLICATION_JSON.essence_str())
}

/// Authorized Forgejo request
fn req_forgejo<U: IntoUrl>(&self, method: Method, url: U) -> Result<RequestBuilder> {
let u = url.into_url()?;
let host = u.host_str().ok_or(Error::InvalidUrl)?;
let token = self
.cfg
.load()
.forgejo_tokens
.get(host)
.ok_or_else(|| Error::Other(format!("Forgejo token for {host} required").into()))?
.expose_secret();
Ok(self
.http
.request(method, u)
.header(header::ACCEPT, mime::APPLICATION_JSON.essence_str())
.header(header::CONTENT_TYPE, mime::APPLICATION_JSON.essence_str())
.header(header::AUTHORIZATION, format!("token {token}")))
}

pub async fn workflow_run(&self, query: &RunQuery) -> Result<WorkflowRun> {
if query.is_github() {
self.workflow_run_github(query).await
} else {
self.workflow_run_forgejo(query).await
}
}

async fn workflow_run_forgejo(&self, query: &RunQuery) -> Result<WorkflowRun> {
// Since the workflow needs to be fetched with a POST request, we need a CSRF token
let resp = self
.http
.get(format!("https://{}", query.host))
.send()
.await?
.error_for_status()?;
let mut i_like_gitea = None;
let mut csrf = None;
for (k, v) in resp
.headers()
.get_all(header::SET_COOKIE)
.into_iter()
.filter_map(|v| v.to_str().ok())
.filter_map(|v| v.split(';').next())
.filter_map(|v| v.split_once('='))
{
match k {
"i_like_gitea" => i_like_gitea = Some(v),
"_csrf" => csrf = Some(v),
_ => {}
}
}
let i_like_gitea =
i_like_gitea.ok_or(Error::Other("missing header: i_like_gitea".into()))?;
let csrf = csrf.ok_or(Error::Other("missing header: _csrf".into()))?;

let resp = self
.http
.post(format!(
"https://{}/{}/{}/actions/runs/{}/jobs/0",
query.host, query.user, query.repo, query.run
))
.header(header::CONTENT_TYPE, mime::APPLICATION_JSON.essence_str())
.header(header::COOKIE, format!("i_like_gitea={i_like_gitea}"))
.header("x-csrf-token", csrf)
.body(r#"{"logCursors":[{"step":0,"cursor":null,"expanded":true}]}"#)
.send()
.await?
.error_for_status()?;
let run: WorkflowRun = resp.json::<ForgejoWorkflowRun>().await?.try_into()?;
Ok(run)
}

async fn workflow_run_github(&self, query: &RunQuery) -> Result<WorkflowRun> {
let run = Self::send_api_req::<GitHubWorkflowRun>(self.get_github(format!(
"https://api.github.com/repos/{}/{}/actions/runs/{}",
query.user, query.repo, query.run
)))
.await?;
Ok(run.into())
}

pub async fn add_comment(
&self,
query: QueryRef<'_>,
issue_id: u64,
content: &str,
old_comment_id: Option<u64>,
recreate: bool,
) -> Result<u64> {
let body = format!("{COMMENT_TAG_PATTERN}\n{content}");
if query.is_github() {
self.add_comment_github(query, issue_id, &body, old_comment_id, recreate)
.await
} else {
self.add_comment_forgejo(query, issue_id, &body, old_comment_id, recreate)
.await
}
}

async fn add_comment_forgejo(
&self,
query: QueryRef<'_>,
issue_id: u64,
body: &str,
old_comment_id: Option<u64>,
recreate: bool,
) -> Result<u64> {
if let Some(old_comment_id) = old_comment_id {
let url = format!(
"https://{}/api/v1/repos/{}/{}/issues/comments/{}",
query.host, query.user, query.repo, old_comment_id
);
if recreate {
Self::send_api_req_empty(self.req_forgejo(Method::DELETE, url)?).await?;
} else {
Self::send_api_req_empty(
self.req_forgejo(Method::PATCH, url)?
.json(&CommentBody { body }),
)
.await?;
return Ok(old_comment_id);
}
}

let new_c = Self::send_api_req::<IdEntity>(
self.req_forgejo(
Method::POST,
format!(
"https://{}/api/v1/repos/{}/{}/issues/{}/comments",
query.host, query.user, query.repo, issue_id
),
)?
.json(&CommentBody { body }),
)
.await?;
Ok(new_c.id)
}

async fn add_comment_github(
&self,
query: QueryRef<'_>,
issue_id: u64,
body: &str,
old_comment_id: Option<u64>,
recreate: bool,
) -> Result<u64> {
if let Some(old_comment_id) = old_comment_id {
let url = format!(
"https://api.github.com/repos/{}/{}/issues/{}/comments/{}",
query.user, query.repo, issue_id, old_comment_id
);
if recreate {
Self::send_api_req_empty(self.req_github(Method::DELETE, url)?).await?;
} else {
Self::send_api_req_empty(
self.req_github(Method::PATCH, url)?
.json(&CommentBody { body }),
)
.await?;
return Ok(old_comment_id);
}
}

let new_c = Self::send_api_req::<IdEntity>(
self.req_github(
Method::POST,
format!(
"https://api.github.com/repos/{}/{}/issues/{}/comments",
query.user, query.repo, issue_id
),
)?
.json(&CommentBody { body }),
)
.await?;
Ok(new_c.id)
}

pub async fn find_comment(
&self,
query: QueryRef<'_>,
issue_id: u64,
) -> Result<Option<Comment>> {
let user_id = self.get_user_id(query).await?;
if query.is_github() {
self.find_comment_github(query, issue_id, user_id).await
} else {
self.find_comment_forgejo(query, issue_id, user_id).await
}
}

async fn find_comment_forgejo(
&self,
query: QueryRef<'_>,
issue_id: u64,
user_id: u64,
) -> Result<Option<Comment>> {
let comments = Self::send_api_req::<Vec<Comment>>(self.get_forgejo(format!(
"https://{}/api/v1/repos/{}/{}/issues/{}/comments",
query.host, query.user, query.repo, issue_id
)))
.await?;

Ok(comments
.into_iter()
.find(|c| c.user.id == user_id && c.body.starts_with(COMMENT_TAG_PATTERN)))
}

async fn find_comment_github(
&self,
query: QueryRef<'_>,
issue_id: u64,
user_id: u64,
) -> Result<Option<Comment>> {
for page in 1..=5 {
let comments = Self::send_api_req::<Vec<Comment>>(self.get_github(format!(
"https://api.github.com/repos/{}/{}/issues/{}/comments?page={}",
query.user, query.repo, issue_id, page
)))
.await?;
if let Some(comment) = comments
.into_iter()
.find(|c| c.user.id == user_id && c.body.starts_with(COMMENT_TAG_PATTERN))
{
return Ok(Some(comment));
}
}
Ok(None)
}

pub async fn get_pr(&self, query: QueryRef<'_>, pr_id: u64) -> Result<PullRequest> {
let req = if query.is_github() {
self.get_github(format!(
"https://api.github.com/repos/{}/{}/pulls/{}",
query.user, query.repo, pr_id
))
} else {
self.get_forgejo(format!(
"https://{}/api/v1/repos/{}/{}/pulls/{}",
query.host, query.user, query.repo, pr_id
))
};
Self::send_api_req(req).await
}

async fn get_user_id(&self, query: QueryRef<'_>) -> Result<u64> {
self.user_ids
.get_or_insert_async(query.host, async {
let user =
if query.is_github() {
Self::send_api_req::<IdEntity>(
self.req_github(Method::GET, "https://api.github.com/user")?,
)
.await?
} else {
Self::send_api_req::<IdEntity>(self.req_forgejo(
Method::GET,
format!("https://{}/api/v1/user", query.host),
)?)
.await?
};
Ok::<_, Error>(user.id)
})
.await
}
}

#[cfg(test)]
mod tests {
use std::collections::HashMap;

use crate::{config::Config, query::ArtifactQuery};
use time::macros::datetime;

use crate::{
config::Config,
query::{ArtifactQuery, RunQuery},
};

use super::ArtifactApi;
@@ -321,4 +781,31 @@ mod tests {
assert_eq!(res.id, 1440556464);
assert_eq!(res.size, 334);
}

#[tokio::test]
#[ignore]
async fn workflow_run_forgejo() {
let query =
RunQuery::from_forge_url("https://codeberg.org/forgejo/forgejo/actions/runs/20471")
.unwrap();
let api = ArtifactApi::new(Config::default());
let res = api.workflow_run(&query).await.unwrap();
assert_eq!(res.head_sha, "03581511024aca9b56bc6083565bdcebeacb9d05");
assert!(res.from_pr);
assert_eq!(res.date_started, Some(datetime!(2024-06-21 9:13:23 UTC)));
}

#[tokio::test]
#[ignore]
async fn workflow_run_github() {
let query =
RunQuery::from_forge_url("https://github.com/orhun/git-cliff/actions/runs/9588266559")
.unwrap();
let api = ArtifactApi::new(Config::default());
let res = api.workflow_run(&query).await.unwrap();
dbg!(&res);
assert_eq!(res.head_sha, "0500cb2c5c5ec225e109584236940ee068be2372");
assert!(res.from_pr);
assert_eq!(res.date_started, Some(datetime!(2024-06-21 9:13:23 UTC)));
}
}
src/cache.rs: 12 changes

@@ -166,10 +166,10 @@ impl Cache {
let metadata = tokio::fs::metadata(&zip_path).await?;
let modified = metadata
.modified()
.map_err(|_| Error::Internal("no file modified time".into()))?;
.map_err(|_| Error::Other("no file modified time".into()))?;
let accessed = metadata
.accessed()
.map_err(|_| Error::Internal("no file accessed time".into()))?;
.map_err(|_| Error::Other("no file accessed time".into()))?;
if modified != entry.last_modified {
tracing::info!("cached file {zip_path:?} changed");
entry = Arc::new(
@@ -182,7 +182,7 @@ impl Cache {
let now = SystemTime::now();
if now
.duration_since(accessed)
.map_err(|e| Error::Internal(e.to_string().into()))?
.map_err(|e| Error::Other(e.to_string().into()))?
> Duration::from_secs(1800)
{
let file = std::fs::File::open(&zip_path)?;
@@ -215,10 +215,10 @@ impl Cache {
.metadata()
.await?
.accessed()
.map_err(|_| Error::Internal("no file accessed time".into()))?;
.map_err(|_| Error::Other("no file accessed time".into()))?;
if now
.duration_since(accessed)
.map_err(|e| Error::Internal(e.to_string().into()))?
.map_err(|e| Error::Other(e.to_string().into()))?
> max_age
{
let path = entry.path();
@@ -289,7 +289,7 @@ impl CacheEntry {
name,
last_modified: meta
.modified()
.map_err(|_| Error::Internal("no file modified time".into()))?,
.map_err(|_| Error::Other("no file modified time".into()))?,
})
}
@@ -5,11 +5,12 @@ use std::{
     sync::Arc,
 };
 
+use secrecy::Secret;
 use serde::Deserialize;
 
 use crate::{
     error::{Error, Result},
-    query::{ArtifactQuery, QueryFilterList},
+    query::{Query, QueryFilterList},
 };
 
 #[derive(Clone)]
@@ -48,7 +49,9 @@ pub struct ConfigData {
     /// GitHub API token for downloading GitHub artifacts
     ///
     /// Using a fine-grained token with public read permissions is recommended.
-    pub github_token: Option<String>,
+    pub github_token: Option<Secret<String>>,
+    /// Forgejo/Gitea API tokens by host
+    pub forgejo_tokens: HashMap<String, Secret<String>>,
     /// Number of artifact indexes to keep in memory
     pub mem_cache_size: usize,
     /// Get the client IP address from a HTTP request header
@@ -61,6 +64,8 @@ pub struct ConfigData {
     pub real_ip_header: Option<String>,
     /// Limit the amount of downloaded artifacts per IP address and minute
     pub limit_artifacts_per_min: Option<NonZeroU32>,
+    /// Limit the amount of PR comment API requests per IP address and minute
+    pub limit_pr_comments_per_min: Option<NonZeroU32>,
     /// List of sites/users/repos that can NOT be accessed
     pub repo_blacklist: QueryFilterList,
     /// List of sites/users/repos that can ONLY be accessed
@@ -89,9 +94,11 @@ impl Default for ConfigData {
             max_age_h: NonZeroU32::new(12).unwrap(),
             zip_timeout_ms: Some(NonZeroU32::new(1000).unwrap()),
             github_token: None,
+            forgejo_tokens: HashMap::new(),
             mem_cache_size: 50,
             real_ip_header: None,
             limit_artifacts_per_min: Some(NonZeroU32::new(5).unwrap()),
+            limit_pr_comments_per_min: Some(NonZeroU32::new(5).unwrap()),
             repo_blacklist: QueryFilterList::default(),
             repo_whitelist: QueryFilterList::default(),
             suggested_sites: vec![
@@ -124,7 +131,7 @@ impl ConfigData {
 impl Config {
     pub fn new() -> Result<Self> {
         let data =
-            envy::from_env::<ConfigData>().map_err(|e| Error::Internal(e.to_string().into()))?;
+            envy::from_env::<ConfigData>().map_err(|e| Error::Other(e.to_string().into()))?;
         Self::from_data(data)
     }
 
@@ -173,7 +180,7 @@ impl Config {
             .unwrap_or("codeberg.org")
     }
 
-    pub fn check_filterlist(&self, query: &ArtifactQuery) -> Result<()> {
+    pub fn check_filterlist<Q: Query>(&self, query: &Q) -> Result<()> {
         if !self.i.data.repo_blacklist.passes(query, true) {
             Err(Error::Forbidden("repository is blacklisted".into()))
         } else if !self.i.data.repo_whitelist.passes(query, false) {
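The token fields now use `secrecy::Secret`, which hides the value from `Debug` output and zeroizes it on drop; with the crate's `serde` feature (enabled in Cargo.toml) the fields can still be deserialized from the environment. A minimal sketch of how such a token might be turned into an `Authorization` header value; the `bearer_header` helper is an assumption for illustration, not code from this diff:

```rust
use secrecy::{ExposeSecret, Secret};

// Hypothetical helper: expose the secret only at the moment the header value is built.
fn bearer_header(token: Option<&Secret<String>>) -> Option<String> {
    token.map(|t| format!("Bearer {}", t.expose_secret()))
}

fn main() {
    let github_token = Some(Secret::new("ghp_example".to_string()));
    assert_eq!(
        bearer_header(github_token.as_ref()).as_deref(),
        Some("Bearer ghp_example")
    );
    // Debug-printing the Secret itself shows a redacted placeholder, not the token.
    println!("{github_token:?}");
}
```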
@@ -20,8 +20,8 @@ pub enum Error {
     Io(#[from] std::io::Error),
     #[error("Zip: {0}")]
     Zip(#[from] async_zip::error::ZipError),
-    #[error("Internal error: {0}")]
-    Internal(Cow<'static, str>),
+    #[error("Error: {0}")]
+    Other(Cow<'static, str>),
 
     #[error("Invalid request: {0}")]
     BadRequest(Cow<'static, str>),
@@ -58,13 +58,13 @@ impl From<reqwest::Error> for Error {
 
 impl From<std::num::TryFromIntError> for Error {
     fn from(value: std::num::TryFromIntError) -> Self {
-        Self::Internal(value.to_string().into())
+        Self::Other(value.to_string().into())
     }
 }
 
 impl From<url::ParseError> for Error {
     fn from(value: url::ParseError) -> Self {
-        Self::Internal(value.to_string().into())
+        Self::Other(value.to_string().into())
     }
 }
 
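The `Internal` variant is renamed to `Other`, and its displayed prefix changes from `Internal error:` to `Error:`; all `Error::Internal(..)` construction sites elsewhere in this diff are updated to match. A cut-down sketch of the `Display` output that `thiserror` derives for the renamed variant (the real enum in src/error.rs has more variants, and the example messages here are illustrative only):

```rust
use std::borrow::Cow;
use thiserror::Error;

// Reduced copy of two variants, only to show the derived Display strings.
#[derive(Debug, Error)]
enum Error {
    #[error("Error: {0}")]
    Other(Cow<'static, str>),
    #[error("Invalid request: {0}")]
    BadRequest(Cow<'static, str>),
}

fn main() {
    let e = Error::Other("no file modified time".into());
    assert_eq!(e.to_string(), "Error: no file modified time");
    let b = Error::BadRequest("missing parameter".into());
    assert_eq!(b.to_string(), "Invalid request: missing parameter");
}
```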
12
src/query.rs
@@ -148,7 +148,11 @@ impl ArtifactQuery {
 }
 
 impl RunQuery {
-    pub fn from_forge_url(url: &str, aliases: &HashMap<String, String>) -> Result<Self> {
+    pub fn from_forge_url(url: &str) -> Result<Self> {
+        Self::from_forge_url_alias(url, &HashMap::new())
+    }
+
+    pub fn from_forge_url_alias(url: &str, aliases: &HashMap<String, String>) -> Result<Self> {
         let (host, mut path_segs) = util::parse_url(url)?;
 
         let user = path_segs
@@ -331,12 +335,12 @@ impl FromStr for QueryFilter {
 
         if let Some(user) = &user {
             if !RE_REPO_NAME.is_match(user) {
-                return Err(Error::Internal("invalid username".into()));
+                return Err(Error::Other("invalid username".into()));
             }
         }
         if let Some(repo) = &repo {
             if !RE_REPO_NAME.is_match(repo) {
-                return Err(Error::Internal("invalid repository name".into()));
+                return Err(Error::Other("invalid repository name".into()));
             }
         }
 
@@ -370,7 +374,7 @@ impl FromStr for QueryFilterList {
 }
 
 impl QueryFilterList {
-    pub fn passes(&self, query: &ArtifactQuery, blacklist: bool) -> bool {
+    pub fn passes<Q: Query>(&self, query: &Q, blacklist: bool) -> bool {
         if self.0.is_empty() {
             true
         } else {
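`from_forge_url` keeps its one-argument shape by delegating to the new `from_forge_url_alias` with an empty alias map, and `QueryFilterList::passes` becomes generic over the `Query` trait, presumably so the same blacklist/whitelist checks apply to run queries as well as artifact queries. A rough, self-contained illustration of what the URL parsing step has to pull out of an Actions run URL, using the `url` crate directly; the real code goes through `util::parse_url` and differs in detail:

```rust
use url::Url;

fn main() {
    // Same URL as in the workflow_run test above.
    let url =
        Url::parse("https://github.com/orhun/git-cliff/actions/runs/9588266559").unwrap();
    let host = url.host_str().unwrap();
    let mut segs = url.path_segments().unwrap();
    let user = segs.next().unwrap();
    let repo = segs.next().unwrap();
    // The trailing path segments ("actions", "runs", "<id>") carry the run ID.
    let run_id: u64 = segs.last().unwrap().parse().unwrap();
    assert_eq!(
        (host, user, repo, run_id),
        ("github.com", "orhun", "git-cliff", 9588266559)
    );
}
```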
@@ -0,0 +1,9 @@
---
source: src/app.rs
expression: res
---
### Latest build artifacts [[15eed48a83](https://code.thetadev.de/thetadev/test-actions/commit/15eed48a8382513147a949117ef4aa659989d397)]

🏠 <a href="https://code-thetadev-de--thetadev--test-actions--1-1.localhost:3000" target="_blank" rel="noopener noreferrer">Hello World ;-)</a><br>
<a href="https://code-thetadev-de--thetadev--test-actions--1-2.localhost:3000" target="_blank" rel="noopener noreferrer">Test</a><br>
<!--NEXT_PREV - [[15eed48a83](https://code.thetadev.de/thetadev/test-actions/commit/15eed48a8382513147a949117ef4aa659989d397)] <a href="https://code-thetadev-de--thetadev--test-actions--1-1.localhost:3000" target="_blank" rel="noopener noreferrer">Hello</a>, <a href="https://code-thetadev-de--thetadev--test-actions--1-2.localhost:3000" target="_blank" rel="noopener noreferrer">Test</a> (15.06.2024 01:30:00 UTC) -->
14
src/snapshots/artifactview__app__tests__pr_comment_2.snap
Normal file
@@ -0,0 +1,14 @@
---
source: src/app.rs
expression: res
---
### Latest build artifacts [[25eed48a83](https://code.thetadev.de/thetadev/test-actions/commit/25eed48a8382513147a949117ef4aa659989d397)]

🏠 <a href="https://code-thetadev-de--thetadev--test-actions--2-1.localhost:3000" target="_blank" rel="noopener noreferrer">Hello World ;-)</a><br>
<a href="https://code-thetadev-de--thetadev--test-actions--2-2.localhost:3000" target="_blank" rel="noopener noreferrer">Test</a><br>
<details>
<summary>Previous builds</summary>

- [[15eed48a83](https://code.thetadev.de/thetadev/test-actions/commit/15eed48a8382513147a949117ef4aa659989d397)] <a href="https://code-thetadev-de--thetadev--test-actions--1-1.localhost:3000" target="_blank" rel="noopener noreferrer">Hello</a>, <a href="https://code-thetadev-de--thetadev--test-actions--1-2.localhost:3000" target="_blank" rel="noopener noreferrer">Test</a> (15.06.2024 01:30:00 UTC)
<!--NEXT_PREV - [[25eed48a83](https://code.thetadev.de/thetadev/test-actions/commit/25eed48a8382513147a949117ef4aa659989d397)] <a href="https://code-thetadev-de--thetadev--test-actions--2-1.localhost:3000" target="_blank" rel="noopener noreferrer">Hello</a>, <a href="https://code-thetadev-de--thetadev--test-actions--2-2.localhost:3000" target="_blank" rel="noopener noreferrer">Test</a> (15.06.2024 02:30:00 UTC) -->
</details>
15
src/snapshots/artifactview__app__tests__pr_comment_3.snap
Normal file
@@ -0,0 +1,15 @@
---
source: src/app.rs
expression: res
---
### Latest build artifacts [[35eed48a83](https://code.thetadev.de/thetadev/test-actions/commit/35eed48a8382513147a949117ef4aa659989d397)]

🏠 <a href="https://code-thetadev-de--thetadev--test-actions--3-1.localhost:3000" target="_blank" rel="noopener noreferrer">Hello World ;-)</a><br>
<a href="https://code-thetadev-de--thetadev--test-actions--3-2.localhost:3000" target="_blank" rel="noopener noreferrer">Test</a><br>
<details>
<summary>Previous builds</summary>

- [[15eed48a83](https://code.thetadev.de/thetadev/test-actions/commit/15eed48a8382513147a949117ef4aa659989d397)] <a href="https://code-thetadev-de--thetadev--test-actions--1-1.localhost:3000" target="_blank" rel="noopener noreferrer">Hello</a>, <a href="https://code-thetadev-de--thetadev--test-actions--1-2.localhost:3000" target="_blank" rel="noopener noreferrer">Test</a> (15.06.2024 01:30:00 UTC)
- [[25eed48a83](https://code.thetadev.de/thetadev/test-actions/commit/25eed48a8382513147a949117ef4aa659989d397)] <a href="https://code-thetadev-de--thetadev--test-actions--2-1.localhost:3000" target="_blank" rel="noopener noreferrer">Hello</a>, <a href="https://code-thetadev-de--thetadev--test-actions--2-2.localhost:3000" target="_blank" rel="noopener noreferrer">Test</a> (15.06.2024 02:30:00 UTC)
<!--NEXT_PREV - [[35eed48a83](https://code.thetadev.de/thetadev/test-actions/commit/35eed48a8382513147a949117ef4aa659989d397)] <a href="https://code-thetadev-de--thetadev--test-actions--3-1.localhost:3000" target="_blank" rel="noopener noreferrer">Hello</a>, <a href="https://code-thetadev-de--thetadev--test-actions--3-2.localhost:3000" target="_blank" rel="noopener noreferrer">Test</a> (15.06.2024 03:30:00 UTC) -->
</details>
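These three `.snap` files use the header format written by the `insta` snapshot crate (`source`/`expression` followed by the rendered value), so the new PR-comment tests in src/app.rs presumably render the comment body into a string named `res` and compare it against these files. Generic insta usage looks roughly like the following sketch; this is not the actual test code from src/app.rs:

```rust
#[cfg(test)]
mod tests {
    #[test]
    fn pr_comment_snapshot() {
        // In the real tests, `res` would be the rendered PR comment body.
        let res = "### Latest build artifacts".to_string();
        // insta stores the result as a .snap file under src/snapshots/,
        // next to the test's source file.
        insta::assert_snapshot!(res);
    }
}
```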
20
src/util.rs
@@ -194,7 +194,7 @@ pub fn get_ip_address(request: &Request, real_ip_header: Option<&str>) -> Result
     let socket_addr = request
         .extensions()
         .get::<ConnectInfo<SocketAddr>>()
-        .ok_or(Error::Internal("could get request ip address".into()))?
+        .ok_or(Error::Other("could get request ip address".into()))?
         .0;
     Ok(socket_addr.ip())
 }
@@ -263,6 +263,15 @@ pub struct ErrorJson {
     msg: String,
 }
 
+impl ErrorJson {
+    pub fn ok<S: Into<String>>(msg: S) -> Self {
+        Self {
+            status: 200,
+            msg: msg.into(),
+        }
+    }
+}
+
 impl From<Error> for ErrorJson {
     fn from(value: Error) -> Self {
         Self {
@@ -284,6 +293,15 @@ impl IntoResponse for ErrorJson {
     }
 }
 
+pub fn extract_delim<'a>(s: &'a str, start: &str, end: &str) -> Option<&'a str> {
+    if let Some(np) = s.find(start) {
+        if let Some(np_end) = s[np + start.len()..].find(end) {
+            return Some(s[np + start.len()..np + start.len() + np_end].trim());
+        }
+    }
+    None
+}
+
 #[cfg(test)]
 pub(crate) mod tests {
     use std::path::{Path, PathBuf};
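`extract_delim` returns the trimmed text between the first occurrence of `start` and the following occurrence of `end`. Given the `<!--NEXT_PREV ... -->` marker in the PR-comment snapshots above, it is presumably what recovers the hidden "latest build" line from an existing comment so it can be moved into the "Previous builds" list on the next update. A self-contained check; the function body is copied from the diff above, while the call site and marker usage are assumptions:

```rust
// Copied from src/util.rs above so the example compiles on its own.
pub fn extract_delim<'a>(s: &'a str, start: &str, end: &str) -> Option<&'a str> {
    if let Some(np) = s.find(start) {
        if let Some(np_end) = s[np + start.len()..].find(end) {
            return Some(s[np + start.len()..np + start.len() + np_end].trim());
        }
    }
    None
}

fn main() {
    let comment = "### Latest build artifacts\n<!--NEXT_PREV - previous build line -->";
    assert_eq!(
        extract_delim(comment, "<!--NEXT_PREV", "-->"),
        Some("- previous build line")
    );
    // No markers present: nothing to extract.
    assert_eq!(extract_delim("plain text", "<!--NEXT_PREV", "-->"), None);
}
```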
320
tests/testfiles/giteaWorkflowRun.json
Normal file
@ -0,0 +1,320 @@
|
|||
{
|
||||
"state": {
|
||||
"run": {
|
||||
"link": "/ThetaDev/test-actions/actions/runs/92",
|
||||
"title": "Update README.md",
|
||||
"status": "success",
|
||||
"canCancel": false,
|
||||
"canApprove": false,
|
||||
"canRerun": true,
|
||||
"canDeleteArtifact": true,
|
||||
"done": true,
|
||||
"jobs": [
|
||||
{
|
||||
"id": 377,
|
||||
"name": "Test",
|
||||
"status": "success",
|
||||
"canRerun": true,
|
||||
"duration": "2s"
|
||||
}
|
||||
],
|
||||
"commit": {
|
||||
"localeCommit": "Commit",
|
||||
"localePushedBy": "pushed by",
|
||||
"localeWorkflow": "Workflow",
|
||||
"shortSHA": "6185409d45",
|
||||
"link": "/ThetaDev/test-actions/commit/6185409d457e0a7833ee122811b138a950273229",
|
||||
"pusher": { "displayName": "ThetaDev", "link": "/ThetaDev" },
|
||||
"branch": { "name": "#3", "link": "/ThetaDev/test-actions/pulls/3" }
|
||||
}
|
||||
},
|
||||
"currentJob": {
|
||||
"title": "Test",
|
||||
"detail": "Success",
|
||||
"steps": [
|
||||
{
|
||||
"summary": "Set up job",
|
||||
"duration": "1s",
|
||||
"status": "success"
|
||||
},
|
||||
{ "summary": "Test", "duration": "0s", "status": "success" },
|
||||
{
|
||||
"summary": "Comment PR",
|
||||
"duration": "1s",
|
||||
"status": "success"
|
||||
},
|
||||
{
|
||||
"summary": "Complete job",
|
||||
"duration": "0s",
|
||||
"status": "success"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"logs": {
|
||||
"stepsLog": [
|
||||
{
|
||||
"step": 0,
|
||||
"cursor": 51,
|
||||
"lines": [
|
||||
{
|
||||
"index": 1,
|
||||
"message": "ocloud(version:v3.4.1) received task 431 of job 377, be triggered by event: pull_request",
|
||||
"timestamp": 1718902104.1911685
|
||||
},
|
||||
{
|
||||
"index": 2,
|
||||
"message": "workflow prepared",
|
||||
"timestamp": 1718902104.1916893
|
||||
},
|
||||
{
|
||||
"index": 3,
|
||||
"message": "evaluating expression 'success()'",
|
||||
"timestamp": 1718902104.1919434
|
||||
},
|
||||
{
|
||||
"index": 4,
|
||||
"message": "expression 'success()' evaluated to 'true'",
|
||||
"timestamp": 1718902104.1920443
|
||||
},
|
||||
{
|
||||
"index": 5,
|
||||
"message": "🚀 Start image=thetadev256/cimaster:latest",
|
||||
"timestamp": 1718902104.1920674
|
||||
},
|
||||
{
|
||||
"index": 6,
|
||||
"message": " 🐳 docker pull image=thetadev256/cimaster:latest platform= username= forcePull=true",
|
||||
"timestamp": 1718902104.203115
|
||||
},
|
||||
{
|
||||
"index": 7,
|
||||
"message": " 🐳 docker pull thetadev256/cimaster:latest",
|
||||
"timestamp": 1718902104.2031355
|
||||
},
|
||||
{
|
||||
"index": 8,
|
||||
"message": "pulling image 'docker.io/thetadev256/cimaster:latest' ()",
|
||||
"timestamp": 1718902104.2031558
|
||||
},
|
||||
{
|
||||
"index": 9,
|
||||
"message": "Pulling from thetadev256/cimaster :: latest",
|
||||
"timestamp": 1718902105.179988
|
||||
},
|
||||
{
|
||||
"index": 10,
|
||||
"message": "Digest: sha256:260659581e2900354877f31d5fec14db1c40999ad085a90a1a27c44b9cab8c48 :: ",
|
||||
"timestamp": 1718902105.1935806
|
||||
},
|
||||
{
|
||||
"index": 11,
|
||||
"message": "Status: Image is up to date for thetadev256/cimaster:latest :: ",
|
||||
"timestamp": 1718902105.1936345
|
||||
},
|
||||
{
|
||||
"index": 12,
|
||||
"message": "[{host 26303131f98bfa59ece2ca2dc1742040c8d125e22f1c07de603bd235bccf1b84 2024-04-02 20:48:57.065188715 +0000 UTC local host false {default map[] []} false false false {} false map[] map[] map[] [] map[]} {GITEA-ACTIONS-TASK-375_WORKFLOW-Build-and-push-cimaster-image_JOB-build-build-network 59ca72b83dbd990bda7af5e760fdb0e31010b855ca7e15df1a471fda8908aa47 2024-05-30 20:56:22.698163954 +0000 UTC local bridge false {default map[] [{172.24.0.0/16 172.24.0.1 map[]}]} false false false {} false map[] map[] map[] [] map[]} {none 627a84562dca9a81bd4a1fe570919035f1d608382d2b66b6b8559756d7aa2a6c 2024-04-02 20:48:57.050063369 +0000 UTC local null false {default map[] []} false false false {} false map[] map[] map[] [] map[]} {bridge 9b05952ef8d41f6a92bbc3c7f85c9fd5602e941b9e8e3cbcf84716de27d77aa9 2024-06-20 09:15:48.433095842 +0000 UTC local bridge false {default map[] [{172.17.0.0/16 172.17.0.1 map[]}]} false false false {} false map[] map[com.docker.network.bridge.default_bridge:true com.docker.network.bridge.enable_icc:true com.docker.network.bridge.enable_ip_masquerade:true com.docker.network.bridge.host_binding_ipv4:0.0.0.0 com.docker.network.bridge.name:docker0 com.docker.network.driver.mtu:1500] map[] [] map[]}]",
|
||||
"timestamp": 1718902105.203977
|
||||
},
|
||||
{
|
||||
"index": 13,
|
||||
"message": " 🐳 docker create image=thetadev256/cimaster:latest platform= entrypoint=[\"tail\" \"-f\" \"/dev/null\"] cmd=[] network=\"GITEA-ACTIONS-TASK-431_WORKFLOW-Rust-test_JOB-Test-Test-network\"",
|
||||
"timestamp": 1718902105.2669988
|
||||
},
|
||||
{
|
||||
"index": 14,
|
||||
"message": "Common container.Config ==\u003e \u0026{Hostname: Domainname: User: AttachStdin:false AttachStdout:false AttachStderr:false ExposedPorts:map[] Tty:false OpenStdin:false StdinOnce:false Env:[RUNNER_TOOL_CACHE=/opt/hostedtoolcache RUNNER_OS=Linux RUNNER_ARCH=ARM64 RUNNER_TEMP=/tmp LANG=C.UTF-8] Cmd:[] Healthcheck:\u003cnil\u003e ArgsEscaped:false Image:thetadev256/cimaster:latest Volumes:map[] WorkingDir:/workspace/ThetaDev/test-actions Entrypoint:[] NetworkDisabled:false MacAddress: OnBuild:[] Labels:map[] StopSignal: StopTimeout:\u003cnil\u003e Shell:[]}",
|
||||
"timestamp": 1718902105.2674763
|
||||
},
|
||||
{
|
||||
"index": 15,
|
||||
"message": "Common container.HostConfig ==\u003e \u0026{Binds:[] ContainerIDFile: LogConfig:{Type: Config:map[]} NetworkMode:GITEA-ACTIONS-TASK-431_WORKFLOW-Rust-test_JOB-Test-Test-network PortBindings:map[] RestartPolicy:{Name: MaximumRetryCount:0} AutoRemove:true VolumeDriver: VolumesFrom:[] ConsoleSize:[0 0] Annotations:map[] CapAdd:[] CapDrop:[] CgroupnsMode: DNS:[] DNSOptions:[] DNSSearch:[] ExtraHosts:[] GroupAdd:[] IpcMode: Cgroup: Links:[] OomScoreAdj:0 PidMode: Privileged:false PublishAllPorts:false ReadonlyRootfs:false SecurityOpt:[] StorageOpt:map[] Tmpfs:map[] UTSMode: UsernsMode: ShmSize:0 Sysctls:map[] Runtime: Isolation: Resources:{CPUShares:0 Memory:0 NanoCPUs:0 CgroupParent: BlkioWeight:0 BlkioWeightDevice:[] BlkioDeviceReadBps:[] BlkioDeviceWriteBps:[] BlkioDeviceReadIOps:[] BlkioDeviceWriteIOps:[] CPUPeriod:0 CPUQuota:0 CPURealtimePeriod:0 CPURealtimeRuntime:0 CpusetCpus: CpusetMems: Devices:[] DeviceCgroupRules:[] DeviceRequests:[] KernelMemory:0 KernelMemoryTCP:0 MemoryReservation:0 MemorySwap:0 MemorySwappiness:\u003cnil\u003e OomKillDisable:\u003cnil\u003e PidsLimit:\u003cnil\u003e Ulimits:[] CPUCount:0 CPUPercent:0 IOMaximumIOps:0 IOMaximumBandwidth:0} Mounts:[{Type:volume Source:GITEA-ACTIONS-TASK-431_WORKFLOW-Rust-test_JOB-Test-env Target:/var/run/act ReadOnly:false Consistency: BindOptions:\u003cnil\u003e VolumeOptions:\u003cnil\u003e TmpfsOptions:\u003cnil\u003e ClusterOptions:\u003cnil\u003e} {Type:volume Source:GITEA-ACTIONS-TASK-431_WORKFLOW-Rust-test_JOB-Test Target:/workspace/ThetaDev/test-actions ReadOnly:false Consistency: BindOptions:\u003cnil\u003e VolumeOptions:\u003cnil\u003e TmpfsOptions:\u003cnil\u003e ClusterOptions:\u003cnil\u003e} {Type:volume Source:act-toolcache Target:/opt/hostedtoolcache ReadOnly:false Consistency: BindOptions:\u003cnil\u003e VolumeOptions:\u003cnil\u003e TmpfsOptions:\u003cnil\u003e ClusterOptions:\u003cnil\u003e}] MaskedPaths:[] ReadonlyPaths:[] Init:\u003cnil\u003e}",
|
||||
"timestamp": 1718902105.2731254
|
||||
},
|
||||
{
|
||||
"index": 16,
|
||||
"message": "input.NetworkAliases ==\u003e [Test]",
|
||||
"timestamp": 1718902105.2733588
|
||||
},
|
||||
{
|
||||
"index": 17,
|
||||
"message": "Created container name=GITEA-ACTIONS-TASK-431_WORKFLOW-Rust-test_JOB-Test id=953c4349622d86e68aa9b0d25b341f0ff4b36d87d5ffbae957d9efd72c3f6d64 from image thetadev256/cimaster:latest (platform: )",
|
||||
"timestamp": 1718902105.3252785
|
||||
},
|
||||
{
|
||||
"index": 18,
|
||||
"message": "ENV ==\u003e [RUNNER_TOOL_CACHE=/opt/hostedtoolcache RUNNER_OS=Linux RUNNER_ARCH=ARM64 RUNNER_TEMP=/tmp LANG=C.UTF-8]",
|
||||
"timestamp": 1718902105.3253257
|
||||
},
|
||||
{
|
||||
"index": 19,
|
||||
"message": " 🐳 docker run image=thetadev256/cimaster:latest platform= entrypoint=[\"tail\" \"-f\" \"/dev/null\"] cmd=[] network=\"GITEA-ACTIONS-TASK-431_WORKFLOW-Rust-test_JOB-Test-Test-network\"",
|
||||
"timestamp": 1718902105.3253412
|
||||
},
|
||||
{
|
||||
"index": 20,
|
||||
"message": "Starting container: 953c4349622d86e68aa9b0d25b341f0ff4b36d87d5ffbae957d9efd72c3f6d64",
|
||||
"timestamp": 1718902105.3253546
|
||||
},
|
||||
{
|
||||
"index": 21,
|
||||
"message": "Started container: 953c4349622d86e68aa9b0d25b341f0ff4b36d87d5ffbae957d9efd72c3f6d64",
|
||||
"timestamp": 1718902105.5463858
|
||||
},
|
||||
{
|
||||
"index": 22,
|
||||
"message": " 🐳 docker exec cmd=[chown -R 1000:1000 /workspace/ThetaDev/test-actions] user=0 workdir=",
|
||||
"timestamp": 1718902105.6031785
|
||||
},
|
||||
{
|
||||
"index": 23,
|
||||
"message": "Exec command '[chown -R 1000:1000 /workspace/ThetaDev/test-actions]'",
|
||||
"timestamp": 1718902105.6032245
|
||||
},
|
||||
{
|
||||
"index": 24,
|
||||
"message": "Working directory '/workspace/ThetaDev/test-actions'",
|
||||
"timestamp": 1718902105.6032348
|
||||
},
|
||||
{
|
||||
"index": 25,
|
||||
"message": "Writing entry to tarball workflow/event.json len:8795",
|
||||
"timestamp": 1718902105.6331673
|
||||
},
|
||||
{
|
||||
"index": 26,
|
||||
"message": "Writing entry to tarball workflow/envs.txt len:0",
|
||||
"timestamp": 1718902105.6332214
|
||||
},
|
||||
{
|
||||
"index": 27,
|
||||
"message": "Extracting content to '/var/run/act/'",
|
||||
"timestamp": 1718902105.6332443
|
||||
},
|
||||
{
|
||||
"index": 28,
|
||||
"message": " ☁ git clone 'https://code.thetadev.de/actions/comment-pull-request' # ref=v1",
|
||||
"timestamp": 1718902105.6492677
|
||||
},
|
||||
{
|
||||
"index": 29,
|
||||
"message": " cloning https://code.thetadev.de/actions/comment-pull-request to /data/.cache/act/https---code.thetadev.de-actions-comment-pull-request@v1",
|
||||
"timestamp": 1718902105.6493008
|
||||
},
|
||||
{
|
||||
"index": 30,
|
||||
"message": "Cloned https://code.thetadev.de/actions/comment-pull-request to /data/.cache/act/https---code.thetadev.de-actions-comment-pull-request@v1",
|
||||
"timestamp": 1718902105.6817598
|
||||
},
|
||||
{
|
||||
"index": 31,
|
||||
"message": "Checked out v1",
|
||||
"timestamp": 1718902105.7090926
|
||||
},
|
||||
{
|
||||
"index": 32,
|
||||
"message": "Read action \u0026{Comment Pull Request Comments a pull request with the provided message map[GITHUB_TOKEN:{Github token of the repository (automatically created by Github) false ${{ github.token }}} comment_tag:{A tag on your comment that will be used to identify a comment in case of replacement. false } create_if_not_exists:{Whether a comment should be created even if comment_tag is not found. false true} filePath:{Path of the file that should be commented false } message:{Message that should be printed in the pull request false } pr_number:{Manual pull request number false } reactions:{You can set some reactions on your comments through the `reactions` input. false } recreate:{Delete and recreate the comment instead of updating it false false}] map[] {node20 map[] act/index.js always() always() [] []} {blue message-circle}} from 'Unknown'",
|
||||
"timestamp": 1718902105.709367
|
||||
},
|
||||
{
|
||||
"index": 33,
|
||||
"message": "setupEnv =\u003e map[ACT:true ACTIONS_CACHE_URL:http://192.168.96.3:44491/ ACTIONS_RESULTS_URL:https://code.thetadev.de ACTIONS_RUNTIME_TOKEN:*** ACTIONS_RUNTIME_URL:https://code.thetadev.de/api/actions_pipeline/ CI:true GITEA_ACTIONS:true GITEA_ACTIONS_RUNNER_VERSION:v3.4.1 GITHUB_ACTION:0 GITHUB_ACTIONS:true GITHUB_ACTION_PATH: GITHUB_ACTION_REF: GITHUB_ACTION_REPOSITORY: GITHUB_ACTOR:ThetaDev GITHUB_API_URL:https://code.thetadev.de/api/v1 GITHUB_BASE_REF:main GITHUB_EVENT_NAME:pull_request GITHUB_EVENT_PATH:/var/run/act/workflow/event.json GITHUB_GRAPHQL_URL: GITHUB_HEAD_REF:thetadev-patch-2 GITHUB_JOB:Test GITHUB_REF:refs/pull/3/head GITHUB_REF_NAME:3 GITHUB_REF_TYPE: GITHUB_REPOSITORY:ThetaDev/test-actions GITHUB_REPOSITORY_OWNER:ThetaDev GITHUB_RETENTION_DAYS: GITHUB_RUN_ID:292 GITHUB_RUN_NUMBER:92 GITHUB_SERVER_URL:https://code.thetadev.de GITHUB_SHA:6185409d457e0a7833ee122811b138a950273229 GITHUB_TOKEN:*** GITHUB_WORKFLOW:Rust test GITHUB_WORKSPACE:/workspace/ThetaDev/test-actions ImageOS:cimasterlatest JOB_CONTAINER_NAME:GITEA-ACTIONS-TASK-431_WORKFLOW-Rust-test_JOB-Test RUNNER_PERFLOG:/dev/null RUNNER_TRACKING_ID:]",
|
||||
"timestamp": 1718902105.7244232
|
||||
},
|
||||
{
|
||||
"index": 34,
|
||||
"message": "evaluating expression ''",
|
||||
"timestamp": 1718902105.7316134
|
||||
},
|
||||
{
|
||||
"index": 35,
|
||||
"message": "expression '' evaluated to 'true'",
|
||||
"timestamp": 1718902105.7316737
|
||||
},
|
||||
{
|
||||
"index": 36,
|
||||
"message": "⭐ Run Main Test",
|
||||
"timestamp": 1718902105.7316883
|
||||
},
|
||||
{
|
||||
"index": 37,
|
||||
"message": "Writing entry to tarball workflow/outputcmd.txt len:0",
|
||||
"timestamp": 1718902105.7317095
|
||||
},
|
||||
{
|
||||
"index": 38,
|
||||
"message": "Writing entry to tarball workflow/statecmd.txt len:0",
|
||||
"timestamp": 1718902105.7317333
|
||||
},
|
||||
{
|
||||
"index": 39,
|
||||
"message": "Writing entry to tarball workflow/pathcmd.txt len:0",
|
||||
"timestamp": 1718902105.7317476
|
||||
},
|
||||
{
|
||||
"index": 40,
|
||||
"message": "Writing entry to tarball workflow/envs.txt len:0",
|
||||
"timestamp": 1718902105.7317617
|
||||
},
|
||||
{
|
||||
"index": 41,
|
||||
"message": "Writing entry to tarball workflow/SUMMARY.md len:0",
|
||||
"timestamp": 1718902105.7317736
|
||||
},
|
||||
{
|
||||
"index": 42,
|
||||
"message": "Extracting content to '/var/run/act'",
|
||||
"timestamp": 1718902105.731786
|
||||
},
|
||||
{
|
||||
"index": 43,
|
||||
"message": "expression 'echo \"${{ secrets.FORGEJO_CI_TOKEN }}\"\\n' rewritten to 'format('echo \"{0}\"\\n', secrets.FORGEJO_CI_TOKEN)'",
|
||||
"timestamp": 1718902105.754891
|
||||
},
|
||||
{
|
||||
"index": 44,
|
||||
"message": "evaluating expression 'format('echo \"{0}\"\\n', secrets.FORGEJO_CI_TOKEN)'",
|
||||
"timestamp": 1718902105.7549253
|
||||
},
|
||||
{
|
||||
"index": 45,
|
||||
"message": "expression 'format('echo \"{0}\"\\n', secrets.FORGEJO_CI_TOKEN)' evaluated to '%!t(string=echo \"***\"\\n)'",
|
||||
"timestamp": 1718902105.7549586
|
||||
},
|
||||
{
|
||||
"index": 46,
|
||||
"message": "Wrote command \\n\\necho \"***\"\\n\\n\\n to 'workflow/0'",
|
||||
"timestamp": 1718902105.754978
|
||||
},
|
||||
{
|
||||
"index": 47,
|
||||
"message": "Writing entry to tarball workflow/0 len:50",
|
||||
"timestamp": 1718902105.755002
|
||||
},
|
||||
{
|
||||
"index": 48,
|
||||
"message": "Extracting content to '/var/run/act'",
|
||||
"timestamp": 1718902105.755024
|
||||
},
|
||||
{
|
||||
"index": 49,
|
||||
"message": " 🐳 docker exec cmd=[bash --noprofile --norc -e -o pipefail /var/run/act/workflow/0] user= workdir=",
|
||||
"timestamp": 1718902105.7571557
|
||||
},
|
||||
{
|
||||
"index": 50,
|
||||
"message": "Exec command '[bash --noprofile --norc -e -o pipefail /var/run/act/workflow/0]'",
|
||||
"timestamp": 1718902105.7571852
|
||||
},
|
||||
{
|
||||
"index": 51,
|
||||
"message": "Working directory '/workspace/ThetaDev/test-actions'",
|
||||
"timestamp": 1718902105.7572272
|
||||
}
|
||||
],
|
||||
"started": 1718902104
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
220
tests/testfiles/githubWorkflowRun.json
Normal file
@ -0,0 +1,220 @@
|
|||
{
|
||||
"id": 9598566319,
|
||||
"name": "db-tests",
|
||||
"node_id": "WFR_kwLOBFIx288AAAACPB5_rw",
|
||||
"head_branch": "fix-ui-tab",
|
||||
"head_sha": "7ae95457156ea964402747ae263d5a2a7de48883",
|
||||
"path": ".github/workflows/pull-db-tests.yml",
|
||||
"display_title": "WIP: Fix tab performance",
|
||||
"run_number": 20434,
|
||||
"event": "pull_request",
|
||||
"status": "completed",
|
||||
"conclusion": "success",
|
||||
"workflow_id": 56971384,
|
||||
"check_suite_id": 25125296548,
|
||||
"check_suite_node_id": "CS_kwDOBFIx288AAAAF2ZWZpA",
|
||||
"url": "https://api.github.com/repos/go-gitea/gitea/actions/runs/9598566319",
|
||||
"html_url": "https://github.com/go-gitea/gitea/actions/runs/9598566319",
|
||||
"pull_requests": [],
|
||||
"created_at": "2024-06-20T13:41:06Z",
|
||||
"updated_at": "2024-06-20T14:10:02Z",
|
||||
"actor": {
|
||||
"login": "wxiaoguang",
|
||||
"id": 2114189,
|
||||
"node_id": "MDQ6VXNlcjIxMTQxODk=",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/2114189?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/wxiaoguang",
|
||||
"html_url": "https://github.com/wxiaoguang",
|
||||
"followers_url": "https://api.github.com/users/wxiaoguang/followers",
|
||||
"following_url": "https://api.github.com/users/wxiaoguang/following{/other_user}",
|
||||
"gists_url": "https://api.github.com/users/wxiaoguang/gists{/gist_id}",
|
||||
"starred_url": "https://api.github.com/users/wxiaoguang/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.github.com/users/wxiaoguang/subscriptions",
|
||||
"organizations_url": "https://api.github.com/users/wxiaoguang/orgs",
|
||||
"repos_url": "https://api.github.com/users/wxiaoguang/repos",
|
||||
"events_url": "https://api.github.com/users/wxiaoguang/events{/privacy}",
|
||||
"received_events_url": "https://api.github.com/users/wxiaoguang/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"run_attempt": 1,
|
||||
"referenced_workflows": [
|
||||
{
|
||||
"path": "go-gitea/gitea/.github/workflows/files-changed.yml@d8d6749d313098583fc1d527ce8a4aafb81ca12d",
|
||||
"sha": "d8d6749d313098583fc1d527ce8a4aafb81ca12d",
|
||||
"ref": "refs/pull/31437/merge"
|
||||
}
|
||||
],
|
||||
"run_started_at": "2024-06-20T13:41:06Z",
|
||||
"triggering_actor": {
|
||||
"login": "wxiaoguang",
|
||||
"id": 2114189,
|
||||
"node_id": "MDQ6VXNlcjIxMTQxODk=",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/2114189?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/wxiaoguang",
|
||||
"html_url": "https://github.com/wxiaoguang",
|
||||
"followers_url": "https://api.github.com/users/wxiaoguang/followers",
|
||||
"following_url": "https://api.github.com/users/wxiaoguang/following{/other_user}",
|
||||
"gists_url": "https://api.github.com/users/wxiaoguang/gists{/gist_id}",
|
||||
"starred_url": "https://api.github.com/users/wxiaoguang/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.github.com/users/wxiaoguang/subscriptions",
|
||||
"organizations_url": "https://api.github.com/users/wxiaoguang/orgs",
|
||||
"repos_url": "https://api.github.com/users/wxiaoguang/repos",
|
||||
"events_url": "https://api.github.com/users/wxiaoguang/events{/privacy}",
|
||||
"received_events_url": "https://api.github.com/users/wxiaoguang/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"jobs_url": "https://api.github.com/repos/go-gitea/gitea/actions/runs/9598566319/jobs",
|
||||
"logs_url": "https://api.github.com/repos/go-gitea/gitea/actions/runs/9598566319/logs",
|
||||
"check_suite_url": "https://api.github.com/repos/go-gitea/gitea/check-suites/25125296548",
|
||||
"artifacts_url": "https://api.github.com/repos/go-gitea/gitea/actions/runs/9598566319/artifacts",
|
||||
"cancel_url": "https://api.github.com/repos/go-gitea/gitea/actions/runs/9598566319/cancel",
|
||||
"rerun_url": "https://api.github.com/repos/go-gitea/gitea/actions/runs/9598566319/rerun",
|
||||
"previous_attempt_url": null,
|
||||
"workflow_url": "https://api.github.com/repos/go-gitea/gitea/actions/workflows/56971384",
|
||||
"head_commit": {
|
||||
"id": "7ae95457156ea964402747ae263d5a2a7de48883",
|
||||
"tree_id": "edb45bf6711cdcff1ee0347e330a0bd5b89996ec",
|
||||
"message": "fix",
|
||||
"timestamp": "2024-06-20T13:40:55Z",
|
||||
"author": { "name": "wxiaoguang", "email": "wxiaoguang@gmail.com" },
|
||||
"committer": { "name": "wxiaoguang", "email": "wxiaoguang@gmail.com" }
|
||||
},
|
||||
"repository": {
|
||||
"id": 72495579,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk3MjQ5NTU3OQ==",
|
||||
"name": "gitea",
|
||||
"full_name": "go-gitea/gitea",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "go-gitea",
|
||||
"id": 12724356,
|
||||
"node_id": "MDEyOk9yZ2FuaXphdGlvbjEyNzI0MzU2",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/12724356?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/go-gitea",
|
||||
"html_url": "https://github.com/go-gitea",
|
||||
"followers_url": "https://api.github.com/users/go-gitea/followers",
|
||||
"following_url": "https://api.github.com/users/go-gitea/following{/other_user}",
|
||||
"gists_url": "https://api.github.com/users/go-gitea/gists{/gist_id}",
|
||||
"starred_url": "https://api.github.com/users/go-gitea/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.github.com/users/go-gitea/subscriptions",
|
||||
"organizations_url": "https://api.github.com/users/go-gitea/orgs",
|
||||
"repos_url": "https://api.github.com/users/go-gitea/repos",
|
||||
"events_url": "https://api.github.com/users/go-gitea/events{/privacy}",
|
||||
"received_events_url": "https://api.github.com/users/go-gitea/received_events",
|
||||
"type": "Organization",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://github.com/go-gitea/gitea",
|
||||
"description": "Git with a cup of tea! Painless self-hosted all-in-one software development service, including Git hosting, code review, team collaboration, package registry and CI/CD",
|
||||
"fork": false,
|
||||
"url": "https://api.github.com/repos/go-gitea/gitea",
|
||||
"forks_url": "https://api.github.com/repos/go-gitea/gitea/forks",
|
||||
"keys_url": "https://api.github.com/repos/go-gitea/gitea/keys{/key_id}",
|
||||
"collaborators_url": "https://api.github.com/repos/go-gitea/gitea/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.github.com/repos/go-gitea/gitea/teams",
|
||||
"hooks_url": "https://api.github.com/repos/go-gitea/gitea/hooks",
|
||||
"issue_events_url": "https://api.github.com/repos/go-gitea/gitea/issues/events{/number}",
|
||||
"events_url": "https://api.github.com/repos/go-gitea/gitea/events",
|
||||
"assignees_url": "https://api.github.com/repos/go-gitea/gitea/assignees{/user}",
|
||||
"branches_url": "https://api.github.com/repos/go-gitea/gitea/branches{/branch}",
|
||||
"tags_url": "https://api.github.com/repos/go-gitea/gitea/tags",
|
||||
"blobs_url": "https://api.github.com/repos/go-gitea/gitea/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.github.com/repos/go-gitea/gitea/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.github.com/repos/go-gitea/gitea/git/refs{/sha}",
|
||||
"trees_url": "https://api.github.com/repos/go-gitea/gitea/git/trees{/sha}",
|
||||
"statuses_url": "https://api.github.com/repos/go-gitea/gitea/statuses/{sha}",
|
||||
"languages_url": "https://api.github.com/repos/go-gitea/gitea/languages",
|
||||
"stargazers_url": "https://api.github.com/repos/go-gitea/gitea/stargazers",
|
||||
"contributors_url": "https://api.github.com/repos/go-gitea/gitea/contributors",
|
||||
"subscribers_url": "https://api.github.com/repos/go-gitea/gitea/subscribers",
|
||||
"subscription_url": "https://api.github.com/repos/go-gitea/gitea/subscription",
|
||||
"commits_url": "https://api.github.com/repos/go-gitea/gitea/commits{/sha}",
|
||||
"git_commits_url": "https://api.github.com/repos/go-gitea/gitea/git/commits{/sha}",
|
||||
"comments_url": "https://api.github.com/repos/go-gitea/gitea/comments{/number}",
|
||||
"issue_comment_url": "https://api.github.com/repos/go-gitea/gitea/issues/comments{/number}",
|
||||
"contents_url": "https://api.github.com/repos/go-gitea/gitea/contents/{+path}",
|
||||
"compare_url": "https://api.github.com/repos/go-gitea/gitea/compare/{base}...{head}",
|
||||
"merges_url": "https://api.github.com/repos/go-gitea/gitea/merges",
|
||||
"archive_url": "https://api.github.com/repos/go-gitea/gitea/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.github.com/repos/go-gitea/gitea/downloads",
|
||||
"issues_url": "https://api.github.com/repos/go-gitea/gitea/issues{/number}",
|
||||
"pulls_url": "https://api.github.com/repos/go-gitea/gitea/pulls{/number}",
|
||||
"milestones_url": "https://api.github.com/repos/go-gitea/gitea/milestones{/number}",
|
||||
"notifications_url": "https://api.github.com/repos/go-gitea/gitea/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.github.com/repos/go-gitea/gitea/labels{/name}",
|
||||
"releases_url": "https://api.github.com/repos/go-gitea/gitea/releases{/id}",
|
||||
"deployments_url": "https://api.github.com/repos/go-gitea/gitea/deployments"
|
||||
},
|
||||
"head_repository": {
|
||||
"id": 398521154,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnkzOTg1MjExNTQ=",
|
||||
"name": "gitea",
|
||||
"full_name": "wxiaoguang/gitea",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "wxiaoguang",
|
||||
"id": 2114189,
|
||||
"node_id": "MDQ6VXNlcjIxMTQxODk=",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/2114189?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/wxiaoguang",
|
||||
"html_url": "https://github.com/wxiaoguang",
|
||||
"followers_url": "https://api.github.com/users/wxiaoguang/followers",
|
||||
"following_url": "https://api.github.com/users/wxiaoguang/following{/other_user}",
|
||||
"gists_url": "https://api.github.com/users/wxiaoguang/gists{/gist_id}",
|
||||
"starred_url": "https://api.github.com/users/wxiaoguang/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.github.com/users/wxiaoguang/subscriptions",
|
||||
"organizations_url": "https://api.github.com/users/wxiaoguang/orgs",
|
||||
"repos_url": "https://api.github.com/users/wxiaoguang/repos",
|
||||
"events_url": "https://api.github.com/users/wxiaoguang/events{/privacy}",
|
||||
"received_events_url": "https://api.github.com/users/wxiaoguang/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://github.com/wxiaoguang/gitea",
|
||||
"description": "Git with a cup of tea, painless self-hosted git service",
|
||||
"fork": true,
|
||||
"url": "https://api.github.com/repos/wxiaoguang/gitea",
|
||||
"forks_url": "https://api.github.com/repos/wxiaoguang/gitea/forks",
|
||||
"keys_url": "https://api.github.com/repos/wxiaoguang/gitea/keys{/key_id}",
|
||||
"collaborators_url": "https://api.github.com/repos/wxiaoguang/gitea/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.github.com/repos/wxiaoguang/gitea/teams",
|
||||
"hooks_url": "https://api.github.com/repos/wxiaoguang/gitea/hooks",
|
||||
"issue_events_url": "https://api.github.com/repos/wxiaoguang/gitea/issues/events{/number}",
|
||||
"events_url": "https://api.github.com/repos/wxiaoguang/gitea/events",
|
||||
"assignees_url": "https://api.github.com/repos/wxiaoguang/gitea/assignees{/user}",
|
||||
"branches_url": "https://api.github.com/repos/wxiaoguang/gitea/branches{/branch}",
|
||||
"tags_url": "https://api.github.com/repos/wxiaoguang/gitea/tags",
|
||||
"blobs_url": "https://api.github.com/repos/wxiaoguang/gitea/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.github.com/repos/wxiaoguang/gitea/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.github.com/repos/wxiaoguang/gitea/git/refs{/sha}",
|
||||
"trees_url": "https://api.github.com/repos/wxiaoguang/gitea/git/trees{/sha}",
|
||||
"statuses_url": "https://api.github.com/repos/wxiaoguang/gitea/statuses/{sha}",
|
||||
"languages_url": "https://api.github.com/repos/wxiaoguang/gitea/languages",
|
||||
"stargazers_url": "https://api.github.com/repos/wxiaoguang/gitea/stargazers",
|
||||
"contributors_url": "https://api.github.com/repos/wxiaoguang/gitea/contributors",
|
||||
"subscribers_url": "https://api.github.com/repos/wxiaoguang/gitea/subscribers",
|
||||
"subscription_url": "https://api.github.com/repos/wxiaoguang/gitea/subscription",
|
||||
"commits_url": "https://api.github.com/repos/wxiaoguang/gitea/commits{/sha}",
|
||||
"git_commits_url": "https://api.github.com/repos/wxiaoguang/gitea/git/commits{/sha}",
|
||||
"comments_url": "https://api.github.com/repos/wxiaoguang/gitea/comments{/number}",
|
||||
"issue_comment_url": "https://api.github.com/repos/wxiaoguang/gitea/issues/comments{/number}",
|
||||
"contents_url": "https://api.github.com/repos/wxiaoguang/gitea/contents/{+path}",
|
||||
"compare_url": "https://api.github.com/repos/wxiaoguang/gitea/compare/{base}...{head}",
|
||||
"merges_url": "https://api.github.com/repos/wxiaoguang/gitea/merges",
|
||||
"archive_url": "https://api.github.com/repos/wxiaoguang/gitea/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.github.com/repos/wxiaoguang/gitea/downloads",
|
||||
"issues_url": "https://api.github.com/repos/wxiaoguang/gitea/issues{/number}",
|
||||
"pulls_url": "https://api.github.com/repos/wxiaoguang/gitea/pulls{/number}",
|
||||
"milestones_url": "https://api.github.com/repos/wxiaoguang/gitea/milestones{/number}",
|
||||
"notifications_url": "https://api.github.com/repos/wxiaoguang/gitea/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.github.com/repos/wxiaoguang/gitea/labels{/name}",
|
||||
"releases_url": "https://api.github.com/repos/wxiaoguang/gitea/releases{/id}",
|
||||
"deployments_url": "https://api.github.com/repos/wxiaoguang/gitea/deployments"
|
||||
}
|
||||
}
|