Mirror of https://github.com/grafana/grafana.git (synced 2026-01-07 22:41:10 +08:00)
Compare commits
286 Commits
Commit SHA1s:
34b8880d53, ccfa71bc71, fc4978fdee, f2a07cc62e, 6740611758, 07f2ff99d6, dea96e6763, 3426447fb4,
833587b4c2, 3696026205, 4e6eb817ec, c857fb0d49, 6678215a1c, 58ea9c81c8, 7a0abab141, 151fd1fecd,
5163782a13, d6764b59b0, c00888db42, 61847c32d9, 0c4d98def4, 6855cdff7c, 6352cc18ba, 9d4a7b0290,
58e890dec6, 71a9bf0203, da32c785c6, 5acc7ea503, 6e9b0b6ee1, abca3c81b0, 4347b32f0e, 9d4a77127d,
951f26304b, f427f11bda, 77f610a1f3, 2cc19d6a7a, 178b2ea0e5, 5e03bce5d2, feb4fbaae9, c046d8cec2,
72d10b8ddb, 5a1c145aee, 4ad607ecc9, 99392d016f, a61f38238c, 3c673048d8, f39a38a9b5, 9240425952,
217f9438c6, 4f215e06f1, ccef062388, 08eab136b6, 43696bad09, 9e6962be65, 64dc3a946d, f45404c8ab,
0176693fe9, 86f86d6fba, b992d6c228, 3760a3ab23, bb646150f5, bf158b2c4a, b4b0ee91be, bfcf311f8f,
5f7e249f40, 742947f6c3, af6e673891, f5211483ea, b780f77e74, 72ead468b2, 4e2cbb7028, a622b4ae58,
902ea4299c, eac0c08b13, 8cfc6571d8, 786174ce94, 0a3ccc1701, b9a77c6208, 133c136847, 592829cbed,
f3592a4e2b, 51d3926cde, 5512b07939, c745d2fd43, e928c334c2, f195dcc37a, 33a7e11344, c65399eaad,
bcb405e40c, 6c4f476597, 44a659d1dc, ed5f8c1426, 8d2a6073d1, a4b4958279, 7964cc6789, 6c29821253,
70a0dc57ac, 79016c5d39, 67c46e85a9, 845e73cecd, 1a711659f5, 98c540ce8a, 954f55785a, 267c4d376b,
57cdd0239f, edff57c2ef, bbcf11723a, 103f8fa094, 8dd9ecf7cb, 4b91dc7168, 59067a72b8, 1f802e1491,
e4bb887ddd, a7db2a20dc, 3aa311f869, c760ca90db, 73f0217abf, 4fbb9da0d2, cf9c019310, 17219bf052,
cfeb21b12c, 577d9d52f6, ed5966a452, 81916e8a3f, c4ffd13612, 0194e87bf7, 5ff1eecfeb, ab43bcf08a,
1fe8b3a7b0, 3baec71cce, 9271d76f1a, 5e380f40e5, 5619aa7a5b, d6a13c45d2, c6e35018c6, 0ed6741259,
37266efdb2, 3d1fd54973, 7b8440ad46, 95b407e39a, 9bed8070ae, 07e9b15579, cea45e4f90, 75ba1d438e,
485780476b, 0d4e9b540e, e12111ee6e, 3f7725c891, d4497432a0, ee64547584, 638993ccff, cc1d39872a,
a4154b376f, a434c52cee, 3415030a1d, 6325bd89b2, 42a2058b4d, e56828d9fa, 709c5e8db4, 9b23169f00,
1dcecdb89a, b22b337c87, a0f7d34bda, 45e0238642, 1eb4460cf9, b1efd95af1, c47396f7b5, 611b5123ba,
90ec5d992a, c20101c3e0, 8d2b7b134b, 3e52d66c79, 90c87a52f7, a6d95e8b6e, 2ee831969d, 302c542aba,
45c67c5b82, b3570b823d, e8a7f71f31, 98a175f158, 4916a721f8, f16de24c2f, 81114a3972, 82d548f1de,
62e720c06b, c65d3ea147, 3a05320d00, 49dca774f3, 889e1ab729, 30071cee1a, dd15b33f74, 3a69f8e915,
bbee1b26c5, de4bf9b9f3, f3df336b17, 6513bbfaf2, 857bd77fc2, a91f08222e, 8ad4703f6d, 5e26c6148d,
48e5f17392, db240265ff, 9a89e0c157, 0e5a6a62be, 4ab256c132, dd42424a01, 2549519180, 5291563f6c,
6f40b883e1, e3a270a041, f8b8b4f578, 7039d3ce27, 784b924e9f, b05581cc23, b6e20abe61, 62a64d3233,
43cced54a0, 617cf0646d, 7442a7c66e, dfa1a2cd41, ee81dc3b01, 6cd4182ec9, 716b254973, 1d12f22190,
fc5e1dbd15, 22a357cc7d, bd3e140852, 7a823fe895, 4eb9c6f06c, 9ccc02f5a3, 64bd269f1e, 7b3275d4ed,
b312a60627, 0075a3c014, 8c70bebf90, 411e19ed4b, 62d0c516cb, 78b225a5c2, a952824e35, b0dd5762f8,
f778fdbbe8, d1cd7e7c64, 0f5446d768, 33ab22229f, d74d4d6be9, e9bea6053c, 376cbf75b0, 65d0e4227e,
41315b0935, 0d29421a87, 1bad6de8cc, 2fbfe987f0, c2d807a1c8, 3c4754b19f, c440fd4f5a, 28972eaf4b,
a7017f2729, 31cc177e31, abf351f776, 1a353a1eea, 2b97f6a507, 4478259f70, 38cb26bd5b, 1f0339179f,
f3f8972505, 0a08cf10e5, a8f5445d47, 631c12ec91, d388afece6, b0fe99911a, 8392ebdacb, 147704deb9,
078d716be9, 04cb471599, 6564f22772, 13cd3ea28b, d5e0665081, 909141592d, fda235a862, 154231a58d,
197e4344da, 64b008e28b, bc9ac1199b, 2b15e1a962, 4308a77e27, f18749927c
10646  .betterer.results (File diff suppressed because it is too large)
69  .betterer.ts
@@ -1,69 +0,0 @@
import { regexp } from '@betterer/regexp';
import { BettererFileTest } from '@betterer/betterer';
import { ESLint, Linter } from 'eslint';
import { existsSync } from 'fs';

export default {
  'no enzyme tests': () => regexp(/from 'enzyme'/g).include('**/*.test.*'),
  'better eslint': () => countEslintErrors().include('**/*.{ts,tsx}'),
  'no undocumented stories': () => countUndocumentedStories().include('**/*.story.tsx'),
};

function countUndocumentedStories() {
  return new BettererFileTest(async (filePaths, fileTestResult) => {
    filePaths.forEach((filePath) => {
      if (!existsSync(filePath.replace(/\.story.tsx$/, '.mdx'))) {
        // In this case the file contents don't matter:
        const file = fileTestResult.addFile(filePath, '');
        // Add the issue to the first character of the file:
        file.addIssue(0, 0, 'No undocumented stories are allowed, please add an .mdx file with some documentation');
      }
    });
  });
}

function countEslintErrors() {
  return new BettererFileTest(async (filePaths, fileTestResult, resolver) => {
    const { baseDirectory } = resolver;
    const cli = new ESLint({ cwd: baseDirectory });

    await Promise.all(
      filePaths.map(async (filePath) => {
        const linterOptions = (await cli.calculateConfigForFile(filePath)) as Linter.Config;

        const rules: Partial<Linter.RulesRecord> = {
          '@typescript-eslint/no-explicit-any': 'error',
        };

        if (!filePath.endsWith('.test.tsx') && !filePath.endsWith('.test.ts')) {
          rules['@typescript-eslint/consistent-type-assertions'] = [
            'error',
            {
              assertionStyle: 'never',
            },
          ];
        }

        const runner = new ESLint({
          baseConfig: {
            ...linterOptions,
            rules,
          },
          useEslintrc: false,
          cwd: baseDirectory,
        });

        const lintResults = await runner.lintFiles([filePath]);
        lintResults
          .filter((lintResult) => lintResult.source)
          .forEach((lintResult) => {
            const { messages } = lintResult;
            const file = fileTestResult.addFile(filePath, '');
            messages.forEach((message, index) => {
              file.addIssue(0, 0, message.message, `${index}`);
            });
          });
      })
    );
  });
}
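The deleted `.betterer.ts` above registers each check as a named entry in the exported map, so adding another check only means adding another key. A minimal sketch, assuming the same `@betterer/regexp` helper used above; the test name and glob below are hypothetical and not part of this diff:

```ts
import { regexp } from '@betterer/regexp';

// Hypothetical extra betterer test following the same pattern as the file
// above: flag any source file that still imports from 'enzyme'.
// The test name and include glob are illustrative only.
export default {
  'no enzyme imports in sources': () => regexp(/from 'enzyme'/g).include('**/*.{ts,tsx}'),
};
```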
13  .bingo/.gitignore (vendored)
@@ -1,13 +0,0 @@

# Ignore everything
*

# But not these files:
!.gitignore
!*.mod
!*.sum
!README.md
!Variables.mk
!variables.env

*tmp.mod
@@ -1,14 +0,0 @@
# Project Development Dependencies.

This is directory which stores Go modules with pinned buildable package that is used within this repository, managed by https://github.com/bwplotka/bingo.

- Run `bingo get` to install all tools having each own module file in this directory.
- Run `bingo get <tool>` to install <tool> that have own module file in this directory.
- For Makefile: Make sure to put `include .bingo/Variables.mk` in your Makefile, then use $(<upper case tool name>) variable where <tool> is the .bingo/<tool>.mod.
- For shell: Run `source .bingo/variables.env` to source all environment variable for each tool.
- For go: Import `.bingo/variables.go` to for variable names.
- See https://github.com/bwplotka/bingo or -h on how to add, remove or change binaries dependencies.

## Requirements

- Go 1.14+
@@ -1,31 +0,0 @@
# Auto generated binary variables helper managed by https://github.com/bwplotka/bingo v0.6. DO NOT EDIT.
# All tools are designed to be build inside $GOBIN.
BINGO_DIR := $(dir $(lastword $(MAKEFILE_LIST)))
GOPATH ?= $(shell go env GOPATH)
GOBIN ?= $(firstword $(subst :, ,${GOPATH}))/bin
GO ?= $(shell which go)

# Below generated variables ensure that every time a tool under each variable is invoked, the correct version
# will be used; reinstalling only if needed.
# For example for drone variable:
#
# In your main Makefile (for non array binaries):
#
#include .bingo/Variables.mk # Assuming -dir was set to .bingo .
#
#command: $(DRONE)
#	@echo "Running drone"
#	@$(DRONE) <flags/args..>
#
DRONE := $(GOBIN)/drone-v1.5.0
$(DRONE): $(BINGO_DIR)/drone.mod
	@# Install binary/ries using Go 1.14+ build command. This is using bwplotka/bingo-controlled, separate go module with pinned dependencies.
	@echo "(re)installing $(GOBIN)/drone-v1.5.0"
	@cd $(BINGO_DIR) && $(GO) build -mod=mod -modfile=drone.mod -o=$(GOBIN)/drone-v1.5.0 "github.com/drone/drone-cli/drone"

WIRE := $(GOBIN)/wire-v0.5.0
$(WIRE): $(BINGO_DIR)/wire.mod
	@# Install binary/ries using Go 1.14+ build command. This is using bwplotka/bingo-controlled, separate go module with pinned dependencies.
	@echo "(re)installing $(GOBIN)/wire-v0.5.0"
	@cd $(BINGO_DIR) && $(GO) build -mod=mod -modfile=wire.mod -o=$(GOBIN)/wire-v0.5.0 "github.com/google/wire/cmd/wire"
@@ -1,7 +0,0 @@
module _ // Auto generated by https://github.com/bwplotka/bingo. DO NOT EDIT

go 1.17

replace github.com/docker/docker => github.com/docker/engine v17.12.0-ce-rc1.0.20200309214505-aa6a9891b09c+incompatible

require github.com/drone/drone-cli v1.5.0 // drone
1030  .bingo/drone.sum (File diff suppressed because it is too large)
@@ -1 +0,0 @@
module _ // Fake go.mod auto-created by 'bingo' for go -moddir compatibility with non-Go projects. Commit this file, together with other .mod files.
@@ -1,14 +0,0 @@
# Auto generated binary variables helper managed by https://github.com/bwplotka/bingo v0.6. DO NOT EDIT.
# All tools are designed to be build inside $GOBIN.
# Those variables will work only until 'bingo get' was invoked, or if tools were installed via Makefile's Variables.mk.
GOBIN=${GOBIN:=$(go env GOBIN)}

if [ -z "$GOBIN" ]; then
	GOBIN="$(go env GOPATH)/bin"
fi


DRONE="${GOBIN}/drone-v1.5.0"

WIRE="${GOBIN}/wire-v0.5.0"
@@ -1,5 +0,0 @@
module _ // Auto generated by https://github.com/bwplotka/bingo. DO NOT EDIT

go 1.16

require github.com/google/wire v0.5.0 // cmd/wire
@@ -1,8 +1,7 @@
[run]
init_cmds = [
  ["make", "gen-go"],
  ["GO_BUILD_DEV=1", "make", "build-cli"],
  ["GO_BUILD_DEV=1", "make", "build-server"],
  ["go", "run", "build.go", "-dev", "build-cli"],
  ["go", "run", "build.go", "-dev", "build-server"],
  ["./bin/grafana-server", "-packaging=dev", "cfg:app_mode=development"]
]
watch_all = true
@@ -13,10 +12,8 @@ watch_dirs = [
  "$WORKDIR/conf",
]
watch_exts = [".go", ".ini", ".toml", ".template.html"]
ignore_files = ["wire_gen.go"]
build_delay = 1500
cmds = [
  ["make", "gen-go"],
  ["GO_BUILD_DEV=1", "make", "build-server"],
  ["go", "run", "build.go", "-dev", "build-server"],
  ["./bin/grafana-server", "-packaging=dev", "cfg:app_mode=development"]
]
11  .circleci/config.yml (Normal file)
@@ -0,0 +1,11 @@
version: 2.1
jobs:
  build:
    docker:
      - image: alpine:3.7
    steps:
      - run:
          name: The First Step
          command: |
            echo 'Fake step!'
@@ -12,14 +12,7 @@ Dockerfile
docs
dump.rdb
node_modules
**/node_modules
/local
/tmp
*.yml
!.yarnrc.yml
*.md
.yarn/*
!.yarn/patches
!.yarn/releases
!.yarn/plugins
!.yarn/versions
!.yarn/cache
18  .drone.star
@@ -4,18 +4,14 @@
# 3. Run `make drone`
# More information about this process here: https://github.com/grafana/deployment_tools/blob/master/docs/infrastructure/drone/signing.md

load('scripts/drone/pipelines/pr.star', 'pr_pipelines')
load('scripts/drone/pipelines/main.star', 'main_pipelines')
load('scripts/drone/pipelines/docs.star', 'docs_pipelines')
load('scripts/drone/pipelines/release.star', 'release_pipelines', 'publish_image_pipelines', 'publish_artifacts_pipelines', 'publish_npm_pipelines', 'publish_packages_pipeline')
load('scripts/drone/version.star', 'version_branch_pipelines')
load('scripts/drone/pipelines/cron.star', 'cronjobs')
load('scripts/drone/vault.star', 'secrets')
load('scripts/pr.star', 'pr_pipelines')
load('scripts/main.star', 'main_pipelines')
load('scripts/release.star', 'release_pipelines', 'test_release_pipelines')
load('scripts/version.star', 'version_branch_pipelines')
load('scripts/job.star', 'cronjobs')
load('scripts/vault.star', 'secrets')

def main(ctx):
    edition = 'oss'
    return pr_pipelines(edition=edition) + main_pipelines(edition=edition) + release_pipelines() + \
        publish_image_pipelines('public') + publish_image_pipelines('security') + \
        publish_artifacts_pipelines('security') + publish_artifacts_pipelines('public') + \
        publish_npm_pipelines('public') + publish_packages_pipeline() + \
        version_branch_pipelines() + cronjobs(edition=edition) + secrets()
        test_release_pipelines() + version_branch_pipelines() + cronjobs(edition=edition) + secrets()
6949  .drone.yml (File diff suppressed because it is too large)
@@ -1,25 +1,8 @@
.git
.github
.yarn
build
node_modules
compiled
data
deployment_tools_config.json
build
vendor
devenv
data
dist
e2e/tmp
node_modules
pkg
public/lib/monaco
scripts/grafana-server/tmp
vendor

# TS generate from cue by cuetsy
**/*.gen.ts

# Auto-generated localisation files
public/locales/_build/
public/locales/**/*.js

# Auto-generated icon file
packages/grafana-ui/src/components/Icon/iconBundle.ts
18  .eslintrc
@@ -1,24 +1,12 @@
{
  "extends": ["@grafana/eslint-config"],
  "root": true,
  "plugins": ["@emotion", "lodash", "jest", "import"],
  "settings": {
    "import/internal-regex": "^(app/)|(@grafana)",
    "import/external-module-folders": ["node_modules", ".yarn"]
  },
  "plugins": ["no-only-tests", "@emotion", "lodash"],
  "rules": {
    "no-only-tests/no-only-tests": "error",
    "react/prop-types": "off",
    "@emotion/jsx-import": "error",
    "lodash/import-scope": [2, "member"],
    "jest/no-focused-tests": "error",
    "import/order": [
      "error",
      {
        "groups": [["builtin", "external"], "internal", "parent", "sibling", "index"],
        "newlines-between": "always",
        "alphabetize": { "order": "asc" }
      }
    ]
    "lodash/import-scope": [2, "member"]
  },
  "overrides": [
    {
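The `import/order` rule added above enforces grouped, alphabetized imports with a blank line between groups, where anything matching `^(app/)|(@grafana)` counts as internal. A small illustrative TypeScript sketch of an import block laid out the way that configuration expects; the module names are placeholders, not taken from this diff:

```ts
// Built-in/external packages come first, alphabetized within the group.
import { css } from '@emotion/css';
import React from 'react';

// Internal modules (matching "^(app/)|(@grafana)") form the next group,
// separated from the previous one by a blank line.
import { useStyles2 } from '@grafana/ui';
import { contextSrv } from 'app/core/services/context_srv';

// Parent, sibling, and index imports would each follow as their own groups.
// This snippet only illustrates ordering; the imported values are unused here.
import { placeholderHelper } from './placeholderHelper';
```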
178  .github/CODEOWNERS (vendored)
@@ -14,170 +14,80 @@
# Documentation owner: Jita Chatterjee
/docs/ @grafana/docs-squad @pkolyvas
/contribute/ @marcusolsson @grafana/docs-squad @pkolyvas
/docs/sources/developers/plugins/ @marcusolsson @grafana/docs-squad @grafana/plugins-platform-frontend @grafana/plugins-platform-backend
/docs/sources/developers/plugins/backend @marcusolsson @grafana/docs-squad @grafana/plugins-platform-backend
/docs/sources/developers/plugins/ @marcusolsson @grafana/docs-squad
/docs/sources/enterprise/ @osg-grafana @grafana/docs-squad

# Backend code
*.go @grafana/backend-platform
go.mod @grafana/backend-platform
go.sum @grafana/backend-platform
/.bingo @grafana/backend-platform

# Continuous Integration
.drone.yml @grafana/grafana-release-eng
.drone.star @grafana/grafana-release-eng
/scripts/drone/ @grafana/grafana-release-eng
/pkg/build/ @grafana/grafana-release-eng

# Cloud Datasources backend code
/pkg/tsdb/cloudwatch @grafana/cloud-datasources
/pkg/tsdb/cloudwatch @grafana/cloud-datasources @grafana/observability-squad
/pkg/tsdb/azuremonitor @grafana/cloud-datasources
/pkg/tsdb/cloudmonitoring @grafana/cloud-datasources

# Observability backend code
/pkg/tsdb/prometheus @grafana/observability-metrics
/pkg/tsdb/influxdb @grafana/observability-metrics
/pkg/tsdb/elasticsearch @grafana/observability-logs-and-traces
/pkg/tsdb/graphite @grafana/observability-metrics
/pkg/tsdb/jaeger @grafana/observability-logs-and-traces
/pkg/tsdb/loki @grafana/observability-logs-and-traces
/pkg/tsdb/zipkin @grafana/observability-logs-and-traces
/pkg/tsdb/tempo @grafana/observability-logs-and-traces
/pkg/tsdb/influxdb @grafana/observability-squad
/pkg/tsdb/elasticsearch @grafana/observability-squad
/pkg/tsdb/graphite @grafana/observability-squad
/pkg/tsdb/jaeger @grafana/observability-squad
/pkg/tsdb/loki @grafana/observability-squad
/pkg/tsdb/zipkin @grafana/observability-squad
/pkg/tsdb/tempo @grafana/observability-squad

# BI backend code
/pkg/tsdb/mysql @grafana/grafana-bi-squad
/pkg/tsdb/postgres @grafana/grafana-bi-squad
/pkg/tsdb/mssql @grafana/grafana-bi-squad
# Unified Alerting
/pkg/services/ngalert @grafana/alerting-squad
/pkg/services/sqlstore/migrations/ualert @grafana/alerting-squad

# Database migrations
/pkg/services/sqlstore/migrations @grafana/backend-platform @grafana/hosted-grafana-team
*_mig.go @grafana/backend-platform @grafana/hosted-grafana-team

# Grafana edge
/pkg/services/live/ @grafana/grafana-edge-squad
/pkg/services/searchV2/ @grafana/grafana-edge-squad
/pkg/services/store/ @grafana/grafana-edge-squad
/pkg/services/export/ @grafana/grafana-edge-squad
/pkg/infra/filestore/ @grafana/grafana-edge-squad
pkg/tsdb/testdatasource/sims/ @grafana/grafana-edge-squad

# Alerting
/pkg/services/ngalert @grafana/alerting-squad-backend
/pkg/services/sqlstore/migrations/ualert @grafana/alerting-squad-backend
/pkg/services/alerting @grafana/alerting-squad-backend
/pkg/tests/api/alerting @grafana/alerting-squad-backend
/public/app/features/alerting @grafana/alerting-squad-frontend

# Library Services
/pkg/services/libraryelements @grafana/user-essentials
/pkg/services/librarypanels @grafana/user-essentials

# Plugins
/pkg/api/pluginproxy @grafana/plugins-platform-backend
/pkg/plugins @grafana/plugins-platform-backend
/pkg/services/datasourceproxy @grafana/plugins-platform-backend
/pkg/services/datasources @grafana/plugins-platform-backend

# Dashboard previews / crawler (behind feature flag)
/pkg/services/thumbs @grafana/grafana-edge-squad

# Backend code docs
/contribute/style-guides/backend.md @grafana/backend-platform
/contribute/architecture/backend @grafana/backend-platform
/contribute/engineering/backend @grafana/backend-platform

/e2e @grafana/user-essentials
/packages @grafana/user-essentials @grafana/plugins-platform-frontend @grafana/grafana-bi-squad
/packages/grafana-e2e-selectors @grafana/user-essentials
/packages/grafana-e2e @grafana/user-essentials
/packages/grafana-toolkit @grafana/plugins-platform-frontend
/packages/grafana-ui/.storybook @grafana/plugins-platform-frontend
/packages/grafana-ui/src/components/DateTimePickers @grafana/grafana-bi-squad
/packages/grafana-ui/src/components/GraphNG @grafana/grafana-bi-squad
/packages/grafana-ui/src/components/Table @grafana/grafana-bi-squad
/packages/grafana-ui/src/components/TimeSeries @grafana/grafana-bi-squad
/packages/grafana-ui/src/components/uPlot @grafana/grafana-bi-squad
/packages/grafana-ui/src/utils/storybook @grafana/plugins-platform-frontend
/packages/jaeger-ui-components/ @grafana/observability-logs-and-traces
/plugins-bundled @grafana/plugins-platform-frontend
# public folder
/public/app/core/components/TimePicker @grafana/grafana-bi-squad
/public/app/core/components/Layers @grafana/grafana-edge-squad
/public/app/features/canvas/ @grafana/grafana-edge-squad
/public/app/features/comments/ @grafana/grafana-edge-squad
/public/app/features/dimensions/ @grafana/grafana-edge-squad
/public/app/features/geo/ @grafana/grafana-edge-squad
/public/app/features/live/ @grafana/grafana-edge-squad
/public/app/features/explore/ @grafana/observability-experience-squad
/public/app/features/plugins @grafana/plugins-platform-frontend
/public/app/features/transformers/spatial @grafana/grafana-edge-squad
/public/app/plugins/panel/alertlist @grafana/alerting-squad
/public/app/plugins/panel/barchart @grafana/grafana-bi-squad
/public/app/plugins/panel/heatmap @grafana/grafana-bi-squad
/public/app/plugins/panel/histogram @grafana/grafana-bi-squad
/public/app/plugins/panel/logs @grafana/observability-logs-and-traces
/public/app/plugins/panel/nodeGraph @grafana/observability-logs-and-traces
/public/app/plugins/panel/piechart @grafana/grafana-bi-squad
/public/app/plugins/panel/state-timeline @grafana/grafana-bi-squad
/public/app/plugins/panel/status-history @grafana/grafana-bi-squad
/public/app/plugins/panel/table @grafana/grafana-bi-squad
/public/app/plugins/panel/timeseries @grafana/grafana-bi-squad
/public/app/plugins/panel/geomap @grafana/grafana-edge-squad
/public/app/plugins/panel/canvas @grafana/grafana-edge-squad
/public/app/plugins/panel/candlestick @grafana/grafana-edge-squad
/public/app/plugins/panel/icon @grafana/grafana-edge-squad
/scripts/build/release-packages.sh @grafana/plugins-platform-frontend
/scripts/circle-release-next-packages.sh @grafana/plugins-platform-frontend
/scripts/ci-frontend-metrics.sh @grafana/user-essentials @grafana/plugins-platform-frontend @grafana/grafana-bi-squad
/scripts/ci-reference-docs-build.sh @grafana/plugins-platform-frontend
/scripts/ci-reference-docs-lint.sh @grafana/plugins-platform-frontend
/scripts/grunt @grafana/frontend-ops
/scripts/webpack @grafana/frontend-ops
/scripts/generate-a11y-report.sh @grafana/user-essentials
package.json @grafana/frontend-ops
tsconfig.json @grafana/frontend-ops
lerna.json @grafana/frontend-ops
.babelrc @grafana/frontend-ops
.prettierrc.js @grafana/frontend-ops
.eslintrc @grafana/frontend-ops
.pa11yci.conf.js @grafana/user-essentials
.pa11yci-pr.conf.js @grafana/user-essentials
/e2e @grafana/grafana-frontend-platform
/packages @grafana/grafana-frontend-platform
/plugins-bundled @grafana/grafana-frontend-platform
/public @grafana/grafana-frontend-platform
/scripts/build/release-packages.sh @grafana/grafana-frontend-platform
/scripts/circle-release-next-packages.sh @grafana/grafana-frontend-platform
/scripts/ci-frontend-metrics.sh @grafana/grafana-frontend-platform
/scripts/grunt @grafana/grafana-frontend-platform
/scripts/webpack @grafana/grafana-frontend-platform
package.json @grafana/grafana-frontend-platform
tsconfig.json @grafana/grafana-frontend-platform
lerna.json @grafana/grafana-frontend-platform
.babelrc @grafana/grafana-frontend-platform
.prettierrc.js @grafana/grafana-frontend-platform
.eslintrc @grafana/grafana-frontend-platform

# @grafana/ui component documentation
*.mdx @marcusolsson @jessover9000 @grafana/plugins-platform-frontend
*.mdx @marcusolsson @jessover9000 @grafana/grafana-frontend-platform

/public/app/features/explore/ @grafana/observability-squad
/packages/jaeger-ui-components/ @grafana/observability-squad

# Core datasources
/public/app/plugins/datasource/cloudwatch @grafana/cloud-datasources
/public/app/plugins/datasource/elasticsearch @grafana/observability-logs-and-traces
/public/app/plugins/datasource/cloudwatch @grafana/cloud-datasources @grafana/observability-squad
/public/app/plugins/datasource/elasticsearch @grafana/observability-squad
/public/app/plugins/datasource/grafana-azure-monitor-datasource @grafana/cloud-datasources
/public/app/plugins/datasource/graphite @grafana/observability-metrics
/public/app/plugins/datasource/influxdb @grafana/observability-metrics
/public/app/plugins/datasource/jaeger @grafana/observability-logs-and-traces
/public/app/plugins/datasource/loki @grafana/observability-logs-and-traces
/public/app/plugins/datasource/mssql @grafana/grafana-bi-squad
/public/app/plugins/datasource/mysql @grafana/grafana-bi-squad
/public/app/plugins/datasource/graphite @grafana/observability-squad
/public/app/plugins/datasource/influxdb @grafana/observability-squad
/public/app/plugins/datasource/jaeger @grafana/observability-squad
/public/app/plugins/datasource/loki @grafana/observability-squad
/public/app/plugins/datasource/mssql @grafana/backend-platform
/public/app/plugins/datasource/mysql @grafana/backend-platform
/public/app/plugins/datasource/opentsdb @grafana/backend-platform
/public/app/plugins/datasource/postgres @grafana/grafana-bi-squad
/public/app/plugins/datasource/prometheus @grafana/observability-metrics
/public/app/plugins/datasource/postgres @grafana/backend-platform
/public/app/plugins/datasource/prometheus @grafana/observability-squad
/public/app/plugins/datasource/cloud-monitoring @grafana/cloud-datasources
/public/app/plugins/datasource/zipkin @grafana/observability-logs-and-traces
/public/app/plugins/datasource/tempo @grafana/observability-logs-and-traces
/public/app/plugins/datasource/zipkin @grafana/observability-squad
/public/app/plugins/datasource/tempo @grafana/observability-squad
/public/app/plugins/datasource/alertmanager @grafana/alerting-squad

# Cloud middleware
/grafana-mixin/ @grafana/hosted-grafana-team

# Grafana authentication and authorization
/pkg/services/accesscontrol @grafana/grafana-authnz-team
/pkg/services/auth @grafana/grafana-authnz-team
/pkg/services/dashboards/accesscontrol.go @grafana/grafana-authnz-team
/pkg/services/datasources/permissions @grafana/grafana-authnz-team
/pkg/services/datasources/permissions/accesscontrol.go @grafana/grafana-authnz-team
/pkg/services/guardian @grafana/grafana-authnz-team
/pkg/services/ldap @grafana/grafana-authnz-team
/pkg/services/login @grafana/grafana-authnz-team
/pkg/services/multildap @grafana/grafana-authnz-team
/pkg/services/oauthtoken @grafana/grafana-authnz-team
/pkg/services/teamguardian @grafana/grafana-authnz-team
/pkg/services/serviceaccounts @grafana/grafana-authnz-team
/grafana-mixin/ @grafana/cloud-middleware
2  .github/ISSUE_TEMPLATE/1-bug_report.md (vendored)
@@ -5,7 +5,7 @@ labels: 'type: bug'
---

<!--
Please use this template to create your bug report. By providing as much info as possible you help us understand the issue, reproduce it and resolve it for you quicker. Therefore take a couple of extra minutes to make sure you have provided all info needed.
Please use this template to create your bug report. By providing as much info as possible you help us understand the issue, reproduce it and resolve it for you quicker. Therefor take a couple of extra minutes to make sure you have provided all info needed.

PROTIP: record your screen and attach it as a gif to showcase the issue.
43  .github/ISSUE_TEMPLATE/4-UX-design.yaml (vendored)
@@ -1,43 +0,0 @@
name: UX design issue
description: Create an issue for delivering wireframes, mockups or other design solutions.
title: "UX: "
labels: ["type/ux"]
body:
  - type: textarea
    id: background
    attributes:
      label: "Background / Why we're doing this"
      description: Describe the problem and background of the issue. This could include research insights that inform the design changes, unmet user needs, or other usability issues.
      placeholder: Add UI improvements to make Grafana Alerting alert creation easier based on usability test results.
    validations:
      required: true
  - type: dropdown
    attributes:
      label: Is there existing research for this?
      description: Please link research results or insights in the Background section if you have any. If no research was conducted, you might want to consider usability testing your design later.
      options: [
        "Yes, I have linked it",
        "No research yet"
      ]
    validations:
      required: true
  - type: textarea
    id: problems-or-tasks
    attributes:
      label: Problems or tasks
      description: Describe problems the new design should solve or tasks the user needs to complete.
      placeholder:
      value: |
        - A problem we're trying to solve
        - A task the user needs to accomplish
        - …
    validations:
      required: false
  - type: textarea
    attributes:
      label: Deliverables
      description: Add a checklist of deliverables here. You can later add links to each deliverable.
      value: |
        - Figma mockup
        - Miro board
        - …
256  .github/commands.json (vendored)
@@ -1,221 +1,53 @@
[
  {
    "type":"label",
    "name":"bot/question",
    "addLabel":"type/question",
    "removeLabel":"bot/question",
    "action":"close",
    "comment":"Please ask your question on [community.grafana.com/](https://community.grafana.com/). To avoid having your issue closed in the future, please read our [CONTRIBUTING](https://github.com/grafana/grafana/blob/main/CONTRIBUTING.md) guidelines.\n\nHappy graphing!"
    "type": "label",
    "name": "bot/question",
    "addLabel": "type/question",
    "removeLabel": "bot/question",
    "action": "close",
    "comment": "Please ask your question on [community.grafana.com/](https://community.grafana.com/). To avoid having your issue closed in the future, please read our [CONTRIBUTING](https://github.com/grafana/grafana/blob/main/CONTRIBUTING.md) guidelines.\n\nHappy graphing!"
  },
  {
    "type":"comment",
    "name":"duplicate",
    "allowUsers":[],
    "action":"updateLabels",
    "addLabel":"type/duplicate"
  },
  {
    "type":"label",
    "name":"bot/duplicate",
    "addLabel":"type/duplicate",
    "removeLabel":"bot/duplicate",
    "action":"close",
    "comment":"Thanks for creating this issue! It looks like this has already been reported by another user. We’ve closed this in favor of the existing one. Please consider adding any details you think is missing to that issue.\n\nTo avoid having your issue closed in the future, please read our [CONTRIBUTING](https://github.com/grafana/grafana/blob/main/CONTRIBUTING.md) guidelines.\n\nHappy graphing!"
  },
  {
    "type":"comment",
    "name":"needsMoreInfo",
    "allowUsers":[],
    "action":"updateLabels",
    "addLabel":"bot/needs more info"
  },
  {
    "type":"label",
    "name":"bot/needs more info",
    "action":"updateLabels",
    "addLabel":"needs more info",
    "removeLabel":"bot/needs more info",
    "comment":"Thanks for creating this issue! We think it's missing some basic information. \r\n\r\nFollow the issue template and add additional information that will help us replicate the problem. \r\nFor data visualization issues: \r\n- Query results from the inspect drawer (data tab & query inspector)\r\n- Panel settings can be extracted in the panel inspect drawer JSON tab\r\n\r\nFor dashboard related issues: \r\n- Dashboard JSON can be found in the dashboard settings JSON model view\r\n\r\nFor authentication, provisioning and alerting issues, Grafana server logs are useful. \r\n\r\nHappy graphing!"
  },
  {
    "type":"label",
    "name":"bot/no new info",
    "action":"close",
    "comment":"We've closed this issue since it needs more information and hasn't had any activity recently. We can re-open it after you you add more information. To avoid having your issue closed in the future, please read our [CONTRIBUTING](https://github.com/grafana/grafana/blob/main/CONTRIBUTING.md) guidelines.\n\nHappy graphing!"
  },
  {
    "type":"label",
    "name":"bot/close feature request",
    "action":"close",
    "addLabel":"not implemented",
    "comment":"This feature request has been open for a long time with few received upvotes or comments, so we are closing it. We're trying to limit open GitHub issues in order to better track planned work and features. \r\n\r\nThis doesn't mean that we'll never ever implement it or that we will never accept a PR for it. A closed issue can still attract upvotes and act as a ticket to track feature demand\/interest. \r\n\r\nThank You to you for taking the time to create this issue!"
  },
  {
    "type":"label",
    "name":"oss-user-essentials",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/78"
    }
  },
  {
    "type":"label",
    "name":"area/plugins-catalog",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/76"
    }
  },
  {
    "type":"label",
    "name":"type/docs",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/69"
    }
  },
  {
    "type":"label",
    "name":"datasource/Azure",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/97"
    }
  },
  {
    "type":"label",
    "name":"datasource/CloudWatch",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/97"
    }
  },
  {
    "type":"label",
    "name":"datasource/CloudWatch Logs",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/97"
    }
  },
  {
    "type":"label",
    "name":"datasource/GoogleCloudMonitoring",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/97"
    }
  },
  {
    "type":"label",
    "name":"datasource/Prometheus",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/112"
    }
  },
  {
    "type":"label",
    "name":"datasource/InfluxDB",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/112"
    }
  },
  {
    "type":"label",
    "name":"datasource/Graphite",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/112"
    }
  },
  {
    "type":"label",
    "name":"datasource/OpenTSDB",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/112"
    }
  },
  {
    "type":"label",
    "name":"datasource/OpenSearch",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/110"
    }
  },
  {
    "type":"label",
    "name":"datasource/Loki",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/110"
    }
  },
  {
    "type":"label",
    "name":"datasource/Tempo",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/110"
    }
  },
  {
    "type":"label",
    "name":"datasource/Elasticsearch",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/110"
    }
  },
  {
    "type":"label",
    "name":"datasource/Jaeger",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/110"
    }
  },
  {
    "type":"label",
    "name":"datasource/Zipkin",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/110"
    }
  },
  {
    "type":"label",
    "name":"area/explore",
    "action":"addToProject",
    "addToProject":{
      "url":"https://github.com/orgs/grafana/projects/111"
    }
  },
  {
    "type":"label",
    "name":"oss-user-essentials",
    "action":"removeFromProject",
    "removeFromProject":{
      "url":"https://github.com/orgs/grafana/projects/78"
    }
  },
  {
    "type":"label",
    "name":"oss-user-essentials",
    "action":"removeFromProject",
    "removeFromProject":{
      "url":"https://github.com/grafana/grafana/projects/33"
    }
    "type": "comment",
    "name": "duplicate",
    "allowUsers": [],
    "action": "updateLabels",
    "addLabel": "type/duplicate"
  },
  {
    "type": "label",
    "name": "team/grafana-partners",
    "action": "addToProject",
    "addToProject": {
      "url": "https://github.com/orgs/grafana/projects/87"
    }
    "name": "bot/duplicate",
    "addLabel": "type/duplicate",
    "removeLabel": "bot/duplicate",
    "action": "close",
    "comment": "Thanks for creating this issue! It looks like this has already been reported by another user. We’ve closed this in favor of the existing one. Please consider adding any details you think is missing to that issue.\n\nTo avoid having your issue closed in the future, please read our [CONTRIBUTING](https://github.com/grafana/grafana/blob/main/CONTRIBUTING.md) guidelines.\n\nHappy graphing!"
  },
  {
    "type": "comment",
    "name": "needsMoreInfo",
    "allowUsers": [],
    "action": "updateLabels",
    "addLabel": "bot/needs more info"
  },
  {
    "type": "label",
    "name": "bot/needs more info",
    "action": "updateLabels",
    "addLabel": "needs more info",
    "removeLabel": "bot/needs more info",
    "comment": "Thanks for creating this issue! We think it's missing some basic information. \r\n\r\nFollow the issue template and add additional information that will help us replicate the problem. \r\nFor data visualization issues: \r\n- Query results from the inspect drawer (data tab & query inspector)\r\n- Panel settings can be extracted in the panel inspect drawer JSON tab\r\n\r\nFor dashboard related issues: \r\n- Dashboard JSON can be found in the dashboard settings JSON model view\r\n\r\nFor authentication, provisioning and alerting issues, Grafana server logs are useful. \r\n\r\nHappy graphing!"
  },
  {
    "type": "label",
    "name": "bot/no new info",
    "action": "close",
    "comment": "We've closed this issue since it needs more information and hasn't had any activity recently. We can re-open it after you you add more information. To avoid having your issue closed in the future, please read our [CONTRIBUTING](https://github.com/grafana/grafana/blob/main/CONTRIBUTING.md) guidelines.\n\nHappy graphing!"
  },
  {
    "type": "label",
    "name": "bot/close feature request",
    "action": "close",
    "addLabel": "not implemented",
    "comment": "This feature request has been open for a long time with few received upvotes or comments, so we are closing it. We're trying to limit open GitHub issues in order to better track planned work and features. \r\n\r\nThis doesn't mean that we'll never ever implement it or that we will never accept a PR for it. A closed issue can still attract upvotes and act as a ticket to track feature demand\/interest. \r\n\r\nThank You to you for taking the time to create this issue!"
  }
]
49  .github/pr-checks.json (vendored)
@@ -1,49 +0,0 @@
[
  {
    "type": "check-milestone",
    "title": "Milestone Check",
    "targetUrl": "https://github.com/grafana/grafana/blob/main/contribute/merge-pull-request.md#assign-a-milestone",
    "success": "Milestone set",
    "failure": "Milestone not set"
  },
  {
    "type": "check-label",
    "title": "Backport Check",
    "labels": {
      "exists": "Backport enabled",
      "notExists": "Backport decision needed",
      "matches": [
        "backport v*"
      ]
    },
    "skip": {
      "message": "Backport skipped",
      "matches": [
        "backport",
        "no-backport"
      ]
    },
    "targetUrl": "https://github.com/grafana/grafana/blob/main/contribute/merge-pull-request.md#should-the-pull-request-be-backported"
  },
  {
    "type": "check-changelog",
    "title": "Changelog Check",
    "labels": {
      "exists": "Changelog enabled",
      "notExists": "Changelog decision needed",
      "matches": [
        "add to changelog"
      ]
    },
    "breakingChangeLabels": [
      "breaking change"
    ],
    "skip": {
      "message": "Changelog skipped",
      "matches": [
        "no-changelog"
      ]
    },
    "targetUrl": "https://github.com/grafana/grafana/blob/main/contribute/merge-pull-request.md#include-in-changelog-and-release-notes"
  }
]
20  .github/pr-commands.json (vendored)
@@ -66,6 +69,9 @@
      "packaging/**/*",
      "scripts/build/**/*",
      "scripts/*.sh",
      "scripts/*.star",
      ".drone.star",
      ".drone.yml",
      "Makefile",
      "Dockerfile",
      "Dockerfile.ubuntu"
@@ -73,16 +76,6 @@
    "action": "updateLabel",
    "addLabel": "type/build-packaging"
  },
  {
    "type": "changedfiles",
    "matches": [
      "scripts/*.star",
      ".drone.star",
      ".drone.yml"
    ],
    "action": "updateLabel",
    "addLabel": "type/ci"
  },
  {
    "type": "changedfiles",
    "matches": [ "public/app/plugins/datasource/grafana-azure-monitor-datasource/**/*", "pkg/tsdb/azuremonitor/**/*"],
@@ -179,17 +172,10 @@
    "action": "updateLabel",
    "addLabel": "area/dashboard/templating"
  },
  {
    "type": "changedfiles",
    "matches": ["/pkg/services/ngalert/**/*", "/pkg/services/sqlstore/migrations/ualert/**/*", "/pkg/services/alerting/**/*", "/public/app/features/alerting/**/*", "/pkg/tests/api/alerting/**/*"],
    "action": "updateLabel",
    "addLabel": "area/alerting"
  },
  {
    "type": "author",
    "name": "pr/external",
    "notMemberOf": { "org": "grafana" },
    "ignoreList": ["renovate[bot]","dependabot[bot]"],
    "action": "updateLabel",
    "addLabel": "pr/external"
  }
58  .github/renovate.json5 (vendored)
@@ -1,58 +0,0 @@
{
  "extends": [
    "config:base"
  ],
  "enabledManagers": ["npm"],
  "ignoreDeps": [
    "@grafana/slate-react", // should be updated when the `slate` package is updated
    "@types/systemjs",
    "@types/d3-force", // we should bump this once we move to esm modules
    "@types/d3-interpolate", // we should bump this once we move to esm modules
    "@types/d3-scale-chromatic", // we should bump this once we move to esm modules
    "@types/grafana__slate-react", // should be updated when the `slate` package is updated
    "@types/react-icons", // jaeger-ui-components is being refactored to use @grafana/ui icons instead
    "commander", // we are planning to remove this, so no need to update it
    "d3",
    "d3-force", // we should bump this once we move to esm modules
    "d3-interpolate", // we should bump this once we move to esm modules
    "d3-scale-chromatic", // we should bump this once we move to esm modules
    "execa", // we should bump this once we move to esm modules
    "history", // we should bump this together with react-router-dom
    "@mdx-js/react", // storybook peer-depends on it's 1.x version, we should upgrade this when we upgrade storybook
    "monaco-editor", // due to us exposing this via @grafana/ui/CodeEditor's props bumping can break plugins
    "node-fetch", // we should bump this once we move to esm modules
    "react-hook-form", // due to us exposing these hooks via @grafana/ui form components bumping can break plugins
    "react-icons", // jaeger-ui-components is being refactored to use @grafana/ui icons instead
    "react-router-dom", // we should bump this together with history
    "slate",
    "slate-plain-serializer",
    "systemjs",
    "copy-webpack-plugin", // try to upgrade with newer yarn release. Not working with 3.1.1
    "ts-loader", // we should remove ts-loader and use babel-loader instead
    "ora" // we should bump this once we move to esm modules
  ],
  "ignorePaths": ["packages/grafana-toolkit/package.json", "emails/**", "plugins-bundled/**", "**/mocks/**"],
  "labels": ["area/frontend", "dependencies"],
  "packageRules": [
    {
      "matchUpdateTypes": ["patch"],
      "excludePackagePatterns": ["@storybook"],
      "extends": ["schedule:monthly"],
      "groupName": "Monthly patch updates"
    },
    {
      "matchPackagePatterns": ["@storybook"],
      "extends": ["schedule:monthly"],
      "groupName": "Storybook updates"
    }
  ],
  "pin": {
    "enabled": false
  },
  "prConcurrentLimit": 10,
  "reviewers": ["team:grafana/frontend-ops"],
  "separateMajorMinor": false,
  "vulnerabilityAlerts": {
    "addLabels": ["area/security"]
  }
}
47  .github/stale.yml (vendored, Normal file)
@@ -0,0 +1,47 @@
# Configuration for probot-stale - https://github.com/probot/stale

# General configuration
# Label to use when marking as stale
staleLabel: stale

# Pull request specific configuration
pulls:
  # Number of days of inactivity before an Issue or Pull Request becomes stale
  daysUntilStale: 14
  # Number of days of inactivity before a stale Issue or Pull Request is closed.
  # Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
  daysUntilClose: 30
  # Comment to post when marking as stale. Set to `false` to disable
  markComment: >
    This pull request has been automatically marked as stale because it has not had
    activity in the last 2 weeks. It will be closed in 30 days if no further activity occurs. Please
    feel free to give a status update now, ping for review, or re-open when it's ready.
    Thank you for your contributions!
  # Comment to post when closing a stale Issue or Pull Request.
  closeComment: >
    This pull request has been automatically closed because it has not had
    activity in the last 30 days. Please feel free to give a status update now, ping for review, or re-open when it's ready.
    Thank you for your contributions!
  # Limit the number of actions per hour, from 1-30. Default is 30
  limitPerRun: 1

exemptLabels:
  - help wanted
  - type/bug
  - type/feature-request
  - Epic
  - no stalebot

# Issue specific configuration
issues:
  limitPerRun: 1
  daysUntilStale: 100000
  daysUntilClose: 100000
  markComment: >
    This issue has been automatically marked as stale because it has not had activity in the
    last 100 days. It will be closed in the next 100 days if no activity occurs.
    Thank you for your contributions.
  closeComment: >
    This issue has been automatically closed because it has not had activity in the
    last month and a half. If this issue is still valid, please ping a maintainer and ask them to check this again.
    Thank you for your contributions.
2  .github/workflows/backport.yml (vendored)
@@ -10,7 +10,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Actions
        uses: actions/checkout@v3
        uses: actions/checkout@v2
        with:
          repository: "grafana/grafana-github-actions"
          path: ./actions
63  .github/workflows/bump-version.yml (vendored)
@@ -5,52 +5,25 @@ on:
    version:
      required: true
      default: '7.x.x'
  workflow_call:
    inputs:
      version_call:
        description: Needs to match, exactly, the name of a version
        required: true
        type: string
    secrets:
      token:
        required: true
      metricsWriteAPIKey:
        required: true
env:
  YARN_ENABLE_IMMUTABLE_INSTALLS: false
jobs:
  main:
    runs-on: ubuntu-latest
    steps:
      # This is a basic workflow to help you get started with Actions
      - uses: actions-ecosystem/action-regex-match@v2.0.2
        if: ${{ github.event.inputs.version != '' }}
      - uses: actions-ecosystem/action-regex-match@v2
        id: regex-match
        with:
          text: ${{ github.event.inputs.version }}
          regex: '^(\d+.\d+).\d+(?:-beta.\d+)?$'
      - uses: actions-ecosystem/action-regex-match@v2.0.2
        if: ${{ inputs.version_call != '' }}
        id: regex-match-version-call
        with:
          text: ${{ inputs.version_call }}
          regex: '^(\d+.\d+).\d+(?:-beta\d+)?$'

      - name: Validate input version
        if: ${{ steps.regex-match.outputs.match == '' && github.event.inputs.version != '' }}
        if: ${{ steps.regex-match.outputs.match == '' }}
        run: |
          echo "The input version format is not correct, please respect:\
          major.minor.patch or major.minor.patch-beta.number format. \
          example: 7.4.3 or 7.4.3-beta.1"
          exit 1
      - name: Validate input version call
        if: ${{ inputs.version_call != '' && steps.regex-match-version-call.outputs.match == '' }}
        run: |
          echo "The input version format is not correct, please respect:\
          major.minor.patch or major.minor.patch-beta<number> format. \
          example: 7.4.3 or 7.4.3-beta1"
          exit 1

      - uses: actions/checkout@v3
      - uses: actions/checkout@v2

      - name: Set intermedia variables
        id: intermedia
@@ -60,8 +33,14 @@ jobs:
          echo "::set-output name=branch_name::v${{steps.regex-match.outputs.group1}}"
          echo "::set-output name=branch_exist::$(git ls-remote --heads https://github.com/grafana/grafana.git v${{ steps.regex-match.outputs.group1 }}.x | wc -l)"

      - name: Check input version is aligned with branch(not main)
        if: steps.intermedia.outputs.branch_exist != '0' && !contains(steps.intermedia.outputs.short_ref, steps.intermedia.outputs.branch_name)
        run: |
          echo " You need to run the workflow on branch v${{steps.regex-match.outputs.group1}}.x
          exit 1

      - name: Check input version is aligned with branch(main)
        if: ${{ github.event.inputs.version != '' && steps.intermedia.outputs.branch_exist == '0' && !contains(steps.intermedia.outputs.short_ref, 'main') }}
        if: steps.intermedia.outputs.branch_exist == '0' && !contains(steps.intermedia.outputs.short_ref, 'main')
        run: |
          echo "When you want to deliver a new new minor version, you might want to create a new branch first \
          with naming convention v[major].[minor].x, and just run the workflow on that branch. \
@@ -69,26 +48,18 @@
          exit 1

      - name: Checkout Actions
        uses: actions/checkout@v3
        uses: actions/checkout@v2
        with:
          repository: "grafana/grafana-github-actions"
          path: ./actions
          ref: main
      - uses: actions/setup-node@v3.2.0
      - uses: actions/setup-node@v2.1.5
        with:
          node-version: '16'
          node-version: '14'
      - name: Install Actions
        run: npm install --production --prefix ./actions
      - name: Run bump version (manually invoked)
        if: ${{ github.event.inputs.version != '' }}
      - name: Run bump version
        uses: ./actions/bump-version
        with:
          token: ${{ secrets.GH_BOT_ACCESS_TOKEN }}
          metricsWriteAPIKey: ${{ secrets.GRAFANA_MISC_STATS_API_KEY }}
      - name: Run bump version (workflow invoked)
        if: ${{ inputs.version_call != '' }}
        uses: ./actions/bump-version
        with:
          version_call: ${{ inputs.version_call }}
          token: ${{ secrets.token }}
          metricsWriteAPIKey: ${{ secrets.metricsWriteAPIKey }}
          token: ${{secrets.GH_BOT_ACCESS_TOKEN}}
          metricsWriteAPIKey: ${{secrets.GRAFANA_MISC_STATS_API_KEY}}
39  .github/workflows/close-milestone.yml (vendored)
@@ -1,39 +0,0 @@
name: Close milestone
on:
  workflow_dispatch:
    inputs:
      version:
        required: true
        description: Needs to match, exactly, the name of a milestone
  workflow_call:
    inputs:
      version_call:
        description: Needs to match, exactly, the name of a milestone
        required: true
        type: string
    secrets:
      token:
        required: true
jobs:
  main:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Actions
        uses: actions/checkout@v3
        with:
          repository: "grafana/grafana-github-actions"
          path: ./actions
          ref: main
      - name: Install Actions
        run: npm install --production --prefix ./actions
      - name: Close milestone (manually invoked)
        if: ${{ github.event.inputs.version != '' }}
        uses: ./actions/close-milestone
        with:
          token: ${{ secrets.GH_BOT_ACCESS_TOKEN }}
      - name: Close milestone (workflow invoked)
        if: ${{ inputs.version_call != '' }}
        uses: ./actions/close-milestone
        with:
          version_call: ${{ inputs.version_call }}
          token: ${{ secrets.token }}
@@ -1,20 +0,0 @@
name: Cloud data sources test code coverage
27  .github/workflows/codeql-analysis.yml (vendored)
@@ -8,12 +8,9 @@ name: "CodeQL"
on:
push:
branches: [main, v1.8.x, v2.0.x, v2.1.x, v2.6.x, v3.0.x, v3.1.x, v4.0.x, v4.1.x, v4.2.x, v4.3.x, v4.4.x, v4.5.x, v4.6.x, v4.7.x, v5.0.x, v5.1.x, v5.2.x, v5.3.x, v5.4.x, v6.0.x, v6.1.x, v6.2.x, v6.3.x, v6.4.x, v6.5.x, v6.6.x, v6.7.x, v7.0.x, v7.1.x, v7.2.x]
paths-ignore:
- '**/*.cue'
- '**/*.json'
- '**/*.md'
- '**/*.txt'
- '**/*.yml'
pull_request:
# The branches below must be a subset of the branches above
branches: [main]
schedule:
- cron: '0 4 * * 6'

@@ -33,7 +30,7 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
@@ -49,5 +46,21 @@ jobs:
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main

# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1

# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl

# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language

#- run: |
# make bootstrap
# make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
7  .github/workflows/commands.yml (vendored)
@@ -1,17 +1,16 @@
name: Run commands when issues are labeled or comments added
on:
issues:
types: [labeled, unlabeled]
types: [labeled]
issue_comment:
types: [created]
concurrency:
group: issue-commands-${{ github.event.issue.number }}

jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
repository: "grafana/grafana-github-actions"
path: ./actions
149  .github/workflows/detect-breaking-changes-build.yml (vendored)
@@ -1,149 +0,0 @@
name: Levitate / Detect breaking changes
184  .github/workflows/detect-breaking-changes-report.yml (vendored)
@@ -1,184 +0,0 @@
name: Levitate / Report breaking changes
16  .github/workflows/doc-validator.yml (vendored)
@@ -1,16 +0,0 @@
name: "doc-validator"
26  .github/workflows/enterprise-pr-check.yml (vendored)
@@ -1,26 +0,0 @@
name: Enterprise PR check
2  .github/workflows/github-release.yml (vendored)
@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
repository: "grafana/grafana-github-actions"
path: ./actions
2  .github/workflows/metrics-collector.yml (vendored)
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
repository: "grafana/grafana-github-actions"
path: ./actions
21  .github/workflows/milestone.yml (vendored)
@@ -1,21 +0,0 @@
name: Close Milestone
35  .github/workflows/pr-checks.yml (vendored)
@@ -1,35 +0,0 @@
name: PR Checks
29  .github/workflows/pr-codeql-analysis-go.yml (vendored)
@@ -1,29 +0,0 @@
name: "CodeQL for PR / go"
@@ -1,31 +0,0 @@
name: "CodeQL for PR / javascript"
29  .github/workflows/pr-codeql-analysis-python.yml (vendored)
@@ -1,29 +0,0 @@
name: "CodeQL for PR / python"
18  .github/workflows/pr-commands-closed.yml (vendored)
@@ -1,18 +0,0 @@
name: Run when PRs are closed
5  .github/workflows/pr-commands.yml (vendored)
@@ -4,14 +4,13 @@ on:
types:
- opened
- synchronize
concurrency:
group: pr-commands-${{ github.event.number }}

jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
repository: "grafana/grafana-github-actions"
path: ./actions
23  .github/workflows/prepare-release.yml (vendored)
@@ -1,23 +0,0 @@
name: Prepare release
25  .github/workflows/publish.yml (vendored)
@@ -3,7 +3,7 @@ name: publish_docs
on:
push:
branches:
- v9.0.x
- v8.1.x
paths:
- 'docs/sources/**'
- 'packages/grafana-*/**'
@@ -14,23 +14,18 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v1
- run: git clone --single-branch --no-tags --depth 1 -b master https://grafanabot:${{ secrets.GH_BOT_ACCESS_TOKEN }}@github.com/grafana/website-sync ./.github/actions/website-sync
- uses: actions/cache@v2.1.5
with:
path: '**/node_modules'
key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }}
- name: generate-packages-docs
uses: actions/setup-node@v3.2.0
uses: actions/setup-node@v2.1.5
id: generate-docs
with:
node-version: '16'
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn config get cacheFolder)"
- uses: actions/cache@v2.1.7
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
yarn-
- run: yarn install --immutable
node-version: '14'
- run: yarn install --pure-lockfile --no-progress
- run: ./scripts/ci-reference-docs-build.sh
- name: publish-to-git
uses: ./.github/actions/website-sync
@@ -41,7 +36,7 @@ jobs:
host: github.com
github_pat: '${{ secrets.GH_BOT_ACCESS_TOKEN }}'
source_folder: docs/sources
target_folder: content/docs/grafana/v9.0
target_folder: content/docs/grafana/latest
allow_no_changes: 'true'
- shell: bash
run: |
39  .github/workflows/remove-milestone.yml (vendored)
@@ -1,39 +0,0 @@
name: Remove milestone
@@ -1,27 +0,0 @@
9  .github/workflows/scripts/pr-get-job-link.js (vendored)
@@ -1,9 +0,0 @@

module.exports = async ({ github, context, core }) => {
const { owner, repo } = context.repo;
const url = `https://api.github.com/repos/${owner}/${repo}/actions/runs/${context.runId}/jobs`
const result = await github.request(url)
const link = `https://github.com/grafana/grafana/runs/${result.data.jobs[0].id}?check_suite_focus=true`;

core.setOutput('link', link);
}
33  .github/workflows/stale.yml (vendored)
@@ -1,33 +0,0 @@
name: 'Close stale issues and PRs'
31  .github/workflows/update-changelog.yml (vendored)
@@ -3,41 +3,22 @@ on:
workflow_dispatch:
inputs:
version:
required: true
required: true
description: Needs to match, exactly, the name of a milestone
workflow_call:
inputs:
version_call:
description: Needs to match, exactly, the name of a milestone
required: true
type: string
secrets:
token:
required: true
metricsWriteAPIKey:
required: true
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
repository: "grafana/grafana-github-actions"
path: ./actions
ref: main
ref: main
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Run update changelog (manually invoked)
if: ${{ github.event.inputs.version != '' }}
- name: Run update changelog
uses: ./actions/update-changelog
with:
token: ${{ secrets.GH_BOT_ACCESS_TOKEN }}
metricsWriteAPIKey: ${{ secrets.GRAFANA_MISC_STATS_API_KEY }}
- name: Run update changelog (workflow invoked)
if: ${{ inputs.version_call != '' }}
uses: ./actions/update-changelog
with:
version_call: ${{ inputs.version_call }}
token: ${{ secrets.token }}
metricsWriteAPIKey: ${{ secrets.metricsWriteAPIKey }}
token: ${{secrets.GH_BOT_ACCESS_TOKEN}}
metricsWriteAPIKey: ${{secrets.GRAFANA_MISC_STATS_API_KEY}}
43  .gitignore (vendored)
@@ -1,8 +0,0 @@
#!/bin/sh

# Ignore husky hooks if no frontend code has been changed
git diff --cached --name-only | grep -v --quiet "^pkg/" || exit 0

. "$(dirname "$0")/_/husky.sh"

yarn run precommit
32  .linguirc
@@ -1,32 +0,0 @@
@@ -1,123 +0,0 @@
106  .pa11yci.conf.js
@@ -1,106 +0,0 @@
@@ -1,27 +1,13 @@
.git
.github
.yarn
build
compiled
data
deployment_tools_config.json
devenv
dist
e2e/tmp
dist/
pkg/
node_modules
pkg
public/lib/monaco
public/vendor/
vendor/
data/
e2e/tmp
public/build/
public/sass/*.generated.scss
scripts/grafana-server/tmp
vendor

# TS generate from cue by cuetsy
**/*.gen.ts

# Auto-generated localisation files
public/locales/_build/
public/locales/**/*.js

# Auto-generated theme files
theme.light.generated.json
theme.dark.generated.json
devenv/
public/lib/monaco
@@ -1,6 +0,0 @@
{
"eslint.packageManager": "yarn",
"eslint.nodePath": ".yarn/sdks",
"workspace.workspaceFolderCheckCwd": false,
"tsserver.tsdk": ".yarn/sdks/typescript/lib"
}
11  .vscode/launch.json (vendored)
@@ -8,18 +8,7 @@
"mode": "auto",
"program": "${workspaceFolder}/pkg/cmd/grafana-server/",
"env": {},
"cwd": "${workspaceFolder}",
"args": ["--homepath", "${workspaceFolder}", "--packaging", "dev"]
},
{
"name": "Debug Jest test",
"type": "node",
"request": "launch",
"runtimeExecutable": "yarn",
"runtimeArgs": ["run", "jest", "--runInBand", "${file}"],
"console": "integratedTerminal",
"internalConsoleOptions": "neverOpen",
"port": 9229
}
]
}
546  .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs (vendored)
File diff suppressed because one or more lines are too long
33  .yarn/plugins/@yarnpkg/plugin-outdated.cjs (vendored)
File diff suppressed because one or more lines are too long
9  .yarn/plugins/@yarnpkg/plugin-typescript.cjs (vendored)
File diff suppressed because one or more lines are too long
786  .yarn/releases/yarn-3.2.1.cjs (vendored)
File diff suppressed because one or more lines are too long
20  .yarn/sdks/eslint/bin/eslint.js (vendored)
@@ -1,20 +0,0 @@
20  .yarn/sdks/eslint/lib/api.js (vendored)
@@ -1,20 +0,0 @@
6  .yarn/sdks/eslint/package.json (vendored)
@@ -1,6 +0,0 @@
6  .yarn/sdks/integrations.yml (vendored)
@@ -1,6 +0,0 @@
20  .yarn/sdks/prettier/index.js (vendored)
@@ -1,20 +0,0 @@
6  .yarn/sdks/prettier/package.json (vendored)
@@ -1,6 +0,0 @@
20  .yarn/sdks/typescript/bin/tsc (vendored)
@@ -1,20 +0,0 @@
20  .yarn/sdks/typescript/bin/tsserver (vendored)
@@ -1,20 +0,0 @@
20  .yarn/sdks/typescript/lib/tsc.js (vendored)
@@ -1,20 +0,0 @@
208
.yarn/sdks/typescript/lib/tsserver.js
vendored
208
.yarn/sdks/typescript/lib/tsserver.js
vendored
@@ -1,208 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const {existsSync} = require(`fs`);
|
||||
const {createRequire, createRequireFromPath} = require(`module`);
|
||||
const {resolve} = require(`path`);
|
||||
|
||||
const relPnpApiPath = "../../../../.pnp.cjs";
|
||||
|
||||
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
|
||||
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);
|
||||
|
||||
const moduleWrapper = tsserver => {
|
||||
if (!process.versions.pnp) {
|
||||
return tsserver;
|
||||
}
|
||||
|
||||
const {isAbsolute} = require(`path`);
|
||||
const pnpApi = require(`pnpapi`);
|
||||
|
||||
const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//);
|
||||
const isPortal = str => str.startsWith("portal:/");
|
||||
const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`);
|
||||
|
||||
const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => {
|
||||
return `${locator.name}@${locator.reference}`;
|
||||
}));
|
||||
|
||||
// VSCode sends the zip paths to TS using the "zip://" prefix, that TS
|
||||
// doesn't understand. This layer makes sure to remove the protocol
|
||||
// before forwarding it to TS, and to add it back on all returned paths.
|
||||
|
||||
function toEditorPath(str) {
|
||||
// We add the `zip:` prefix to both `.zip/` paths and virtual paths
|
||||
if (isAbsolute(str) && !str.match(/^\^?(zip:|\/zip\/)/) && (str.match(/\.zip\//) || isVirtual(str))) {
|
||||
// We also take the opportunity to turn virtual paths into physical ones;
|
||||
// this makes it much easier to work with workspaces that list peer
|
||||
// dependencies, since otherwise Ctrl+Click would bring us to the virtual
|
||||
// file instances instead of the real ones.
|
||||
//
|
||||
// We only do this to modules owned by the the dependency tree roots.
|
||||
// This avoids breaking the resolution when jumping inside a vendor
|
||||
// with peer dep (otherwise jumping into react-dom would show resolution
|
||||
// errors on react).
|
||||
//
|
||||
const resolved = isVirtual(str) ? pnpApi.resolveVirtual(str) : str;
|
||||
if (resolved) {
|
||||
const locator = pnpApi.findPackageLocator(resolved);
|
||||
if (locator && (dependencyTreeRoots.has(`${locator.name}@${locator.reference}`) || isPortal(locator.reference))) {
|
||||
str = resolved;
|
||||
}
|
||||
}
|
||||
|
||||
str = normalize(str);
|
||||
|
||||
if (str.match(/\.zip\//)) {
switch (hostInfo) {
// Absolute VSCode `Uri.fsPath`s need to start with a slash.
// VSCode only adds it automatically for supported schemes,
// so we have to do it manually for the `zip` scheme.
// The path needs to start with a caret otherwise VSCode doesn't handle the protocol
//
// Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910
//
// Update 2021-10-08: VSCode changed their format in 1.61.
// Before | ^zip:/c:/foo/bar.zip/package.json
// After | ^/zip//c:/foo/bar.zip/package.json
//
// Update 2022-04-06: VSCode changed the format in 1.66.
// Before | ^/zip//c:/foo/bar.zip/package.json
// After | ^/zip/c:/foo/bar.zip/package.json
//
case `vscode <1.61`: {
str = `^zip:${str}`;
} break;

case `vscode <1.66`: {
str = `^/zip/${str}`;
} break;

case `vscode`: {
str = `^/zip${str}`;
} break;

// To make "go to definition" work,
// We have to resolve the actual file system path from virtual path
// and convert scheme to supported by [vim-rzip](https://github.com/lbrayner/vim-rzip)
case `coc-nvim`: {
str = normalize(resolved).replace(/\.zip\//, `.zip::`);
str = resolve(`zipfile:${str}`);
} break;

// Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server)
// We have to resolve the actual file system path from virtual path,
// everything else is up to neovim
case `neovim`: {
str = normalize(resolved).replace(/\.zip\//, `.zip::`);
str = `zipfile://${str}`;
} break;

default: {
str = `zip:${str}`;
} break;
}
}
}

return str;
}

function fromEditorPath(str) {
switch (hostInfo) {
case `coc-nvim`: {
str = str.replace(/\.zip::/, `.zip/`);
// The path for coc-nvim is in format of /<pwd>/zipfile:/<pwd>/.yarn/...
// So in order to convert it back, we use .* to match all the thing
// before `zipfile:`
return process.platform === `win32`
? str.replace(/^.*zipfile:\//, ``)
: str.replace(/^.*zipfile:/, ``);
} break;

case `neovim`: {
str = str.replace(/\.zip::/, `.zip/`);
// The path for neovim is in format of zipfile:///<pwd>/.yarn/...
return str.replace(/^zipfile:\/\//, ``);
} break;

case `vscode`:
default: {
return process.platform === `win32`
? str.replace(/^\^?(zip:|\/zip)\/+/, ``)
: str.replace(/^\^?(zip:|\/zip)\/+/, `/`);
} break;
}
}

// Force enable 'allowLocalPluginLoads'
// TypeScript tries to resolve plugins using a path relative to itself
// which doesn't work when using the global cache
// https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238
// VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but
// TypeScript already does local loads and if this code is running the user trusts the workspace
// https://github.com/microsoft/vscode/issues/45856
const ConfiguredProject = tsserver.server.ConfiguredProject;
const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype;
ConfiguredProject.prototype.enablePluginsWithOptions = function() {
this.projectService.allowLocalPluginLoads = true;
return originalEnablePluginsWithOptions.apply(this, arguments);
};

// And here is the point where we hijack the VSCode <-> TS communications
// by adding ourselves in the middle. We locate everything that looks
// like an absolute path of ours and normalize it.

const Session = tsserver.server.Session;
const {onMessage: originalOnMessage, send: originalSend} = Session.prototype;
let hostInfo = `unknown`;

Object.assign(Session.prototype, {
onMessage(/** @type {string | object} */ message) {
const isStringMessage = typeof message === 'string';
const parsedMessage = isStringMessage ? JSON.parse(message) : message;

if (
parsedMessage != null &&
typeof parsedMessage === `object` &&
parsedMessage.arguments &&
typeof parsedMessage.arguments.hostInfo === `string`
) {
hostInfo = parsedMessage.arguments.hostInfo;
if (hostInfo === `vscode` && process.env.VSCODE_IPC_HOOK) {
if (/(\/|-)1\.([1-5][0-9]|60)\./.test(process.env.VSCODE_IPC_HOOK)) {
hostInfo += ` <1.61`;
} else if (/(\/|-)1\.(6[1-5])\./.test(process.env.VSCODE_IPC_HOOK)) {
hostInfo += ` <1.66`;
}
}
}

const processedMessageJSON = JSON.stringify(parsedMessage, (key, value) => {
return typeof value === 'string' ? fromEditorPath(value) : value;
});

return originalOnMessage.call(
this,
isStringMessage ? processedMessageJSON : JSON.parse(processedMessageJSON)
);
},

send(/** @type {any} */ msg) {
return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => {
return typeof value === `string` ? toEditorPath(value) : value;
})));
}
});

return tsserver;
};

if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/lib/tsserver.js
require(absPnpApiPath).setup();
}
}

// Defer to the real typescript/lib/tsserver.js your application uses
module.exports = moduleWrapper(absRequire(`typescript/lib/tsserver.js`));
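The comments inside the wrapper above document several editor-specific prefixes for paths that point into `.zip` archives. The sketch below is not part of the vendored file; it only illustrates the VSCode prefix rules described in those comments, using a made-up sample path and leaving out the coc-nvim/neovim branches (which additionally resolve virtual paths).

```js
// Minimal sketch of the VSCode prefix rules documented in toEditorPath above.
// The sample path is hypothetical; virtual-path resolution is omitted.
const samplePath = `/home/user/repo/.yarn/cache/lodash.zip/package.json`;

const vscodePrefix = (str, hostInfo) => {
  switch (hostInfo) {
    case `vscode <1.61`: return `^zip:${str}`;   // format used before VSCode 1.61
    case `vscode <1.66`: return `^/zip/${str}`;  // format used by VSCode 1.61-1.65
    case `vscode`:       return `^/zip${str}`;   // format used since VSCode 1.66
    default:             return `zip:${str}`;    // generic fallback
  }
};

for (const host of [`vscode <1.61`, `vscode <1.66`, `vscode`, `unknown`]) {
  console.log(host.padEnd(13), vscodePrefix(samplePath, host));
}
```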
.yarn/sdks/typescript/lib/tsserverlibrary.js (vendored)
@@ -1,208 +0,0 @@
#!/usr/bin/env node

const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.cjs";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

const moduleWrapper = tsserver => {
if (!process.versions.pnp) {
return tsserver;
}

const {isAbsolute} = require(`path`);
const pnpApi = require(`pnpapi`);

const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//);
const isPortal = str => str.startsWith("portal:/");
const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`);

const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => {
return `${locator.name}@${locator.reference}`;
}));

// VSCode sends the zip paths to TS using the "zip://" prefix, that TS
// doesn't understand. This layer makes sure to remove the protocol
// before forwarding it to TS, and to add it back on all returned paths.

function toEditorPath(str) {
// We add the `zip:` prefix to both `.zip/` paths and virtual paths
if (isAbsolute(str) && !str.match(/^\^?(zip:|\/zip\/)/) && (str.match(/\.zip\//) || isVirtual(str))) {
// We also take the opportunity to turn virtual paths into physical ones;
// this makes it much easier to work with workspaces that list peer
// dependencies, since otherwise Ctrl+Click would bring us to the virtual
// file instances instead of the real ones.
//
// We only do this to modules owned by the the dependency tree roots.
// This avoids breaking the resolution when jumping inside a vendor
// with peer dep (otherwise jumping into react-dom would show resolution
// errors on react).
//
const resolved = isVirtual(str) ? pnpApi.resolveVirtual(str) : str;
if (resolved) {
const locator = pnpApi.findPackageLocator(resolved);
if (locator && (dependencyTreeRoots.has(`${locator.name}@${locator.reference}`) || isPortal(locator.reference))) {
str = resolved;
}
}

str = normalize(str);

if (str.match(/\.zip\//)) {
switch (hostInfo) {
// Absolute VSCode `Uri.fsPath`s need to start with a slash.
// VSCode only adds it automatically for supported schemes,
// so we have to do it manually for the `zip` scheme.
// The path needs to start with a caret otherwise VSCode doesn't handle the protocol
//
// Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910
//
// Update 2021-10-08: VSCode changed their format in 1.61.
// Before | ^zip:/c:/foo/bar.zip/package.json
// After | ^/zip//c:/foo/bar.zip/package.json
//
// Update 2022-04-06: VSCode changed the format in 1.66.
// Before | ^/zip//c:/foo/bar.zip/package.json
// After | ^/zip/c:/foo/bar.zip/package.json
//
case `vscode <1.61`: {
str = `^zip:${str}`;
} break;

case `vscode <1.66`: {
str = `^/zip/${str}`;
} break;

case `vscode`: {
str = `^/zip${str}`;
} break;

// To make "go to definition" work,
// We have to resolve the actual file system path from virtual path
// and convert scheme to supported by [vim-rzip](https://github.com/lbrayner/vim-rzip)
case `coc-nvim`: {
str = normalize(resolved).replace(/\.zip\//, `.zip::`);
str = resolve(`zipfile:${str}`);
} break;

// Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server)
// We have to resolve the actual file system path from virtual path,
// everything else is up to neovim
case `neovim`: {
str = normalize(resolved).replace(/\.zip\//, `.zip::`);
str = `zipfile://${str}`;
} break;

default: {
str = `zip:${str}`;
} break;
}
}
}

return str;
}

function fromEditorPath(str) {
switch (hostInfo) {
case `coc-nvim`: {
str = str.replace(/\.zip::/, `.zip/`);
// The path for coc-nvim is in format of /<pwd>/zipfile:/<pwd>/.yarn/...
// So in order to convert it back, we use .* to match all the thing
// before `zipfile:`
return process.platform === `win32`
? str.replace(/^.*zipfile:\//, ``)
: str.replace(/^.*zipfile:/, ``);
} break;

case `neovim`: {
str = str.replace(/\.zip::/, `.zip/`);
// The path for neovim is in format of zipfile:///<pwd>/.yarn/...
return str.replace(/^zipfile:\/\//, ``);
} break;

case `vscode`:
default: {
return process.platform === `win32`
? str.replace(/^\^?(zip:|\/zip)\/+/, ``)
: str.replace(/^\^?(zip:|\/zip)\/+/, `/`);
} break;
}
}

// Force enable 'allowLocalPluginLoads'
// TypeScript tries to resolve plugins using a path relative to itself
// which doesn't work when using the global cache
// https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238
// VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but
// TypeScript already does local loads and if this code is running the user trusts the workspace
// https://github.com/microsoft/vscode/issues/45856
const ConfiguredProject = tsserver.server.ConfiguredProject;
const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype;
ConfiguredProject.prototype.enablePluginsWithOptions = function() {
this.projectService.allowLocalPluginLoads = true;
return originalEnablePluginsWithOptions.apply(this, arguments);
};

// And here is the point where we hijack the VSCode <-> TS communications
// by adding ourselves in the middle. We locate everything that looks
// like an absolute path of ours and normalize it.

const Session = tsserver.server.Session;
const {onMessage: originalOnMessage, send: originalSend} = Session.prototype;
let hostInfo = `unknown`;

Object.assign(Session.prototype, {
onMessage(/** @type {string | object} */ message) {
const isStringMessage = typeof message === 'string';
const parsedMessage = isStringMessage ? JSON.parse(message) : message;

if (
parsedMessage != null &&
typeof parsedMessage === `object` &&
parsedMessage.arguments &&
typeof parsedMessage.arguments.hostInfo === `string`
) {
hostInfo = parsedMessage.arguments.hostInfo;
if (hostInfo === `vscode` && process.env.VSCODE_IPC_HOOK) {
if (/(\/|-)1\.([1-5][0-9]|60)\./.test(process.env.VSCODE_IPC_HOOK)) {
hostInfo += ` <1.61`;
} else if (/(\/|-)1\.(6[1-5])\./.test(process.env.VSCODE_IPC_HOOK)) {
hostInfo += ` <1.66`;
}
}
}

const processedMessageJSON = JSON.stringify(parsedMessage, (key, value) => {
return typeof value === 'string' ? fromEditorPath(value) : value;
});

return originalOnMessage.call(
this,
isStringMessage ? processedMessageJSON : JSON.parse(processedMessageJSON)
);
},

send(/** @type {any} */ msg) {
return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => {
return typeof value === `string` ? toEditorPath(value) : value;
})));
}
});

return tsserver;
};

if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/lib/tsserverlibrary.js
require(absPnpApiPath).setup();
}
}

// Defer to the real typescript/lib/tsserverlibrary.js your application uses
module.exports = moduleWrapper(absRequire(`typescript/lib/tsserverlibrary.js`));
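Both wrappers above rewrite every path in a tsserver message with the same trick: a `JSON.stringify` replacer visits every value in the message, so mapping the string values converts all embedded paths in one pass. The stand-alone sketch below only illustrates that replacer pattern; the message shape and the mapping function are made up for the example.

```js
// Stand-alone sketch of the replacer-based rewrite used by onMessage/send above.
// The message object and the mapping are illustrative only.
const rewriteStrings = (value, fn) =>
  JSON.parse(JSON.stringify(value, (key, v) => (typeof v === `string` ? fn(v) : v)));

const message = {
  command: `open`,
  arguments: {file: `/home/user/repo/.yarn/cache/left-pad.zip/index.js`},
};

// Rewrites every string field, however deeply it is nested.
console.log(rewriteStrings(message, p => p.replace(/\.zip\//, `.zip::`)));
```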
.yarn/sdks/typescript/lib/typescript.js (vendored)
@@ -1,20 +0,0 @@
#!/usr/bin/env node

const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.cjs";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/lib/typescript.js
require(absPnpApiPath).setup();
}
}

// Defer to the real typescript/lib/typescript.js your application uses
module.exports = absRequire(`typescript/lib/typescript.js`);
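All three shims above build their `absRequire` with `module.createRequire` (falling back to the older `createRequireFromPath`), anchored at the absolute `.pnp.cjs` path, so resolution behaves as if it started next to that file. A hedged illustration of that anchoring, using `__filename` only so the snippet runs anywhere and resolving a Node builtin for the demo:

```js
// Illustration of the createRequire anchoring used by the SDK shims above.
// The shims anchor at the project's absolute .pnp.cjs path instead of __filename.
const {createRequire} = require(`module`);

const anchoredRequire = createRequire(__filename);
console.log(anchoredRequire.resolve(`path`)); // resolves the built-in 'path' module
```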
.yarn/sdks/typescript/package.json (vendored)
@@ -1,6 +0,0 @@
{
  "name": "typescript",
  "version": "4.6.4-sdk",
  "main": "./lib/typescript.js",
  "type": "commonjs"
}
.yarnrc.yml
@@ -1,81 +0,0 @@
enableTelemetry: false

nodeLinker: pnp

packageExtensions:
  "@grafana/slate-react@0.22.10-grafana":
    peerDependencies:
      slate-react: ">=0.22.0"
  "@mdx-js/loader@1.6.22":
    peerDependencies:
      react: 17.0.1
  "@storybook/addon-docs@6.4.21":
    peerDependencies:
      "@storybook/manager-webpack5": 6.4.21
  "@storybook/addon-essentials@6.4.21":
    peerDependencies:
      "@storybook/components": 6.4.21
      "@storybook/core-events": 6.4.21
      "@storybook/manager-webpack5": 6.4.21
      "@storybook/theming": 6.4.21
  "@storybook/core-server@6.4.21":
    peerDependencies:
      "@babel/core": ^7.0.0
  "@storybook/core@6.4.21":
    peerDependencies:
      "@babel/core": ^7.0.0
      "@storybook/manager-webpack5": 6.4.21
  "@storybook/csf-tools@6.4.21":
    peerDependencies:
      "@babel/core": ^7.0.0
  "@storybook/react@6.4.21":
    peerDependencies:
      "@storybook/manager-webpack5": 6.4.21
  doctrine@3.0.0:
    dependencies:
      assert: 2.0.0
  moveable@0.29.8:
    dependencies:
      "@daybrush/utils": 1.6.0
      framework-utils: ^1.1.0
  rc-time-picker@3.7.3:
    peerDependencies:
      react: 17.0.1
      react-dom: 17.0.1
  rc-trigger@2.6.5:
    peerDependencies:
      react: 17.0.1
      react-dom: 17.0.1
  react-compat-css-styled@1.0.8:
    dependencies:
      react-simple-compat: 1.2.1
  react-compat-moveable@0.17.8:
    dependencies:
      "@egjs/agent": ^2.2.1
      "@egjs/children-differ": ^1.0.1
      "@scena/matrix": 1.1.1
      css-to-mat: ^1.0.3
      gesto: ^1.7.0
      overlap-area: ^1.0.0
      react-simple-compat: 1.2.1
    peerDependencies:
      framework-utils: ^1.1.0
  react-docgen-typescript-loader@3.7.2:
    peerDependencies:
      webpack: 4.41.5
  react-icons@2.2.7:
    peerDependencies:
      prop-types: "*"
  react-resizable@3.0.4:
    peerDependencies:
      react-dom: 17.0.1

plugins:
  - path: .yarn/plugins/@yarnpkg/plugin-typescript.cjs
    spec: "@yarnpkg/plugin-typescript"
  - path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs
    spec: "@yarnpkg/plugin-interactive-tools"
  - path: .yarn/plugins/@yarnpkg/plugin-outdated.cjs
    spec: "https://mskelton.dev/yarn-outdated/v2"

yarnPath: .yarn/releases/yarn-3.2.1.cjs
CHANGELOG.md
File diff suppressed because it is too large
@@ -1,3 +1,4 @@

# 5.4.5 (2019-08-29)

- **Security**: Urgent security patch release. Please read more in our [blog](https://grafana.com/blog/2019/08/29/grafana-5.4.5-and-6.3.4-released-with-important-security-fix/)
@@ -179,7 +180,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-3-3-and-4-
- **Alerting**: Link to view full size image in Microsoft Teams alert notifier [#13121](https://github.com/grafana/grafana/issues/13121), thx [@holiiveira](https://github.com/holiiveira)
- **Alerting**: Fixes a bug where all alerts would send reminders after upgrade & restart [#13402](https://github.com/grafana/grafana/pull/13402)
- **Alerting**: Concurrent render limit for graphs used in notifications [#13401](https://github.com/grafana/grafana/pull/13401)
- **Postgres/MySQL/MSSQL**: Add support for replacing $\_\_interval and $\_\_interval_ms in alert queries [#11555](https://github.com/grafana/grafana/issues/11555), thx [@svenklemm](https://github.com/svenklemm)
- **Postgres/MySQL/MSSQL**: Add support for replacing $__interval and $\_\_interval_ms in alert queries [#11555](https://github.com/grafana/grafana/issues/11555), thx [@svenklemm](https://github.com/svenklemm)

# 5.3.0-beta1 (2018-09-06)

@@ -213,18 +214,18 @@ See [security announcement](https://community.grafana.com/t/grafana-5-3-3-and-4-
- **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
- **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
- **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484)
- **Prometheus**: Add $**interval, $**interval_ms, \$**range, $**range_s & $\_\_range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) [#12882](https://github.com/grafana/grafana/issues/12882), thx [@roidelapluie](https://github.com/roidelapluie)
- **Prometheus**: Add $__interval, $**interval_ms, \$**range, $__range_s & $\_\_range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) [#12882](https://github.com/grafana/grafana/issues/12882), thx [@roidelapluie](https://github.com/roidelapluie)
- **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
- **Graphite**: Fix for quoting of int function parameters (when using variables) [#11927](https://github.com/grafana/grafana/pull/11927)
- **InfluxDB**: Support timeFilter in query templating for InfluxDB [#12598](https://github.com/grafana/grafana/pull/12598), thx [kichristensen](https://github.com/kichristensen)
- **Postgres/MySQL/MSSQL**: New $\_\_unixEpochGroup and $\_\_unixEpochGroupAlias macros [#12892](https://github.com/grafana/grafana/issues/12892), thx [@svenklemm](https://github.com/svenklemm)
- **Postgres/MySQL/MSSQL**: New $__unixEpochGroup and $\_\_unixEpochGroupAlias macros [#12892](https://github.com/grafana/grafana/issues/12892), thx [@svenklemm](https://github.com/svenklemm)
- **Postgres/MySQL/MSSQL**: Add previous fill mode to \$\_\_timeGroup macro which will fill in previously seen value when point is missing [#12756](https://github.com/grafana/grafana/issues/12756), thx [@svenklemm](https://github.com/svenklemm)
- **Postgres/MySQL/MSSQL**: Use floor rounding in \$\_\_timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
- **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm)
- **Postgres/MySQL/MSSQL**: New $\_\_timeGroupAlias macro. Postgres $\_\_timeGroup no longer automatically adds time column alias [#12749](https://github.com/grafana/grafana/issues/12749), thx [@svenklemm](https://github.com/svenklemm)
- **Postgres/MySQL/MSSQL**: New $__timeGroupAlias macro. Postgres $\_\_timeGroup no longer automatically adds time column alias [#12749](https://github.com/grafana/grafana/issues/12749), thx [@svenklemm](https://github.com/svenklemm)
- **Postgres/MySQL/MSSQL**: Escape single quotes in variables [#12785](https://github.com/grafana/grafana/issues/12785), thx [@eMerzh](https://github.com/eMerzh)
- **Postgres/MySQL/MSSQL**: Min time interval support [#13157](https://github.com/grafana/grafana/issues/13157), thx [@svenklemm](https://github.com/svenklemm)
- **MySQL/MSSQL**: Use datetime format instead of epoch for $\_\_timeFilter, $**timeFrom and \$**timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
- **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $**timeFrom and \$**timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
- **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan)
- **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
- **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
@@ -1018,7 +1019,7 @@ Pull Request: [#8472](https://github.com/grafana/grafana/pull/8472)
## Enhancements

- **Telegram**: Added Telegram alert notifier [#7098](https://github.com/grafana/grafana/pull/7098), thx [@leonoff](https://github.com/leonoff)
- **Templating**: Make $\_\_interval and $\_\_interval_ms global built in variables that can be used in by any data source (in panel queries), closes [#7190](https://github.com/grafana/grafana/issues/7190), closes [#6582](https://github.com/grafana/grafana/issues/6582)
- **Templating**: Make $__interval and $\_\_interval_ms global built in variables that can be used in by any data source (in panel queries), closes [#7190](https://github.com/grafana/grafana/issues/7190), closes [#6582](https://github.com/grafana/grafana/issues/6582)
- **S3 Image Store**: External s3 image store (used in alert notifications) now support AWS IAM Roles, closes [#6985](https://github.com/grafana/grafana/issues/6985), [#7058](https://github.com/grafana/grafana/issues/7058) thx [@mtanda](https://github.com/mtanda)
- **SingleStat**: Implements diff aggregation method for singlestat [#7234](https://github.com/grafana/grafana/issues/7234), thx [@oliverpool](https://github.com/oliverpool)
- **Dataproxy**: Added setting to enable more verbose logging in dataproxy [#7209](https://github.com/grafana/grafana/pull/7209), thx [@Ricky-N](https://github.com/Ricky-N)

@@ -26,12 +26,10 @@ Report a bug by submitting a [bug report](https://github.com/grafana/grafana/iss
Follow the issue template and add additional information that will help us replicate the problem.

For data visualization issues:

- Query results from the inspect drawer (data tab & query inspector)
- Panel settings can be extracted in the panel inspect drawer JSON tab

For a dashboard related issues:

- Dashboard JSON can be found in the dashboard settings JSON model view

For authentication and alerting Grafana server logs are useful.
@@ -42,14 +40,10 @@ If you believe you've found a security vulnerability, please read our [security

### Suggest enhancements

If you have an idea of how to improve Grafana, submit an [enhancement request](https://github.com/grafana/grafana/discussions/new).
If you have an idea of how to improve Grafana, submit an [enhancement request](https://github.com/grafana/grafana/issues/new?labels=type%3A+feature+request&template=2-feature_request.md).

We want to make Grafana accessible to even more people. Submit an [accessibility issue](https://github.com/grafana/grafana/issues/new?labels=type%3A+accessibility&template=3-accessibility.md) to help us understand what we can improve.

### Write documentation

To edit or write technical content, refer to [Contribute to our documentation](/contribute/documentation/README.md). We welcome your expertise and input as our body of technical content grows.

### Triage issues

If you don't have the knowledge or time to code, consider helping with _issue triage_. The community will thank you for saving them time by spending some of yours.
@@ -80,6 +74,6 @@ Before we can accept your pull request, you need to [sign our CLA](https://grafa
## Where do I go from here?

- Set up your [development environment](contribute/developer-guide.md).
- Learn how to [contribute documentation](contribute/README.md).
- Learn how to [contribute documentation](contribute/documentation.md).
- Get started [developing plugins](https://grafana.com/docs/grafana/latest/developers/plugins/) for Grafana.
- Look through the resources in the [contribute](contribute) folder.
- Look through the resources in the [contribute](https://github.com/grafana/grafana/tree/main/contribute) folder.

Dockerfile
@@ -1,17 +1,14 @@
FROM node:16-alpine3.15 as js-builder
FROM node:16-alpine3.14 as js-builder

ENV NODE_OPTIONS=--max_old_space_size=8000
WORKDIR /usr/src/app/

WORKDIR /grafana

COPY package.json yarn.lock .yarnrc.yml ./
COPY .yarn .yarn
COPY package.json yarn.lock ./
COPY packages packages
COPY plugins-bundled plugins-bundled

RUN yarn install
RUN apk --no-cache add git
RUN yarn install --pure-lockfile --no-progress

COPY tsconfig.json .eslintrc .editorconfig .browserslistrc .prettierrc.js babel.config.json .linguirc ./
COPY tsconfig.json .eslintrc .editorconfig .browserslistrc .prettierrc.js ./
COPY public public
COPY tools tools
COPY scripts scripts
@@ -20,26 +17,24 @@ COPY emails emails
ENV NODE_ENV production
RUN yarn build

FROM golang:1.17.12-alpine3.15 as go-builder
FROM golang:1.16.1-alpine3.14 as go-builder

RUN apk add --no-cache gcc g++ make
RUN apk add --no-cache gcc g++

WORKDIR /grafana
WORKDIR $GOPATH/src/github.com/grafana/grafana

COPY go.mod go.sum embed.go Makefile build.go package.json ./
COPY packages/grafana-schema packages/grafana-schema
COPY public/app/plugins public/app/plugins
COPY public/api-spec.json public/api-spec.json
COPY pkg pkg
COPY scripts scripts
COPY go.mod go.sum embed.go ./
COPY cue cue
COPY cue.mod cue.mod
COPY .bingo .bingo
COPY public/app/plugins public/app/plugins
COPY pkg pkg
COPY build.go package.json ./

RUN go mod verify
RUN make build-go
RUN go run build.go build

# Final stage
FROM alpine:3.15
FROM alpine:3.14.2

LABEL maintainer="Grafana team <hello@grafana.com>"

@@ -47,45 +42,43 @@ ARG GF_UID="472"
ARG GF_GID="0"

ENV PATH="/usr/share/grafana/bin:$PATH" \
GF_PATHS_CONFIG="/etc/grafana/grafana.ini" \
GF_PATHS_DATA="/var/lib/grafana" \
GF_PATHS_HOME="/usr/share/grafana" \
GF_PATHS_LOGS="/var/log/grafana" \
GF_PATHS_PLUGINS="/var/lib/grafana/plugins" \
GF_PATHS_PROVISIONING="/etc/grafana/provisioning"
GF_PATHS_CONFIG="/etc/grafana/grafana.ini" \
GF_PATHS_DATA="/var/lib/grafana" \
GF_PATHS_HOME="/usr/share/grafana" \
GF_PATHS_LOGS="/var/log/grafana" \
GF_PATHS_PLUGINS="/var/lib/grafana/plugins" \
GF_PATHS_PROVISIONING="/etc/grafana/provisioning"

WORKDIR $GF_PATHS_HOME

RUN apk add --no-cache ca-certificates bash tzdata musl-utils
RUN apk add --no-cache openssl ncurses-libs ncurses-terminfo-base --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main
RUN apk upgrade ncurses-libs ncurses-terminfo-base --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main
RUN apk info -vv | sort
RUN apk add --no-cache ca-certificates bash tzdata && \
apk add --no-cache openssl musl-utils

COPY conf ./conf

RUN if [ ! $(getent group "$GF_GID") ]; then \
addgroup -S -g $GF_GID grafana; \
fi
addgroup -S -g $GF_GID grafana; \
fi

RUN export GF_GID_NAME=$(getent group $GF_GID | cut -d':' -f1) && \
mkdir -p "$GF_PATHS_HOME/.aws" && \
adduser -S -u $GF_UID -G "$GF_GID_NAME" grafana && \
mkdir -p "$GF_PATHS_PROVISIONING/datasources" \
"$GF_PATHS_PROVISIONING/dashboards" \
"$GF_PATHS_PROVISIONING/notifiers" \
"$GF_PATHS_PROVISIONING/plugins" \
"$GF_PATHS_PROVISIONING/access-control" \
"$GF_PATHS_LOGS" \
"$GF_PATHS_PLUGINS" \
"$GF_PATHS_DATA" && \
cp "$GF_PATHS_HOME/conf/sample.ini" "$GF_PATHS_CONFIG" && \
cp "$GF_PATHS_HOME/conf/ldap.toml" /etc/grafana/ldap.toml && \
chown -R "grafana:$GF_GID_NAME" "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS" "$GF_PATHS_PROVISIONING" && \
chmod -R 777 "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS" "$GF_PATHS_PROVISIONING"
mkdir -p "$GF_PATHS_HOME/.aws" && \
adduser -S -u $GF_UID -G "$GF_GID_NAME" grafana && \
mkdir -p "$GF_PATHS_PROVISIONING/datasources" \
"$GF_PATHS_PROVISIONING/dashboards" \
"$GF_PATHS_PROVISIONING/notifiers" \
"$GF_PATHS_PROVISIONING/plugins" \
"$GF_PATHS_PROVISIONING/access-control" \
"$GF_PATHS_LOGS" \
"$GF_PATHS_PLUGINS" \
"$GF_PATHS_DATA" && \
cp "$GF_PATHS_HOME/conf/sample.ini" "$GF_PATHS_CONFIG" && \
cp "$GF_PATHS_HOME/conf/ldap.toml" /etc/grafana/ldap.toml && \
chown -R "grafana:$GF_GID_NAME" "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS" "$GF_PATHS_PROVISIONING" && \
chmod -R 777 "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS" "$GF_PATHS_PROVISIONING"

COPY --from=go-builder /grafana/bin/*/grafana-server /grafana/bin/*/grafana-cli ./bin/
COPY --from=js-builder /grafana/public ./public
COPY --from=js-builder /grafana/tools ./tools
COPY --from=go-builder /go/src/github.com/grafana/grafana/bin/*/grafana-server /go/src/github.com/grafana/grafana/bin/*/grafana-cli ./bin/
COPY --from=js-builder /usr/src/app/public ./public
COPY --from=js-builder /usr/src/app/tools ./tools

EXPOSE 3000

@@ -1,18 +1,14 @@
FROM node:16-alpine3.15 as js-builder

ENV NODE_OPTIONS=--max_old_space_size=8000
FROM node:14.15.1-slim AS js-builder

WORKDIR /usr/src/app/

COPY package.json yarn.lock ./
COPY packages packages
COPY .yarnrc.yml ./
COPY .yarn .yarn
COPY plugins-bundled plugins-bundled

RUN yarn install
RUN apt-get update && apt-get install -yq git
RUN yarn install --pure-lockfile

COPY tsconfig.json .eslintrc .editorconfig .browserslistrc .prettierrc.js babel.config.json .linguirc ./
COPY tsconfig.json .eslintrc .editorconfig .browserslistrc .prettierrc.js ./
COPY public public
COPY tools tools
COPY scripts scripts
@@ -21,21 +17,19 @@ COPY emails emails
ENV NODE_ENV production
RUN yarn build

FROM golang:1.17.12 AS go-builder
FROM golang:1.16 AS go-builder

WORKDIR /src/grafana

COPY go.mod go.sum embed.go ./
COPY Makefile build.go package.json ./
COPY .bingo .bingo
COPY build.go package.json ./
COPY pkg pkg/
COPY cue cue/
COPY cue.mod cue.mod/
COPY packages/grafana-schema packages/grafana-schema/
COPY public/app/plugins public/app/plugins/
COPY public/api-spec.json public/api-spec.json

RUN go mod verify
RUN make build-go
RUN go run build.go build

FROM ubuntu:20.04

@@ -79,7 +79,8 @@ The current team members are:
- Dominik Prokop ([Grafana Labs](https://grafana.com/))
- Emil Tullstedt ([Grafana Labs](https://grafana.com/))
- Erik Sundell ([Grafana Labs](https://grafana.com/))
- Fredrik Enestad ([Embark Studios](https://www.embark-studios.com/))
- Fredrik Enestad ([Soundtrack Your Brand](https://www.soundtrackyourbrand.com/))
- Hugo Häggmark ([Grafana Labs](https://grafana.com/))
- Ivana Huckova ([Grafana Labs](https://grafana.com/))
- Jeroen Op 't Eynde ([Grafana Labs](https://grafana.com/))
- Jessica Müller ([Grafana Labs](https://grafana.com/))
@@ -99,7 +100,7 @@ The current team members are:
- Ryan McKinley ([Grafana Labs](https://grafana.com/))
- Sofia Papagiannaki ([Grafana Labs](https://grafana.com/))
- Stephanie Closson ([Grafana Labs](https://grafana.com/))
- Tobias Skarhed ([CERN](https://home.web.cern.ch/))
- Tobias Skarhed ([Grafana Labs](https://grafana.com/))
- Torkel Ödegaard ([Grafana Labs](https://grafana.com/))
- Utkarsh Bhatnagar ([Tinder](https://www.tinder.com/))
- Will Browne ([Grafana Labs](https://grafana.com/))

@@ -1,5 +0,0 @@
# Hall of fame

List of previous team members that have had a big impact on the company or the product and contributed during a long period of time.

- Hugo Häggmark ([School of applied technology](https://salt.study))
@@ -4,7 +4,7 @@ The main goal of issue triage is to categorize all incoming Grafana issues and m

> **Note:** This information is for Grafana project Maintainers, Owners, and Admins. If you are a Contributor, then you will not be able to perform most of the tasks in this topic.

The core maintainers of the Grafana project are responsible for categorizing all incoming issues and delegating any critical or important issue to other maintainers. Currently one maintainer each week is responsible. Besides that part, triage provides an important way to contribute to an open source project.
The core maintainers of the Grafana project are responsible for categorizing all incoming issues and delegating any critical or important issue to other maintainers. Currently one maintainer each week is responsible. Besides that part, triage provides an important way to contribute to an open source project.

Triage helps ensure issues resolve quickly by:

@@ -18,7 +18,6 @@ If you don't have the knowledge or time to code, consider helping with triage. T
## Simplified flowchart diagram of the issue triage process

<!-- https://textik.com/#610afa78553def29 -->

```
+--------------------------+
+----------------+ New issue opened/ |
@@ -77,16 +76,15 @@ Instructions for setting up filters in Gmail can be found [here](#setting-up-gma

## 2. Ensure the issue contains basic information

Before triaging an issue very far, make sure that the issue's author provided the standard issue information. This will help you make an educated recommendation on how to categorize the issue. The Grafana project utilizes [GitHub issue templates](https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/configuring-issue-templates-for-your-repository) to guide contributors to provide standard information that must be included for each type of template or type of issue.
Before triaging an issue very far, make sure that the issue's author provided the standard issue information. This will help you make an educated recommendation on how to categorize the issue. The Grafana project utilizes [GitHub issue templates](https://help.github.com/en/articles/creating-issue-templates-for-your-repository) to guide contributors to provide standard information that must be included for each type of template or type of issue.

### Standard issue information that must be included

Given a certain [issue template](https://github.com/grafana/grafana/issues/new/choose) have been used by the issue author or depending how the issue is perceived by the issue triage responsible, the following should help you understand what standard issue information that must be included.
Given a certain [issue template]([template](https://github.com/grafana/grafana/issues/new/choose)) have been used by the issue author or depending how the issue is perceived by the issue triage responsible, the following should help you understand what standard issue information that must be included.

#### Bug reports

Should explain what happened, what was expected and how to reproduce it together with any additional information that may help giving a complete picture of what happened such as screenshots, [query inspector](https://community.grafana.com/t/using-grafanas-query-inspector-to-troubleshoot-issues/2630) output and any environment related information that's applicable and/or maybe related to the reported problem:

- Grafana version
- Data source type & version
- Platform & OS Grafana is installed on
@@ -172,12 +170,10 @@ If it's not perfectly clear that it's an actual bug, quickly try to reproduce it
4. Move on to [prioritizing the issue](#4-prioritization-of-issues).

**It can't be reproduced:**

1. Either [ask for more information](#2-ensure-the-issue-contains-basic-information) needed to investigate it more thoroughly.
2. Either [delegate further investigations](#investigation-of-issues) to someone else.

**It works as intended/by design:**

1. Kindly and politely add a comment explaining briefly why we think it works as intended and close the issue.
2. Label the issue `type/works-as-intended`.

@@ -192,14 +188,13 @@ First, evaluate if the documentation makes sense to be included in the Grafana p

- Is this something we want/can maintain as a project?
- Is this referring to usage of some specific integration/tool and in that case is that a popular use case in combination with Grafana?
- If unsure, kindly and politely add a comment explaining that we would need [upvotes](https://github.blog/2016-03-10-add-reactions-to-pull-requests-issues-and-comments) to identify that lots of other users want/need this.
- If unsure, kindly and politely add a comment explaining that we would need [upvotes](https://help.github.com/en/articles/about-conversations-on-github#reacting-to-ideas-in-comments) to identify that lots of other users want/need this.

Second, label the issue `type/docs` and at least one `area/*` or `datasource/*` label.

**Minor typo/error/lack of information:**

There's a minor typo/error/lack of information that adds a lot of confusion for users and given the amount of work is a big win to make sure fixing it:

1. Either update the documentation yourself and open a pull request.
2. Either delegate the work to someone else by assigning that person to the issue and add the issue to next major/minor milestone.

@@ -296,20 +291,20 @@ In many cases the issue author or community as a whole is more suitable to contr

## Investigation of issues

When an issue has all basic information provided, but the triage responsible haven't been able to reproduce the reported problem at a first glance, the issue is labeled [Needs investigation](https://github.com/grafana/grafana/labels/needs%20investigation). Depending on the perceived severity and/or number of [upvotes](https://github.blog/2016-03-10-add-reactions-to-pull-requests-issues-and-comments), the investigation will either be delegated to another maintainer for further investigation or put on hold until someone else (maintainer or contributor) picks it up and eventually starts investigating it.
When an issue has all basic information provided, but the triage responsible haven't been able to reproduce the reported problem at a first glance, the issue is labeled [Needs investigation](https://github.com/grafana/grafana/labels/needs%20investigation). Depending on the perceived severity and/or number of [upvotes](https://help.github.com/en/articles/about-conversations-on-github#reacting-to-ideas-in-comments), the investigation will either be delegated to another maintainer for further investigation or put on hold until someone else (maintainer or contributor) picks it up and eventually starts investigating it.

Investigating issues can be a very time consuming task, especially for the maintainers, given the huge number of combinations of plugins, data sources, platforms, databases, browsers, tools, hardware, integrations, versions and cloud services, etc that are being used with Grafana. There is a certain number of combinations that are more common than others, and these are in general easier for maintainers to investigate.

For some other combinations it may not be possible at all for a maintainer to setup a proper test environment to investigate the issue. In these cases we really appreciate any help we can get from the community. Otherwise the issue is highly likely to be closed.

Even if you don't have the time or knowledge to investigate an issue we highly recommend that you [upvote](https://github.blog/2016-03-10-add-reactions-to-pull-requests-issues-and-comments) the issue if you happen to have the same problem. If you have further details that may help investigating the issue please provide as much information as possible.
Even if you don't have the time or knowledge to investigate an issue we highly recommend that you [upvote](https://help.github.com/en/articles/about-conversations-on-github#reacting-to-ideas-in-comments) the issue if you happen to have the same problem. If you have further details that may help investigating the issue please provide as much information as possible.

## Automation
## Automation

We have some automation that triggers on comments or labels being added to issues. Many of these automated behaviors are defined in [commands.json](https://github.com/grafana/grafana/blob/main/.github/commands.json). Or in other [GitHub Actions](https://github.com/grafana/grafana/tree/main/.github/workflows)

- Add /duplicate `#<issue number>` to have Grafana label & close issue with an appropriate message.
- Add `bot/question` and the bot will close it with an appropriate message.
* Add /duplicate `#<issue number>` to have Grafana label & close issue with an appropriate message.
* Add `bot/question` and the bot will close it with an appropriate message.

[Read more on bot actions](https://github.com/grafana/grafana/blob/main/.github/bot.md)

@@ -329,7 +324,6 @@ Part of issue triage should also be triaging of external PRs. Main goal should b
If you're using Gmail it's highly recommended that you setup filters to automatically remove email from the inbox and label them accordingly to make it easy for you to understand when you need to act upon a notification or process all incoming issues that haven't been triaged.

This may be setup by personal preference, but here's a working configuration for reference.

1. Follow instructions in [gist](https://gist.github.com/marefr/9167c2e31466f6316c1cba118874e74f)
2. In Gmail, go to Settings -> Filters and Blocked Addresses
3. Import filters -> select xml file -> Open file
@@ -338,7 +332,6 @@ This may be setup by personal preference, but here's a working configuration for
6. Create filters

This will give you a structure of labels in the sidebar similar to the following:

```
- Inbox
...

@@ -17,13 +17,11 @@ packages/grafana-toolkit/
packages/grafana-ui/
packages/jaeger-ui-components/
packaging/
pkg/coremodel/
pkg/framework/coremodel/
grafana-mixin/
cue/
```

The following directories and their subdirectories are licensed under their original upstream licenses:
The following directories and their subdirectories are the original upstream licenses:

```
public/vendor/

Makefile
@@ -2,20 +2,13 @@
##
## For more information, refer to https://suva.sh/posts/well-documented-makefiles/

WIRE_TAGS = "oss"

-include local/Makefile
include .bingo/Variables.mk

.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-full build-docker-full-ubuntu lint-go golangci-lint test-go test-js gen-ts test run run-frontend clean devenv devenv-down protobuf drone help
.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go golangci-lint test-go test-js test run run-frontend clean devenv devenv-down protobuf drone help

GO = go
GO_FILES ?= ./pkg/...
SH_FILES ?= $(shell find ./scripts -name *.sh)
API_DEFINITION_FILES = $(shell find ./pkg/api/docs/definitions -name '*.go' -print)
SWAGGER_TAG ?= latest
GO_BUILD_FLAGS += $(if $(GO_BUILD_DEV),-dev)
GO_BUILD_FLAGS += $(if $(GO_BUILD_TAGS),-build-tags=$(GO_BUILD_TAGS))

all: deps build

@@ -30,74 +23,21 @@ deps: deps-js ## Install all dependencies.

node_modules: package.json yarn.lock ## Install node modules.
@echo "install frontend dependencies"
YARN_ENABLE_PROGRESS_BARS=false yarn install --immutable

##@ Swagger
SPEC_TARGET = public/api-spec.json
MERGED_SPEC_TARGET := public/api-merged.json
NGALERT_SPEC_TARGET = pkg/services/ngalert/api/tooling/api.json

$(SPEC_TARGET): $(API_DEFINITION_FILES) ## Generate API spec
docker run --rm -it \
-e GOPATH=${HOME}/go:/go \
-e SWAGGER_GENERATE_EXTENSION=false \
-v ${HOME}/go:/go \
-v $$(pwd):/grafana \
-w $$(pwd)/pkg/api/docs quay.io/goswagger/swagger:$(SWAGGER_TAG) \
generate spec -m -o /grafana/public/api-spec.json \
-w /grafana/pkg/server \
-x "grafana/grafana/pkg/services/ngalert/api/tooling/definitions" \
-x "github.com/prometheus/alertmanager" \
-i /grafana/pkg/api/docs/tags.json

swagger-api-spec: gen-go $(SPEC_TARGET) $(MERGED_SPEC_TARGET) validate-api-spec

$(NGALERT_SPEC_TARGET):
+$(MAKE) -C pkg/services/ngalert/api/tooling api.json

$(MERGED_SPEC_TARGET): $(SPEC_TARGET) $(NGALERT_SPEC_TARGET) ## Merge generated and ngalert API specs
go run pkg/api/docs/merge/merge_specs.go -o=public/api-merged.json $(<) pkg/services/ngalert/api/tooling/api.json

ensure_go-swagger_mac:
@hash swagger &>/dev/null || (brew tap go-swagger/go-swagger && brew install go-swagger)

--swagger-api-spec-mac: ensure_go-swagger_mac $(API_DEFINITION_FILES) ## Generate API spec (for M1 Mac)
swagger generate spec -m -w pkg/server -o public/api-spec.json \
-x "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" \
-x "github.com/prometheus/alertmanager" \
-i pkg/api/docs/tags.json

swagger-api-spec-mac: gen-go --swagger-api-spec-mac $(MERGED_SPEC_TARGET) validate-api-spec

validate-api-spec: $(MERGED_SPEC_TARGET) ## Validate API spec
docker run --rm -it \
-e GOPATH=${HOME}/go:/go \
-e SWAGGER_GENERATE_EXTENSION=false \
-v ${HOME}/go:/go \
-v $$(pwd):/grafana \
-w $$(pwd)/pkg/api/docs quay.io/goswagger/swagger:$(SWAGGER_TAG) \
validate /grafana/$(<)

clean-api-spec:
rm $(SPEC_TARGET) $(MERGED_SPEC_TARGET)
yarn install --pure-lockfile --no-progress

##@ Building

gen-go: $(WIRE)
@echo "generate go files"
$(WIRE) gen -tags $(WIRE_TAGS) ./pkg/server ./pkg/cmd/grafana-cli/runner

build-go: $(MERGED_SPEC_TARGET) gen-go ## Build all Go binaries.
build-go: ## Build all Go binaries.
@echo "build go files"
$(GO) run build.go $(GO_BUILD_FLAGS) build
$(GO) run build.go build

build-server: ## Build Grafana server.
@echo "build server"
$(GO) run build.go $(GO_BUILD_FLAGS) build-server
$(GO) run build.go build-server

build-cli: ## Build Grafana CLI application.
@echo "build grafana-cli"
$(GO) run build.go $(GO_BUILD_FLAGS) build-cli
$(GO) run build.go build-cli

build-js: ## Build frontend assets.
@echo "build frontend"
@@ -139,7 +79,7 @@ golangci-lint: scripts/go/bin/golangci-lint
--config ./scripts/go/configs/.golangci.toml \
$(GO_FILES)

lint-go: golangci-lint ## Run all code checks for backend. You can use GO_FILES to specify exact files to check
lint-go: golangci-lint # Run all code checks for backend.

# with disabled SC1071 we are ignored some TCL,Expect `/usr/bin/env expect` scripts
shellcheck: $(SH_FILES) ## Run checks for shell scripts.
@@ -148,15 +88,17 @@ shellcheck: $(SH_FILES) ## Run checks for shell scripts.

##@ Docker

build-docker-dev: ## Build Docker image for development (fast).
@echo "build development container"
@echo "\033[92mInfo:\033[0m the frontend code is expected to be built already."
$(GO) run build.go -goos linux -pkg-arch amd64 ${OPT} build latest
cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
cd packaging/docker && docker build --tag grafana/grafana:dev .

build-docker-full: ## Build Docker image for development.
@echo "build docker container"
docker build --tag grafana/grafana:dev .

build-docker-full-ubuntu: ## Build Docker image based on Ubuntu for development.
@echo "build docker container"
docker build --tag grafana/grafana:dev-ubuntu -f ./Dockerfile.ubuntu .


##@ Services

# create docker-compose file with provided sources and start them
@@ -198,19 +140,13 @@ clean: ## Clean up intermediate build artifacts.
rm -rf node_modules
rm -rf public/build

gen-ts:
@echo "generating TypeScript definitions"
go get github.com/tkrajina/typescriptify-golang-structs/typescriptify@v0.1.7
tscriptify -interface -package=github.com/grafana/grafana/pkg/services/live/pipeline -import="import { FieldConfig } from '@grafana/data'" -target=public/app/features/live/pipeline/models.gen.ts pkg/services/live/pipeline/config.go
go mod tidy

# This repository's configuration is protected (https://readme.drone.io/signature/).
# Use this make target to regenerate the configuration YAML files when
# you modify starlark files.
drone: $(DRONE)
$(DRONE) starlark --format
$(DRONE) lint .drone.yml --trusted
$(DRONE) --server https://drone.grafana.net sign --save grafana/grafana
drone:
drone starlark
drone lint
drone --server https://drone.grafana.net sign --save grafana/grafana

help: ## Display this help.
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

@@ -1,4 +1,6 @@

Copyright 2014-2021 Grafana Labs

This software is based on Kibana:
This software is based on Kibana:
Copyright 2012-2013 Elasticsearch BV


@@ -1,27 +1,27 @@
# Plugin development
# Plugin development

This document is not meant as a complete guide for developing plugins but more as a changelog for changes in
Grafana that can impact plugin development. Whenever you as a plugin author encounter an issue with your plugin after
upgrading Grafana please check here before creating an issue.
upgrading Grafana please check here before creating an issue.

## Plugin development resources

- [Grafana plugin developer guide](https://grafana.com/docs/grafana/latest/developers/plugins/)
- [Grafana plugin developer guide](http://docs.grafana.org/plugins/developing/development/)
- [Webpack Grafana plugin template project](https://github.com/CorpGlory/grafana-plugin-template-webpack)
- [Simple JSON datasource plugin](https://github.com/grafana/simple-json-datasource)

## Changes in Grafana v4.6

This version of Grafana has big changes that will impact a limited set of plugins. We moved from systemjs to webpack
for built-in plugins and everything internal. External plugins still use systemjs but now with a limited
set of Grafana components they can import. Plugins can depend on libs like lodash & moment and internal components
like before using the same import paths. However since everything in Grafana is no longer accessible, a few plugins could encounter issues when importing a Grafana dependency.
for built-in plugins and everything internal. External plugins still use systemjs but now with a limited
set of Grafana components they can import. Plugins can depend on libs like lodash & moment and internal components
like before using the same import paths. However since everything in Grafana is no longer accessible, a few plugins could encounter issues when importing a Grafana dependency.

[List of exposed components plugins can import/require](https://github.com/grafana/grafana/blob/main/public/app/features/plugins/plugin_loader.ts#L48)

If you think we missed exposing a crucial lib or Grafana component let us know by opening an issue.
If you think we missed exposing a crucial lib or Grafana component let us know by opening an issue.

### Deprecated components
### Deprecated components

The angular directive `<spectrum-picker>` is now deprecated (will still work for a version more) but we recommend plugin authors
upgrade to new `<color-picker color="ctrl.color" onChange="ctrl.onSparklineColorChange"></color-picker>`

README.md
@@ -1,14 +1,14 @@

The open-source platform for monitoring and observability
|
||||
The open-source platform for monitoring and observability.
|
||||
|
||||
[](LICENSE)
|
||||
[](https://drone.grafana.net/grafana/grafana)
|
||||
[](https://goreportcard.com/report/github.com/grafana/grafana)
|
||||
|
||||
Grafana allows you to query, visualize, alert on and understand your metrics no matter where they are stored. Create, explore, and share dashboards with your team and foster a data-driven culture:
|
||||
Grafana allows you to query, visualize, alert on and understand your metrics no matter where they are stored. Create, explore, and share dashboards with your team and foster a data driven culture:
|
||||
|
||||
- **Visualizations:** Fast and flexible client side graphs with a multitude of options. Panel plugins offer many different ways to visualize metrics and logs.
|
||||
- **Visualize:** Fast and flexible client side graphs with a multitude of options. Panel plugins offer many different ways to visualize metrics and logs.
|
||||
- **Dynamic Dashboards:** Create dynamic & reusable dashboards with template variables that appear as dropdowns at the top of the dashboard.
|
||||
- **Explore Metrics:** Explore your data through ad-hoc queries and dynamic drilldown. Split view and compare different time ranges, queries and data sources side by side.
|
||||
- **Explore Logs:** Experience the magic of switching from metrics to logs with preserved label filters. Quickly search through all your logs or streaming them live.
|
||||
@@ -30,8 +30,8 @@ The Grafana documentation is available at [grafana.com/docs](https://grafana.com
|
||||
|
||||
If you're interested in contributing to the Grafana project:
|
||||
|
||||
- Start by reading the [Contributing guide](https://github.com/grafana/grafana/blob/HEAD/CONTRIBUTING.md).
|
||||
- Learn how to set up your local environment, in our [Developer guide](https://github.com/grafana/grafana/blob/HEAD/contribute/developer-guide.md).
|
||||
- Start by reading the [Contributing guide](/CONTRIBUTING.md).
|
||||
- Learn how to set up your local environment, in our [Developer guide](/contribute/developer-guide.md).
|
||||
- Explore our [beginner-friendly issues](https://github.com/grafana/grafana/issues?q=is%3Aopen+is%3Aissue+label%3A%22beginner+friendly%22).
|
||||
- Look through our [style guide and Storybook](https://developers.grafana.com/ui/latest/index.html).
|
||||
|
||||
@@ -40,8 +40,8 @@ If you're interested in contributing to the Grafana project:
|
||||
- Follow [@grafana on Twitter](https://twitter.com/grafana/).
|
||||
- Read and subscribe to the [Grafana blog](https://grafana.com/blog/).
|
||||
- If you have a specific question, check out our [discussion forums](https://community.grafana.com/).
|
||||
- For general discussions, join us on the [official Slack](https://slack.grafana.com) team.
|
||||
- For general discussions, join us on the [official Slack](http://slack.raintank.io/) team.
|
||||
|
||||
## License
|
||||
|
||||
Grafana is distributed under [AGPL-3.0-only](LICENSE). For Apache-2.0 exceptions, see [LICENSING.md](https://github.com/grafana/grafana/blob/HEAD/LICENSING.md).
|
||||
Grafana is distributed under [AGPL-3.0-only](LICENSE). For Apache-2.0 exceptions, see [LICENSING.md](LICENSING.md).
|
||||
|
||||
@@ -1,5 +1,5 @@

# Roadmap

The roadmap is a tentative plan for the core development team. Things change constantly as pull requests come in and priorities change, but it will give you an idea of our current vision and plan.

To view the Roadmap, go to the Issues tab on GitHub. There you will find three roadmap issues pinned at the top.
10 SECURITY.md
@@ -1,10 +1,10 @@

# Reporting security issues

If you think you have found a security vulnerability, please send a report to [security@grafana.com](mailto:security@grafana.com). This address can be used for all of Grafana Labs's open source and commercial products (including but not limited to Grafana, Grafana Cloud, Grafana Enterprise, and grafana.com). We can accept only vulnerability reports at this address.

Please encrypt your message to us; please use our PGP key. The key fingerprint is:

F988 7BEA 027A 049F AE8E 5CAA D125 8932 BE24 C5CA

The key is available from [keyserver.ubuntu.com](https://keyserver.ubuntu.com/pks/lookup?search=0xF9887BEA027A049FAE8E5CAAD1258932BE24C5CA&fingerprint=on&op=index).

@@ -14,7 +14,7 @@ Grafana Labs will send you a response indicating the next steps in handling your

## Security announcements

We maintain a category on the community site called [Security Announcements](https://community.grafana.com/c/support/security-announcements),
where we will post a summary, remediation, and mitigation details for any patch containing security fixes.
We maintain a category on the community site called [Security Announcements](https://community.grafana.com/c/security-announcements),
where we will post a summary, remediation, and mitigation details for any patch containing security fixes.

You can also subscribe to email updates to this category if you have a grafana.com account and sign on to the community site or track updates via an [RSS feed](https://community.grafana.com/c/support/security-announcements.rss).
You can also subscribe to email updates to this category if you have a grafana.com account and sign on to the community site or track updates via an [RSS feed](https://community.grafana.com/c/security-announcements.rss).
@@ -1,13 +1,10 @@

# Get Grafana help

---
------------------

First, check the official [Grafana documentation](https://grafana.com/docs/).

If you require further help or support then ask a question in the [Grafana community site](https://community.grafana.com/) or [Grafana Slack](http://slack.raintank.io/). You can also search the community site for previously answered questions, in case someone already had your problem and got help.

**Please note:**

- The Grafana project uses GitHub mainly for tracking bugs and feature requests.
- Do not open an issue just to ask a question. The issue will be closed immediately.
- Only submit issues for bug reports, feature requests, or enhancements.

@@ -15,7 +15,6 @@ Upgrading Go or Node.js requires making changes in many different files. See bel

- `grafana/build-container`
- Appveyor
- Dockerfile
- `.github/workflows/publish.yml`

## Go dependencies
10 WORKFLOW.md
@@ -13,7 +13,6 @@ Team members and their access to repositories is maintained through [GitHub team

## Proposing changes

Examples of proposed changes are overarching architecture, component design, and specific code or graphical elements. Proposed changes SHOULD cover the big picture and intention, but individual parts SHOULD be split into the smallest possible changes. Changes SHOULD be based on and target the main branch. Depending on the size of the proposed change, each change SHOULD be discussed, in increasing order of change size and complexity:

- Directly in a PR (Pull Request) - this MAY be done, but SHOULD not be the common case.
- Issue
- Developer mailing list

@@ -25,7 +24,6 @@ Significant changes MUST be discussed and agreed upon with the relevant subsyste

Depending on the size and complexity of a PR, different requirements MUST be applied. Any team member contributing substantially to a PR MUST NOT count against review requirements.
Commits MUST be merged into main using PRs. They MUST NOT be merged into main directly.

- Every merge MUST be approved by at least one team member.
- Non-trivial changes MUST be approved by at least
  - two team members, or
@@ -35,7 +33,6 @@ Commits MUST be merged into main using PRs. They MUST NOT be merged into main di
  - the relevant subsystem maintainer.

PRs MUST be [reviewed](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/reviewing-changes-in-pull-requests) and [approved](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/approving-a-pull-request-with-required-reviews) via GitHub’s review system.

- Reviewers MAY write comments if approving.
- Reviewers MUST write comments if rejecting a PR or if requesting changes.

@@ -43,9 +40,9 @@ Once a PR is approved as per above, any team member MAY merge the PR.

## Backporting a PR

PRs intended for inclusion in the next PATCH release must be backported to the release branch. The bot can do this automatically. [Read more on backport PRs](https://github.com/grafana/grafana/blob/main/.github/bot.md). Both the source PR and the backport PR should be assigned to the patch release milestone, unless you are backporting to many releases, in which case it can differ.

Backport PRs are also needed during the beta period to get fixes into the stable release.

# Release workflow

@@ -54,7 +51,6 @@ Backport PRs are also needed during the beta period to get fixes into the stable

Grafana uses trunk-based development.

In particular, we found that the following principles match how we work:

- Main and release branches MUST always build without failure.
- Branches SHOULD be merged often. Larger changes SHOULD be activated with feature flags until they are ready. Long-lived development branches SHOULD be avoided.
- Changes MAY be enabled by default once they are in a complete state.
@@ -65,7 +61,6 @@ In particular, we found that the following principles match how we work:

Releases MUST follow [Semantic Versioning](https://semver.org/) in naming and SHOULD follow Semantic Versioning as closely as reasonably possible for non-library software.

Release branches MUST be split from the following branches.

- MAJOR release branches MUST be based on main.
- MINOR release branches MUST be based on main.
- PATCH release branches MUST be split from the relevant MINOR release branch’s most current PATCH
@@ -73,7 +68,6 @@ Release branches MUST be split from the following branches.

Security releases follow the same process but MUST be prepared in secret. Security releases MUST NOT include changes which are not related to the security fix. Normal release processes MUST accommodate the security release process. SECURITY.md MUST be followed.

Releases follow the following cadence:

- MAJOR: Yearly
- MINOR: Every 4-6 weeks
- PATCH: As needed
@@ -1,62 +0,0 @@
{
  "babelrc": false,
  // Note: order is bottom-to-top and/or right-to-left
  "presets": [
    [
      "@babel/preset-env",
      {
        "bugfixes": true,
        "browserslistEnv": "dev",
        "useBuiltIns": "entry",
        "corejs": "3.10"
      }
    ],
    [
      "@babel/preset-typescript",
      {
        "allowNamespaces": true,
        "allowDeclareFields": true
      }
    ],
    [
      "@babel/preset-react",
      {
        "runtime": "automatic"
      }
    ]
  ],
  "plugins": [
    [
      "@babel/plugin-transform-typescript",
      {
        "allowNamespaces": true,
        "allowDeclareFields": true
      }
    ],
    // added to mitigate https://github.com/babel/babel/issues/14289
    // package (and following line) can be removed once the issue is fixed and released
    "@babel/plugin-proposal-class-properties",
    ["@babel/plugin-proposal-object-rest-spread", { "loose": true }],
    "@babel/plugin-transform-react-constant-elements",
    "@babel/plugin-proposal-nullish-coalescing-operator",
    "@babel/plugin-proposal-optional-chaining",
    "@babel/plugin-syntax-dynamic-import", // needed for `() => import()` in routes.ts
    "angularjs-annotate",
    "macros"
  ],
  "env": {
    "production": {
      "presets": [
        [
          "@babel/preset-env",
          {
            "browserslistEnv": "production"
          }
        ]
      ]
    },
    "hot": {
      "plugins": ["react-refresh/babel"]
    }
  }
}
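
The removed `.babelrc` above mostly exists to enable newer syntax in the frontend codebase. As a rough illustration (not actual Grafana code; the names and the lazy-loaded path are placeholders), the following TypeScript relies on the class-properties, optional-chaining, nullish-coalescing, and dynamic-import plugins listed in that config.

```typescript
// Illustrative only: symbol names and module paths are assumptions.
interface FeatureToggles {
  explore2Dashboard?: boolean;
}

export class SettingsReader {
  // Class field initializer -> @babel/plugin-proposal-class-properties
  toggles: FeatureToggles = {};

  isExploreToDashboardEnabled(): boolean {
    // Optional chaining + nullish coalescing ->
    // @babel/plugin-proposal-optional-chaining and -nullish-coalescing-operator
    return this.toggles?.explore2Dashboard ?? false;
  }
}

// `() => import()` is the pattern the "needed for routes.ts" comment above refers to;
// './DashboardPage' is a placeholder module path for the sketch.
export const loadDashboardPage = () => import('./DashboardPage');
```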
466 build.go
@@ -3,14 +3,476 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/md5"
|
||||
"crypto/sha256"
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/build"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
"github.com/grafana/grafana/pkg/build"
|
||||
const (
|
||||
windows = "windows"
|
||||
linux = "linux"
|
||||
)
|
||||
|
||||
var (
|
||||
//versionRe = regexp.MustCompile(`-[0-9]{1,3}-g[0-9a-f]{5,10}`)
|
||||
goarch string
|
||||
goos string
|
||||
gocc string
|
||||
cgo bool
|
||||
libc string
|
||||
pkgArch string
|
||||
version string = "v1"
|
||||
buildTags []string
|
||||
// deb & rpm does not support semver so have to handle their version a little differently
|
||||
linuxPackageVersion string = "v1"
|
||||
linuxPackageIteration string = ""
|
||||
race bool
|
||||
workingDir string
|
||||
includeBuildId bool = true
|
||||
buildId string = "0"
|
||||
serverBinary string = "grafana-server"
|
||||
cliBinary string = "grafana-cli"
|
||||
binaries []string = []string{serverBinary, cliBinary}
|
||||
isDev bool = false
|
||||
enterprise bool = false
|
||||
skipRpmGen bool = false
|
||||
skipDebGen bool = false
|
||||
printGenVersion bool = false
|
||||
)
|
||||
|
||||
func main() {
|
||||
log.SetOutput(os.Stdout)
|
||||
log.SetFlags(0)
|
||||
os.Exit(build.RunCmd())
|
||||
|
||||
var buildIdRaw string
|
||||
var buildTagsRaw string
|
||||
|
||||
flag.StringVar(&goarch, "goarch", runtime.GOARCH, "GOARCH")
|
||||
flag.StringVar(&goos, "goos", runtime.GOOS, "GOOS")
|
||||
flag.StringVar(&gocc, "cc", "", "CC")
|
||||
flag.StringVar(&libc, "libc", "", "LIBC")
|
||||
flag.StringVar(&buildTagsRaw, "build-tags", "", "Sets custom build tags")
|
||||
flag.BoolVar(&cgo, "cgo-enabled", cgo, "Enable cgo")
|
||||
flag.StringVar(&pkgArch, "pkg-arch", "", "PKG ARCH")
|
||||
flag.BoolVar(&race, "race", race, "Use race detector")
|
||||
flag.BoolVar(&includeBuildId, "includeBuildId", includeBuildId, "IncludeBuildId in package name")
|
||||
flag.BoolVar(&enterprise, "enterprise", enterprise, "Build enterprise version of Grafana")
|
||||
flag.StringVar(&buildIdRaw, "buildId", "0", "Build ID from CI system")
|
||||
flag.BoolVar(&isDev, "dev", isDev, "optimal for development, skips certain steps")
|
||||
flag.BoolVar(&skipRpmGen, "skipRpm", skipRpmGen, "skip rpm package generation (default: false)")
|
||||
flag.BoolVar(&skipDebGen, "skipDeb", skipDebGen, "skip deb package generation (default: false)")
|
||||
flag.BoolVar(&printGenVersion, "gen-version", printGenVersion, "generate Grafana version and output (default: false)")
|
||||
flag.Parse()
|
||||
|
||||
buildId = shortenBuildId(buildIdRaw)
|
||||
|
||||
readVersionFromPackageJson()
|
||||
|
||||
if pkgArch == "" {
|
||||
pkgArch = goarch
|
||||
}
|
||||
|
||||
if printGenVersion {
|
||||
printGeneratedVersion()
|
||||
return
|
||||
}
|
||||
|
||||
if len(buildTagsRaw) > 0 {
|
||||
buildTags = strings.Split(buildTagsRaw, ",")
|
||||
}
|
||||
|
||||
log.Printf("Version: %s, Linux Version: %s, Package Iteration: %s\n", version, linuxPackageVersion, linuxPackageIteration)
|
||||
|
||||
if flag.NArg() == 0 {
|
||||
log.Println("Usage: go run build.go build")
|
||||
return
|
||||
}
|
||||
|
||||
workingDir, _ = os.Getwd()
|
||||
|
||||
for _, cmd := range flag.Args() {
|
||||
switch cmd {
|
||||
case "setup":
|
||||
setup()
|
||||
|
||||
case "build-srv", "build-server":
|
||||
clean()
|
||||
doBuild("grafana-server", "./pkg/cmd/grafana-server", buildTags)
|
||||
|
||||
case "build-cli":
|
||||
clean()
|
||||
doBuild("grafana-cli", "./pkg/cmd/grafana-cli", buildTags)
|
||||
|
||||
case "build":
|
||||
//clean()
|
||||
for _, binary := range binaries {
|
||||
doBuild(binary, "./pkg/cmd/"+binary, buildTags)
|
||||
}
|
||||
|
||||
case "build-frontend":
|
||||
yarn("build")
|
||||
|
||||
case "sha-dist":
|
||||
shaFilesInDist()
|
||||
|
||||
case "latest":
|
||||
makeLatestDistCopies()
|
||||
|
||||
case "clean":
|
||||
clean()
|
||||
|
||||
default:
|
||||
log.Fatalf("Unknown command %q", cmd)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func makeLatestDistCopies() {
|
||||
files, err := ioutil.ReadDir("dist")
|
||||
if err != nil {
|
||||
log.Fatalf("failed to create latest copies. Cannot read from /dist")
|
||||
}
|
||||
|
||||
latestMapping := map[string]string{
|
||||
"_amd64.deb": "dist/grafana_latest_amd64.deb",
|
||||
".x86_64.rpm": "dist/grafana-latest-1.x86_64.rpm",
|
||||
".linux-amd64.tar.gz": "dist/grafana-latest.linux-x64.tar.gz",
|
||||
".linux-amd64-musl.tar.gz": "dist/grafana-latest.linux-x64-musl.tar.gz",
|
||||
".linux-armv7.tar.gz": "dist/grafana-latest.linux-armv7.tar.gz",
|
||||
".linux-armv7-musl.tar.gz": "dist/grafana-latest.linux-armv7-musl.tar.gz",
|
||||
".linux-armv6.tar.gz": "dist/grafana-latest.linux-armv6.tar.gz",
|
||||
".linux-arm64.tar.gz": "dist/grafana-latest.linux-arm64.tar.gz",
|
||||
".linux-arm64-musl.tar.gz": "dist/grafana-latest.linux-arm64-musl.tar.gz",
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
for extension, fullName := range latestMapping {
|
||||
if strings.HasSuffix(file.Name(), extension) {
|
||||
runError("cp", path.Join("dist", file.Name()), fullName)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func readVersionFromPackageJson() {
|
||||
reader, err := os.Open("package.json")
|
||||
if err != nil {
|
||||
log.Fatal("Failed to open package.json")
|
||||
return
|
||||
}
|
||||
defer reader.Close()
|
||||
|
||||
jsonObj := map[string]interface{}{}
|
||||
jsonParser := json.NewDecoder(reader)
|
||||
|
||||
if err := jsonParser.Decode(&jsonObj); err != nil {
|
||||
log.Fatal("Failed to decode package.json")
|
||||
}
|
||||
|
||||
version = jsonObj["version"].(string)
|
||||
linuxPackageVersion = version
|
||||
linuxPackageIteration = ""
|
||||
|
||||
// handle pre version stuff (deb / rpm does not support semver)
|
||||
parts := strings.Split(version, "-")
|
||||
|
||||
if len(parts) > 1 {
|
||||
linuxPackageVersion = parts[0]
|
||||
linuxPackageIteration = parts[1]
|
||||
}
|
||||
|
||||
// add timestamp to iteration
|
||||
if includeBuildId {
|
||||
if buildId != "0" {
|
||||
linuxPackageIteration = fmt.Sprintf("%s%s", buildId, linuxPackageIteration)
|
||||
} else {
|
||||
linuxPackageIteration = fmt.Sprintf("%d%s", time.Now().Unix(), linuxPackageIteration)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func yarn(params ...string) {
|
||||
runPrint(`yarn run`, params...)
|
||||
}
|
||||
|
||||
func genPackageVersion() string {
|
||||
if includeBuildId {
|
||||
return fmt.Sprintf("%v-%v", linuxPackageVersion, linuxPackageIteration)
|
||||
} else {
|
||||
return version
|
||||
}
|
||||
}
|
||||
|
||||
func setup() {
|
||||
args := []string{"install", "-v"}
|
||||
if goos == windows {
|
||||
args = append(args, "-buildmode=exe")
|
||||
}
|
||||
args = append(args, "./pkg/cmd/grafana-server")
|
||||
runPrint("go", args...)
|
||||
}
|
||||
|
||||
func printGeneratedVersion() {
|
||||
fmt.Print(genPackageVersion())
|
||||
}
|
||||
|
||||
func test(pkg string) {
|
||||
setBuildEnv()
|
||||
args := []string{"test", "-short", "-timeout", "60s"}
|
||||
if goos == windows {
|
||||
args = append(args, "-buildmode=exe")
|
||||
}
|
||||
args = append(args, pkg)
|
||||
runPrint("go", args...)
|
||||
}
|
||||
|
||||
func doBuild(binaryName, pkg string, tags []string) {
|
||||
libcPart := ""
|
||||
if libc != "" {
|
||||
libcPart = fmt.Sprintf("-%s", libc)
|
||||
}
|
||||
binary := fmt.Sprintf("./bin/%s-%s%s/%s", goos, goarch, libcPart, binaryName)
|
||||
if isDev {
|
||||
//don't include os/arch/libc in output path in dev environment
|
||||
binary = fmt.Sprintf("./bin/%s", binaryName)
|
||||
}
|
||||
|
||||
if goos == windows {
|
||||
binary += ".exe"
|
||||
}
|
||||
|
||||
if !isDev {
|
||||
rmr(binary, binary+".md5")
|
||||
}
|
||||
args := []string{"build", "-ldflags", ldflags()}
|
||||
if goos == windows {
|
||||
// Work around a linking error on Windows: "export ordinal too large"
|
||||
args = append(args, "-buildmode=exe")
|
||||
}
|
||||
if len(tags) > 0 {
|
||||
args = append(args, "-tags", strings.Join(tags, ","))
|
||||
}
|
||||
if race {
|
||||
args = append(args, "-race")
|
||||
}
|
||||
|
||||
args = append(args, "-o", binary)
|
||||
args = append(args, pkg)
|
||||
|
||||
if !isDev {
|
||||
setBuildEnv()
|
||||
runPrint("go", "version")
|
||||
libcPart := ""
|
||||
if libc != "" {
|
||||
libcPart = fmt.Sprintf("/%s", libc)
|
||||
}
|
||||
fmt.Printf("Targeting %s/%s%s\n", goos, goarch, libcPart)
|
||||
}
|
||||
|
||||
runPrint("go", args...)
|
||||
|
||||
if !isDev {
|
||||
// Create an md5 checksum of the binary, to be included in the archive for
|
||||
// automatic upgrades.
|
||||
err := md5File(binary)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func ldflags() string {
|
||||
var b bytes.Buffer
|
||||
b.WriteString("-w")
|
||||
b.WriteString(fmt.Sprintf(" -X main.version=%s", version))
|
||||
b.WriteString(fmt.Sprintf(" -X main.commit=%s", getGitSha()))
|
||||
b.WriteString(fmt.Sprintf(" -X main.buildstamp=%d", buildStamp()))
|
||||
b.WriteString(fmt.Sprintf(" -X main.buildBranch=%s", getGitBranch()))
|
||||
if v := os.Getenv("LDFLAGS"); v != "" {
|
||||
b.WriteString(fmt.Sprintf(" -extldflags \"%s\"", v))
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func rmr(paths ...string) {
|
||||
for _, path := range paths {
|
||||
log.Println("rm -r", path)
|
||||
os.RemoveAll(path)
|
||||
}
|
||||
}
|
||||
|
||||
func clean() {
|
||||
if isDev {
|
||||
return
|
||||
}
|
||||
|
||||
rmr("dist")
|
||||
rmr("tmp")
|
||||
rmr(filepath.Join(build.Default.GOPATH, fmt.Sprintf("pkg/%s_%s/github.com/grafana", goos, goarch)))
|
||||
}
|
||||
|
||||
func setBuildEnv() {
|
||||
os.Setenv("GOOS", goos)
|
||||
if goos == windows {
|
||||
// require windows >=7
|
||||
os.Setenv("CGO_CFLAGS", "-D_WIN32_WINNT=0x0601")
|
||||
}
|
||||
if goarch != "amd64" || goos != linux {
|
||||
// needed for all other archs
|
||||
cgo = true
|
||||
}
|
||||
if strings.HasPrefix(goarch, "armv") {
|
||||
os.Setenv("GOARCH", "arm")
|
||||
os.Setenv("GOARM", goarch[4:])
|
||||
} else {
|
||||
os.Setenv("GOARCH", goarch)
|
||||
}
|
||||
if cgo {
|
||||
os.Setenv("CGO_ENABLED", "1")
|
||||
}
|
||||
if gocc != "" {
|
||||
os.Setenv("CC", gocc)
|
||||
}
|
||||
}
|
||||
|
||||
func getGitBranch() string {
|
||||
v, err := runError("git", "rev-parse", "--abbrev-ref", "HEAD")
|
||||
if err != nil {
|
||||
return "main"
|
||||
}
|
||||
return string(v)
|
||||
}
|
||||
|
||||
func getGitSha() string {
|
||||
v, err := runError("git", "rev-parse", "--short", "HEAD")
|
||||
if err != nil {
|
||||
return "unknown-dev"
|
||||
}
|
||||
return string(v)
|
||||
}
|
||||
|
||||
func buildStamp() int64 {
|
||||
// use SOURCE_DATE_EPOCH if set.
|
||||
if s, _ := strconv.ParseInt(os.Getenv("SOURCE_DATE_EPOCH"), 10, 64); s > 0 {
|
||||
return s
|
||||
}
|
||||
|
||||
bs, err := runError("git", "show", "-s", "--format=%ct")
|
||||
if err != nil {
|
||||
return time.Now().Unix()
|
||||
}
|
||||
s, _ := strconv.ParseInt(string(bs), 10, 64)
|
||||
return s
|
||||
}
|
||||
|
||||
func runError(cmd string, args ...string) ([]byte, error) {
|
||||
ecmd := exec.Command(cmd, args...)
|
||||
bs, err := ecmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return bytes.TrimSpace(bs), nil
|
||||
}
|
||||
|
||||
func runPrint(cmd string, args ...string) {
|
||||
log.Println(cmd, strings.Join(args, " "))
|
||||
ecmd := exec.Command(cmd, args...)
|
||||
ecmd.Stdout = os.Stdout
|
||||
ecmd.Stderr = os.Stderr
|
||||
err := ecmd.Run()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func md5File(file string) error {
|
||||
fd, err := os.Open(file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer fd.Close()
|
||||
|
||||
h := md5.New()
|
||||
_, err = io.Copy(h, fd)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
out, err := os.Create(file + ".md5")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = fmt.Fprintf(out, "%x\n", h.Sum(nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return out.Close()
|
||||
}
|
||||
|
||||
func shaFilesInDist() {
|
||||
filepath.Walk("./dist", func(path string, f os.FileInfo, err error) error {
|
||||
if path == "./dist" {
|
||||
return nil
|
||||
}
|
||||
|
||||
if !strings.Contains(path, ".sha256") {
|
||||
err := shaFile(path)
|
||||
if err != nil {
|
||||
log.Printf("Failed to create sha file. error: %v\n", err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func shaFile(file string) error {
|
||||
fd, err := os.Open(file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer fd.Close()
|
||||
|
||||
h := sha256.New()
|
||||
_, err = io.Copy(h, fd)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
out, err := os.Create(file + ".sha256")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = fmt.Fprintf(out, "%x\n", h.Sum(nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return out.Close()
|
||||
}
|
||||
|
||||
func shortenBuildId(buildId string) string {
|
||||
buildId = strings.Replace(buildId, "-", "", -1)
|
||||
if len(buildId) < 9 {
|
||||
return buildId
|
||||
}
|
||||
return buildId[0:8]
|
||||
}
|
||||
|
||||
@@ -9,9 +9,6 @@ app_mode = production
|
||||
# instance name, defaults to HOSTNAME environment variable value or hostname if HOSTNAME var is empty
|
||||
instance_name = ${HOSTNAME}
|
||||
|
||||
# force migration will run migrations that might cause dataloss
|
||||
force_migration = false
|
||||
|
||||
#################################### Paths ###############################
|
||||
[paths]
|
||||
# Path to where grafana can store temp files, sessions, and the sqlite3 db (if that is used)
|
||||
@@ -125,9 +122,6 @@ path = grafana.db
|
||||
# For "sqlite3" only. cache mode setting used for connecting to the database
|
||||
cache_mode = private
|
||||
|
||||
# For "mysql" only if migrationLocking feature toggle is set. How many seconds to wait before failing to lock the database for the migrations, default is 0.
|
||||
locking_attempt_timeout_sec = 0
|
||||
|
||||
#################################### Cache server #############################
|
||||
[remote_cache]
|
||||
# Either "redis", "memcached" or "database" default is "database"
|
||||
@@ -178,12 +172,6 @@ idle_conn_timeout_seconds = 90
|
||||
# If enabled and user is not anonymous, data proxy will add X-Grafana-User header with username into the request.
|
||||
send_user_header = false
|
||||
|
||||
# Limit the amount of bytes that will be read/accepted from responses of outgoing HTTP requests.
|
||||
response_limit = 0
|
||||
|
||||
# Limits the number of rows that Grafana will process from SQL data sources.
|
||||
row_limit = 1000000
|
||||
|
||||
#################################### Analytics ###########################
|
||||
[analytics]
|
||||
# Server reporting, sends usage counters to stats.grafana.org every 24 hours.
|
||||
@@ -196,19 +184,12 @@ reporting_enabled = true
|
||||
reporting_distributor = grafana-labs
|
||||
|
||||
# Set to false to disable all checks to https://grafana.com
|
||||
# for new versions of grafana. The check is used
|
||||
# in some UI views to notify that a grafana update exists.
|
||||
# for new versions (grafana itself and plugins), check is used
|
||||
# in some UI views to notify that grafana or plugin update exists
|
||||
# This option does not cause any auto updates, nor send any information
|
||||
# only a GET request to https://raw.githubusercontent.com/grafana/grafana/main/latest.json to get the latest version.
|
||||
# only a GET request to https://grafana.com to get latest versions
|
||||
check_for_updates = true
|
||||
|
||||
# Set to false to disable all checks to https://grafana.com
|
||||
# for new versions of plugins. The check is used
|
||||
# in some UI views to notify that a plugin update exists.
|
||||
# This option does not cause any auto updates, nor send any information
|
||||
# only a GET request to https://grafana.com to get the latest versions.
|
||||
check_for_plugin_updates = true
|
||||
|
||||
# Google Analytics universal tracking code, only enabled if you specify an id here
|
||||
google_analytics_ua_id =
|
||||
|
||||
@@ -221,21 +202,6 @@ rudderstack_write_key =
|
||||
# Rudderstack data plane url, enabled only if rudderstack_write_key is also set
|
||||
rudderstack_data_plane_url =
|
||||
|
||||
# Rudderstack SDK url, optional, only valid if rudderstack_write_key and rudderstack_data_plane_url is also set
|
||||
rudderstack_sdk_url =
|
||||
|
||||
# Rudderstack Config url, optional, used by Rudderstack SDK to fetch source config
|
||||
rudderstack_config_url =
|
||||
|
||||
# Application Insights connection string. Specify an URL string to enable this feature.
|
||||
application_insights_connection_string =
|
||||
|
||||
# Optional. Specifies an Application Insights endpoint URL where the endpoint string is wrapped in backticks ``.
|
||||
application_insights_endpoint_url =
|
||||
|
||||
# Controls if the UI contains any links to user feedback forms
|
||||
feedback_links_enabled = true
|
||||
|
||||
#################################### Security ############################
|
||||
[security]
|
||||
# disable creation of admin user on first start of grafana
|
||||
@@ -250,12 +216,6 @@ admin_password = admin
|
||||
# used for signing
|
||||
secret_key = SW2YcwTIb9zpOOhoPsMm
|
||||
|
||||
# current key provider used for envelope encryption, default to static value specified by secret_key
|
||||
encryption_provider = secretKey.v1
|
||||
|
||||
# list of configured key providers, space separated (Enterprise only): e.g., awskms.v1 azurekv.v1
|
||||
available_encryption_providers =
|
||||
|
||||
# disable gravatar profile images
|
||||
disable_gravatar = false
|
||||
|
||||
@@ -275,6 +235,7 @@ cookie_samesite = lax
|
||||
allow_embedding = false
|
||||
|
||||
# Set to true if you want to enable http strict transport security (HSTS) response header.
|
||||
# This is only sent when HTTPS is enabled in this configuration.
|
||||
# HSTS tells browsers that the site should only be accessed using HTTPS.
|
||||
strict_transport_security = false
|
||||
|
||||
@@ -305,24 +266,12 @@ content_security_policy = false
|
||||
# $ROOT_PATH is server.root_url without the protocol.
|
||||
content_security_policy_template = """script-src 'self' 'unsafe-eval' 'unsafe-inline' 'strict-dynamic' $NONCE;object-src 'none';font-src 'self';style-src 'self' 'unsafe-inline' blob:;img-src * data:;base-uri 'self';connect-src 'self' grafana.com ws://$ROOT_PATH wss://$ROOT_PATH;manifest-src 'self';media-src 'none';form-action 'self';"""
|
||||
|
||||
# Controls if old angular plugins are supported or not. This will be disabled by default in future release
|
||||
angular_support_enabled = true
|
||||
|
||||
[security.encryption]
|
||||
# Defines the time-to-live (TTL) for decrypted data encryption keys stored in memory (cache).
|
||||
# Please note that small values may cause performance issues due to a high frequency decryption operations.
|
||||
data_keys_cache_ttl = 15m
|
||||
|
||||
# Defines the frequency of data encryption keys cache cleanup interval.
|
||||
# On every interval, decrypted data encryption keys that reached the TTL are removed from the cache.
|
||||
data_keys_cache_cleanup_interval = 1m
|
||||
|
||||
#################################### Snapshots ###########################
|
||||
[snapshots]
|
||||
# snapshot sharing options
|
||||
external_enabled = true
|
||||
external_snapshot_url = https://snapshots.raintank.io
|
||||
external_snapshot_name = Publish to snapshots.raintank.io
|
||||
external_snapshot_url = https://snapshots-origin.raintank.io
|
||||
external_snapshot_name = Publish to snapshot.raintank.io
|
||||
|
||||
# Set to true to enable this Grafana instance act as an external snapshot server and allow unauthenticated requests for
|
||||
# creating and deleting snapshots.
|
||||
@@ -425,18 +374,12 @@ oauth_auto_login = false
|
||||
# OAuth state max age cookie duration in seconds. Defaults to 600 seconds.
|
||||
oauth_state_cookie_max_age = 600
|
||||
|
||||
# Skip forced assignment of OrgID 1 or 'auto_assign_org_id' for social logins
|
||||
oauth_skip_org_role_update_sync = false
|
||||
|
||||
# limit of api_key seconds to live before expiration
|
||||
api_key_max_seconds_to_live = -1
|
||||
|
||||
# Set to true to enable SigV4 authentication option for HTTP-based datasources
|
||||
sigv4_auth_enabled = false
|
||||
|
||||
# Set to true to enable verbose logging of SigV4 request signing
|
||||
sigv4_verbose_logging = false
|
||||
|
||||
#################################### Anonymous Auth ######################
|
||||
[auth.anonymous]
|
||||
# enable anonymous access
|
||||
@@ -477,8 +420,6 @@ token_url = https://gitlab.com/oauth/token
|
||||
api_url = https://gitlab.com/api/v4
|
||||
allowed_domains =
|
||||
allowed_groups =
|
||||
role_attribute_path =
|
||||
role_attribute_strict = false
|
||||
|
||||
#################################### Google Auth #########################
|
||||
[auth.google]
|
||||
@@ -523,12 +464,10 @@ auth_url = https://login.microsoftonline.com/<tenant-id>/oauth2/v2.0/authorize
|
||||
token_url = https://login.microsoftonline.com/<tenant-id>/oauth2/v2.0/token
|
||||
allowed_domains =
|
||||
allowed_groups =
|
||||
role_attribute_strict = false
|
||||
|
||||
#################################### Okta OAuth #######################
|
||||
[auth.okta]
|
||||
name = Okta
|
||||
icon = okta
|
||||
enabled = false
|
||||
allow_sign_up = true
|
||||
client_id = some_id
|
||||
@@ -545,7 +484,6 @@ role_attribute_strict = false
|
||||
#################################### Generic OAuth #######################
|
||||
[auth.generic_oauth]
|
||||
name = OAuth
|
||||
icon = signin
|
||||
enabled = false
|
||||
allow_sign_up = true
|
||||
client_id = some_id
|
||||
@@ -560,11 +498,9 @@ role_attribute_path =
|
||||
role_attribute_strict = false
|
||||
groups_attribute_path =
|
||||
id_token_attribute_name =
|
||||
team_ids_attribute_path =
|
||||
auth_url =
|
||||
token_url =
|
||||
api_url =
|
||||
teams_url =
|
||||
allowed_domains =
|
||||
team_ids =
|
||||
allowed_organizations =
|
||||
@@ -572,7 +508,6 @@ tls_skip_verify_insecure = false
|
||||
tls_client_cert =
|
||||
tls_client_key =
|
||||
tls_client_ca =
|
||||
use_pkce = false
|
||||
|
||||
#################################### Basic Auth ##########################
|
||||
[auth.basic]
|
||||
@@ -584,10 +519,11 @@ enabled = false
|
||||
header_name = X-WEBAUTH-USER
|
||||
header_property = username
|
||||
auto_sign_up = true
|
||||
# Deprecated, use sync_ttl instead
|
||||
ldap_sync_ttl = 60
|
||||
sync_ttl = 60
|
||||
whitelist =
|
||||
headers =
|
||||
headers_encoded = false
|
||||
enable_login_token = false
|
||||
|
||||
#################################### Auth JWT ##########################
|
||||
@@ -601,7 +537,6 @@ jwk_set_file =
|
||||
cache_ttl = 60m
|
||||
expected_claims = {}
|
||||
key_file =
|
||||
auto_sign_up = false
|
||||
|
||||
#################################### Auth LDAP ###########################
|
||||
[auth.ldap]
|
||||
@@ -611,7 +546,7 @@ allow_sign_up = true
|
||||
|
||||
# LDAP background sync (Enterprise only)
|
||||
# At 1 am every day
|
||||
sync_cron = "0 1 * * *"
|
||||
sync_cron = "0 0 1 * * *"
|
||||
active_sync_enabled = true
|
||||
|
||||
#################################### AWS ###########################
|
||||
@@ -643,12 +578,6 @@ managed_identity_enabled = false
|
||||
# Should be set for user-assigned identity and should be empty for system-assigned identity
|
||||
managed_identity_client_id =
|
||||
|
||||
#################################### Role-based Access Control ###########
|
||||
[rbac]
|
||||
enabled = true
|
||||
# If enabled, cache permissions in a in memory cache (Enterprise only)
|
||||
permission_cache = true
|
||||
|
||||
#################################### SMTP / Emailing #####################
|
||||
[smtp]
|
||||
enabled = false
|
||||
@@ -786,83 +715,11 @@ global_session = -1
|
||||
# global limit of alerts
|
||||
global_alert_rule = -1
|
||||
|
||||
#################################### Unified Alerting ####################
|
||||
[unified_alerting]
|
||||
# Enable the Unified Alerting sub-system and interface. When enabled we'll migrate all of your alert rules and notification channels to the new system. New alert rules will be created and your notification channels will be converted into an Alertmanager configuration. Previous data is preserved to enable backwards compatibility but new data is removed when switching. When this configuration section and flag are not defined, the state is defined at runtime. See the documentation for more details.
|
||||
enabled =
|
||||
|
||||
# Comma-separated list of organization IDs for which to disable unified alerting. Only supported if unified alerting is enabled.
|
||||
disabled_orgs =
|
||||
|
||||
# Specify the frequency of polling for admin config changes.
|
||||
# The interval string is a possibly signed sequence of decimal numbers, followed by a unit suffix (ms, s, m, h, d), e.g. 30s or 1m.
|
||||
admin_config_poll_interval = 60s
|
||||
|
||||
# Specify the frequency of polling for Alertmanager config changes.
|
||||
# The interval string is a possibly signed sequence of decimal numbers, followed by a unit suffix (ms, s, m, h, d), e.g. 30s or 1m.
|
||||
alertmanager_config_poll_interval = 60s
|
||||
|
||||
# Listen address/hostname and port to receive unified alerting messages for other Grafana instances. The port is used for both TCP and UDP. It is assumed other Grafana instances are also running on the same port.
|
||||
ha_listen_address = "0.0.0.0:9094"
|
||||
|
||||
# Explicit address/hostname and port to advertise other Grafana instances. The port is used for both TCP and UDP.
|
||||
ha_advertise_address = ""
|
||||
|
||||
# Comma-separated list of initial instances (in a format of host:port) that will form the HA cluster. Configuring this setting will enable High Availability mode for alerting.
|
||||
ha_peers = ""
|
||||
|
||||
# Time to wait for an instance to send a notification via the Alertmanager. In HA, each Grafana instance will
|
||||
# be assigned a position (e.g. 0, 1). We then multiply this position with the timeout to indicate how long should
|
||||
# each instance wait before sending the notification to take into account replication lag.
|
||||
# The interval string is a possibly signed sequence of decimal numbers, followed by a unit suffix (ms, s, m, h, d), e.g. 30s or 1m.
|
||||
ha_peer_timeout = 15s
|
||||
|
||||
# The interval between sending gossip messages. By lowering this value (more frequent) gossip messages are propagated
|
||||
# across cluster more quickly at the expense of increased bandwidth usage.
|
||||
# The interval string is a possibly signed sequence of decimal numbers, followed by a unit suffix (ms, s, m, h, d), e.g. 30s or 1m.
|
||||
ha_gossip_interval = 200ms
|
||||
|
||||
# The interval between gossip full state syncs. Setting this interval lower (more frequent) will increase convergence speeds
|
||||
# across larger clusters at the expense of increased bandwidth usage.
|
||||
# The interval string is a possibly signed sequence of decimal numbers, followed by a unit suffix (ms, s, m, h, d), e.g. 30s or 1m.
|
||||
ha_push_pull_interval = 60s
|
||||
|
||||
# Enable or disable alerting rule execution. The alerting UI remains visible. This option has a legacy version in the `[alerting]` section that takes precedence.
|
||||
execute_alerts = true
|
||||
|
||||
# Alert evaluation timeout when fetching data from the datasource. This option has a legacy version in the `[alerting]` section that takes precedence.
|
||||
# The timeout string is a possibly signed sequence of decimal numbers, followed by a unit suffix (ms, s, m, h, d), e.g. 30s or 1m.
|
||||
evaluation_timeout = 30s
|
||||
|
||||
# Number of times we'll attempt to evaluate an alert rule before giving up on that evaluation. This option has a legacy version in the `[alerting]` section that takes precedence.
|
||||
max_attempts = 3
|
||||
|
||||
# Minimum interval to enforce between rule evaluations. Rules will be adjusted if they are less than this value or if they are not multiple of the scheduler interval (10s). Higher values can help with resource management as we'll schedule fewer evaluations over time. This option has a legacy version in the `[alerting]` section that takes precedence.
|
||||
# The interval string is a possibly signed sequence of decimal numbers, followed by a unit suffix (ms, s, m, h, d), e.g. 30s or 1m.
|
||||
min_interval = 10s
|
||||
|
||||
[unified_alerting.screenshots]
|
||||
# Enable screenshots in notifications. This option requires the Grafana Image Renderer plugin.
|
||||
# For more information on configuration options, refer to [rendering].
|
||||
capture = false
|
||||
|
||||
# The maximum number of screenshots that can be taken at the same time. This option is different from
|
||||
# concurrent_render_request_limit as max_concurrent_screenshots sets the number of concurrent screenshots
|
||||
# that can be taken at the same time for all firing alerts where as concurrent_render_request_limit sets
|
||||
# the total number of concurrent screenshots across all Grafana services.
|
||||
max_concurrent_screenshots = 5
|
||||
|
||||
# Uploads screenshots to the local Grafana server or remote storage such as Azure, S3 and GCS. Please
|
||||
# see [external_image_storage] for further configuration options. If this option is false then
|
||||
# screenshots will be persisted to disk for up to temp_data_lifetime.
|
||||
upload_external_image_storage = false
|
||||
|
||||
#################################### Alerting ############################
|
||||
[alerting]
|
||||
# Enable the legacy alerting sub-system and interface. If Unified Alerting is already enabled and you try to go back to legacy alerting, all data that is part of Unified Alerting will be deleted. When this configuration section and flag are not defined, the state is defined at runtime. See the documentation for more details.
|
||||
enabled =
|
||||
|
||||
# Makes it possible to turn off alert execution but alerting UI is visible
|
||||
# Disable alerting engine & UI features
|
||||
enabled = true
|
||||
# Makes it possible to turn off alert rule execution but alerting UI is visible
|
||||
execute_alerts = true
|
||||
|
||||
# Default setting for new alert rules. Defaults to categorize error and timeouts as alerting. (alerting, keep_state)
|
||||
@@ -925,30 +782,15 @@ max_annotations_to_keep =
|
||||
# Enable the Explore section
|
||||
enabled = true
|
||||
|
||||
#################################### Help #############################
|
||||
[help]
|
||||
# Enable the Help section
|
||||
enabled = true
|
||||
|
||||
#################################### Profile #############################
|
||||
[profile]
|
||||
# Enable the Profile section
|
||||
enabled = true
|
||||
|
||||
#################################### Query History #############################
|
||||
[query_history]
|
||||
# Enable the Query history
|
||||
enabled = true
|
||||
|
||||
#################################### Internal Grafana Metrics ############
|
||||
# Metrics available at HTTP URL /metrics and /metrics/plugins/:pluginId
|
||||
# Metrics available at HTTP API Url /metrics
|
||||
[metrics]
|
||||
enabled = true
|
||||
interval_seconds = 10
|
||||
# Disable total stats (stat_totals_*) metrics to be generated
|
||||
disable_total_stats = false
|
||||
|
||||
#If both are set, basic auth will be required for the metrics endpoints.
|
||||
#If both are set, basic auth will be required for the metrics endpoint.
|
||||
basic_auth_username =
|
||||
basic_auth_password =
|
||||
|
||||
@@ -972,7 +814,6 @@ url = https://grafana.com
|
||||
url = https://grafana.com
|
||||
|
||||
#################################### Distributed tracing ############
|
||||
# Opentracing is deprecated use opentelemetry instead
|
||||
[tracing.jaeger]
|
||||
# jaeger destination (ex localhost:6831)
|
||||
address =
|
||||
@@ -996,19 +837,6 @@ zipkin_propagation = false
|
||||
# Not disabling is the most common setting when using Zipkin elsewhere in your infrastructure.
|
||||
disable_shared_zipkin_spans = false
|
||||
|
||||
[tracing.opentelemetry.jaeger]
|
||||
# jaeger destination (ex http://localhost:14268/api/traces)
|
||||
address =
|
||||
# Propagation specifies the text map propagation format: w3c, jaeger
|
||||
propagation =
|
||||
|
||||
# This is a configuration for OTLP exporter with GRPC protocol
|
||||
[tracing.opentelemetry.otlp]
|
||||
# otlp destination (ex localhost:4317)
|
||||
address =
|
||||
# Propagation specifies the text map propagation format: w3c, jaeger
|
||||
propagation =
|
||||
|
||||
#################################### External Image Storage ##############
|
||||
[external_image_storage]
|
||||
# Used for uploading images to public servers so they can be included in slack/email messages.
|
||||
@@ -1052,8 +880,6 @@ container_name =
|
||||
server_url =
|
||||
# If the remote HTTP image renderer service runs on a different server than the Grafana server you may have to configure this to a URL where Grafana is reachable, e.g. http://grafana.domain/.
|
||||
callback_url =
|
||||
# An auth token that will be sent to and verified by the renderer. The renderer will deny any request without an auth token matching the one configured on the renderer side.
|
||||
renderer_token = -
|
||||
# Concurrent render request limit affects when the /render HTTP endpoint is used. Rendering many images at the same time can overload the server,
|
||||
# which this setting can help protect against by only allowing a certain amount of concurrent requests.
|
||||
concurrent_render_request_limit = 30
|
||||
@@ -1068,12 +894,10 @@ enable_alpha = false
|
||||
app_tls_skip_verify_insecure = false
|
||||
# Enter a comma-separated list of plugin identifiers to identify plugins to load even if they are unsigned. Plugins with modified signatures are never loaded.
|
||||
allow_loading_unsigned_plugins =
|
||||
# Enable or disable installing / uninstalling / updating plugins directly from within Grafana.
|
||||
plugin_admin_enabled = true
|
||||
# Enable or disable installing plugins directly from within Grafana.
|
||||
plugin_admin_enabled = false
|
||||
plugin_admin_external_manage_enabled = false
|
||||
plugin_catalog_url = https://grafana.com/grafana/plugins/
|
||||
# Enter a comma-separated list of plugin identifiers to hide in the plugin catalog.
|
||||
plugin_catalog_hidden_plugins =
|
||||
|
||||
#################################### Grafana Live ##########################################
|
||||
[live]
|
||||
@@ -1140,14 +964,12 @@ rendering_chrome_bin =
|
||||
# Mode 'reusable' will have one browser instance and will create a new incognito page on each request.
|
||||
rendering_mode =
|
||||
|
||||
# When rendering_mode = clustered, you can instruct how many browsers or incognito pages can execute concurrently. Default is 'browser'
|
||||
# When rendering_mode = clustered you can instruct how many browsers or incognito pages can execute concurrently. Default is 'browser'
|
||||
# and will cluster using browser instances.
|
||||
# Mode 'context' will cluster using incognito pages.
|
||||
rendering_clustering_mode =
|
||||
# When rendering_mode = clustered, you can define the maximum number of browser instances/incognito pages that can execute concurrently. Default is '5'.
|
||||
# When rendering_mode = clustered you can define maximum number of browser instances/incognito pages that can execute concurrently..
|
||||
rendering_clustering_max_concurrency =
|
||||
# When rendering_mode = clustered, you can specify the duration a rendering request can take before it will time out. Default is `30` seconds.
|
||||
rendering_clustering_timeout =
|
||||
|
||||
# Limit the maximum viewport width, height and device scale factor that can be requested.
|
||||
rendering_viewport_max_width =
|
||||
@@ -1163,32 +985,9 @@ grpc_port =
|
||||
license_path =
|
||||
|
||||
[feature_toggles]
|
||||
# there are currently two ways to enable feature toggles in the `grafana.ini`.
|
||||
# you can either pass an array of feature you want to enable to the `enable` field or
|
||||
# configure each toggle by setting the name of the toggle to true/false. Toggles set to true/false
|
||||
# will take precedence over toggles in the `enable` list.
|
||||
|
||||
# enable = feature1,feature2
|
||||
# enable features, separated by spaces
|
||||
enable =
|
||||
|
||||
# The new prometheus visual query builder
|
||||
promQueryBuilder = true
|
||||
|
||||
# The new loki visual query builder
|
||||
lokiQueryBuilder = true
|
||||
|
||||
# Experimental Explore to Dashboard workflow
|
||||
explore2Dashboard = true
|
||||
|
||||
# Experimental Command Palette
|
||||
commandPalette = true
|
||||
|
||||
# Use dynamic labels in CloudWatch datasource
|
||||
cloudWatchDynamicLabels = true
|
||||
|
||||
# feature1 = true
|
||||
# feature2 = false
|
||||
|
||||
[date_formats]
|
||||
# For information on what formatting patterns that are supported https://momentjs.com/docs/#/displaying/
|
||||
|
||||
@@ -1219,22 +1018,3 @@ default_baselayer_config =
|
||||
|
||||
# Enable or disable loading other base map layers
|
||||
enable_custom_baselayers = true
|
||||
|
||||
#################################### Dashboard previews #####################################
|
||||
|
||||
[dashboard_previews.crawler]
|
||||
# Number of dashboards rendered in parallel. Default is 6.
|
||||
thread_count =
|
||||
|
||||
# Timeout passed down to the Image Renderer plugin. It is used in two separate places within a single rendering request:
|
||||
# First during the initial navigation to the dashboard and then when waiting for all the panels to load. Default is 20s.
|
||||
# This setting should be expressed as a duration. Examples: 10s (seconds), 1m (minutes).
|
||||
rendering_timeout =
|
||||
|
||||
# Maximum duration of a single crawl. Default is 1h.
|
||||
# This setting should be expressed as a duration. Examples: 10s (seconds), 1m (minutes).
|
||||
max_crawl_duration =
|
||||
|
||||
# Minimum interval between two subsequent scheduler runs. Default is 12h.
|
||||
# This setting should be expressed as a duration. Examples: 10s (seconds), 1m (minutes).
|
||||
scheduler_interval =
|
||||
|
||||
@@ -1,68 +1,76 @@
|
||||
# ---
|
||||
# # config file version
|
||||
# apiVersion: 2
|
||||
# apiVersion: 1
|
||||
|
||||
# # <list> list of roles to insert/update/delete
|
||||
# roles:
|
||||
# # <string, required> name of the role you want to create or update. Required.
|
||||
# - name: 'custom:users:writer'
|
||||
# # <string> uid of the role. Has to be unique for all orgs.
|
||||
# uid: customuserswriter1
|
||||
# # <string> description of the role, informative purpose only.
|
||||
# description: 'Create, read, write users'
|
||||
# # <int> version of the role, Grafana will update the role when increased.
|
||||
# version: 2
|
||||
# # <int> org id. Defaults to Grafana's default if not specified.
|
||||
# # list of default built-in role assignments that should be removed
|
||||
# removeDefaultAssignments:
|
||||
# # <string>, must be one of the Organization roles (`Viewer`, `Editor`, `Admin`) or `Grafana Admin`
|
||||
# - builtInRole: "Grafana Admin"
|
||||
# # <string>, must be one of the existing fixed roles
|
||||
# fixedRole: "fixed:permissions:admin"
|
||||
|
||||
# # list of default built-in role assignments that should be added back
|
||||
# addDefaultAssignments:
|
||||
# # <string>, must be one of the Organization roles (`Viewer`, `Editor`, `Admin`) or `Grafana Admin`
|
||||
# - builtInRole: "Admin"
|
||||
# # <string>, must be one of the existing fixed roles
|
||||
# fixedRole: "fixed:reporting:admin:read"
|
||||
|
||||
# # list of roles that should be deleted
|
||||
# deleteRoles:
|
||||
# # <string> name of the role you want to create. Required if no uid is set
|
||||
# - name: "custom:reports:editor"
|
||||
# # <string> uid of the role. Required if no name
|
||||
# uid: "customreportseditor1"
|
||||
# # <int> org id. will default to Grafana's default if not specified
|
||||
# orgId: 1
|
||||
# # <list> list of the permissions granted by this role.
|
||||
# permissions:
|
||||
# # <string, required> action allowed.
|
||||
# - action: 'users:read'
|
||||
# #<string> scope it applies to.
|
||||
# scope: 'users:*'
|
||||
# - action: 'users:write'
|
||||
# scope: 'users:*'
|
||||
# - action: 'users:create'
|
||||
# - name: 'custom:global:users:reader'
|
||||
# # <bool> overwrite org id and creates a global role.
|
||||
# global: true
|
||||
# # <string> state of the role. Defaults to 'present'. If 'absent', role will be deleted.
|
||||
# state: 'absent'
|
||||
# # <bool> force deletion revoking all grants of the role.
|
||||
# # <bool> force deletion revoking all grants of the role
|
||||
# force: true
|
||||
# - uid: 'basic_editor'
|
||||
# version: 2
|
||||
# - name: "custom:global:reports:reader"
|
||||
# uid: "customglobalreportsreader1"
|
||||
# # <bool> overwrite org id and removes a global role
|
||||
# global: true
|
||||
# # <list> list of roles to copy permissions from.
|
||||
# from:
|
||||
# - uid: 'basic_editor'
|
||||
# global: true
|
||||
# - name: 'fixed:users:writer'
|
||||
# global: true
|
||||
# # <list> list of the permissions to add/remove on top of the copied ones.
|
||||
# permissions:
|
||||
# - action: 'users:read'
|
||||
# scope: 'users:*'
|
||||
# - action: 'users:write'
|
||||
# scope: 'users:*'
|
||||
# # <string> state of the permission. Defaults to 'present'. If 'absent', the permission will be removed.
|
||||
# state: absent
|
||||
# force: true
|
||||
|
||||
# # <list> list role assignments to teams to create or remove.
|
||||
# teams:
|
||||
# # <string, required> name of the team you want to assign roles to. Required.
|
||||
# - name: 'Users writers'
|
||||
# # <int> org id. Will default to Grafana's default if not specified.
|
||||
# orgId: 1
|
||||
# # <list> list of roles to assign to the team
|
||||
# roles:
|
||||
# # <string> uid of the role you want to assign to the team.
|
||||
# - uid: 'customuserswriter1'
|
||||
# # <int> org id. Will default to Grafana's default if not specified.
|
||||
# orgId: 1
|
||||
# # <string> name of the role you want to assign to the team.
|
||||
# - name: 'fixed:users:writer'
|
||||
# # <bool> overwrite org id to specify the role is global.
|
||||
# # list of roles to insert/update depending on what is available in the database
|
||||
# roles:
|
||||
# # <string, required> name of the role you want to create. Required
|
||||
# - name: "custom:users:editor"
|
||||
# # <string> uid of the role. Has to be unique for all orgs.
|
||||
# uid: customuserseditor1
|
||||
# # <string> description of the role, informative purpose only.
|
||||
# description: "Role for our custom user editors"
|
||||
# # <int> version of the role, Grafana will update the role when increased
|
||||
# version: 2
|
||||
# # <int> org id. will default to Grafana's default if not specified
|
||||
# orgId: 1
|
||||
# # <list> list of the permissions granted by this role
|
||||
# permissions:
|
||||
# # <string, required> action allowed
|
||||
# - action: "users:read"
|
||||
# #<string> scope it applies to
|
||||
# scope: "users:*"
|
||||
# - action: "users:write"
|
||||
# scope: "users:*"
|
||||
# - action: "users:create"
|
||||
# scope: "users:*"
|
||||
# # <list> list of builtIn roles the role should be assigned to
|
||||
# builtInRoles:
|
||||
# # <string, required> name of the builtin role you want to assign the role to
|
||||
# - name: "Editor"
|
||||
# # <int> org id. will default to the role org id
|
||||
# orgId: 1
|
||||
# - name: "custom:global:users:reader"
|
||||
# uid: "customglobalusersreader1"
|
||||
# description: "Global Role for custom user readers"
|
||||
# version: 1
|
||||
# # <bool> overwrite org id and creates a global role
|
||||
# global: true
|
||||
# permissions:
|
||||
# - action: "users:read"
|
||||
# scope: "users:*"
|
||||
# builtInRoles:
|
||||
# - name: "Viewer"
|
||||
# orgId: 1
|
||||
# - name: "Editor"
|
||||
# # <bool> overwrite org id and assign role globally
|
||||
# global: true
|
||||
# # <string> state of the assignment. Defaults to 'present'. If 'absent', the assignment will be revoked.
|
||||
# state: absent
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.