From 434f9195c4aa860826973174eccc0bd573ed8935 Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Mon, 14 Sep 2020 18:43:14 +0800 Subject: [PATCH 01/29] misc: Increase ulimit to 65535 for test env (#756) --- .../_shared/Vagrantfile.partial.pubKey.rb | 24 +++++++++++++++++++ .../_shared/Vagrantfile.partial.pubKey.ruby | 12 ---------- etc/manualTestEnv/multiHost/Vagrantfile | 2 +- etc/manualTestEnv/multiReplica/Vagrantfile | 2 +- etc/manualTestEnv/singleHost/Vagrantfile | 2 +- .../singleHostMultiDisk/Vagrantfile | 2 +- 6 files changed, 28 insertions(+), 16 deletions(-) create mode 100644 etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.rb delete mode 100644 etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.ruby diff --git a/etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.rb b/etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.rb new file mode 100644 index 0000000000..8b7a381637 --- /dev/null +++ b/etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.rb @@ -0,0 +1,24 @@ +Vagrant.configure("2") do |config| + ssh_pub_key = File.readlines("#{File.dirname(__FILE__)}/vagrant_key.pub").first.strip + + config.vm.box = "hashicorp/bionic64" + config.vm.provision "shell", privileged: false, inline: <<-SHELL + sudo apt install -y zsh + sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" + sudo chsh -s /usr/bin/zsh vagrant + + echo #{ssh_pub_key} >> /home/vagrant/.ssh/authorized_keys + SHELL + + config.vm.provision "shell", privileged: true, inline: <<-SHELL + echo "setting ulimit" + sudo echo "fs.file-max = 65535" >> /etc/sysctl.conf + sudo sysctl -p + sudo echo "* hard nofile 65535" >> /etc/security/limits.conf + sudo echo "* soft nofile 65535" >> /etc/security/limits.conf + sudo echo "root hard nofile 65535" >> /etc/security/limits.conf + sudo echo "root hard nofile 65535" >> /etc/security/limits.conf + SHELL +end + +# ulimit ref: https://my.oschina.net/u/914655/blog/3067520 diff --git a/etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.ruby b/etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.ruby deleted file mode 100644 index 3af2b17570..0000000000 --- a/etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.ruby +++ /dev/null @@ -1,12 +0,0 @@ -Vagrant.configure("2") do |config| - ssh_pub_key = File.readlines("#{File.dirname(__FILE__)}/vagrant_key.pub").first.strip - - config.vm.box = "hashicorp/bionic64" - config.vm.provision "shell", privileged: false, inline: <<-SHELL - sudo apt install -y zsh - sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" - sudo chsh -s /usr/bin/zsh vagrant - - echo #{ssh_pub_key} >> /home/vagrant/.ssh/authorized_keys - SHELL -end diff --git a/etc/manualTestEnv/multiHost/Vagrantfile b/etc/manualTestEnv/multiHost/Vagrantfile index 9f4890fdd1..10db798e87 100644 --- a/etc/manualTestEnv/multiHost/Vagrantfile +++ b/etc/manualTestEnv/multiHost/Vagrantfile @@ -1,4 +1,4 @@ -load "#{File.dirname(__FILE__)}/../_shared/Vagrantfile.partial.pubKey.ruby" +load "#{File.dirname(__FILE__)}/../_shared/Vagrantfile.partial.pubKey.rb" Vagrant.configure("2") do |config| config.vm.provider "virtualbox" do |v| diff --git a/etc/manualTestEnv/multiReplica/Vagrantfile b/etc/manualTestEnv/multiReplica/Vagrantfile index 82098283c1..00b3d0945d 100644 --- a/etc/manualTestEnv/multiReplica/Vagrantfile +++ b/etc/manualTestEnv/multiReplica/Vagrantfile @@ -1,4 +1,4 @@ -load "#{File.dirname(__FILE__)}/../_shared/Vagrantfile.partial.pubKey.ruby" +load 
"#{File.dirname(__FILE__)}/../_shared/Vagrantfile.partial.pubKey.rb" Vagrant.configure("2") do |config| config.vm.provider "virtualbox" do |v| diff --git a/etc/manualTestEnv/singleHost/Vagrantfile b/etc/manualTestEnv/singleHost/Vagrantfile index 77d49ffdea..6bfcc26d49 100644 --- a/etc/manualTestEnv/singleHost/Vagrantfile +++ b/etc/manualTestEnv/singleHost/Vagrantfile @@ -1,4 +1,4 @@ -load "#{File.dirname(__FILE__)}/../_shared/Vagrantfile.partial.pubKey.ruby" +load "#{File.dirname(__FILE__)}/../_shared/Vagrantfile.partial.pubKey.rb" Vagrant.configure("2") do |config| config.vm.provider "virtualbox" do |v| diff --git a/etc/manualTestEnv/singleHostMultiDisk/Vagrantfile b/etc/manualTestEnv/singleHostMultiDisk/Vagrantfile index 971db113cc..b3e910a81d 100644 --- a/etc/manualTestEnv/singleHostMultiDisk/Vagrantfile +++ b/etc/manualTestEnv/singleHostMultiDisk/Vagrantfile @@ -1,4 +1,4 @@ -load "#{File.dirname(__FILE__)}/../_shared/Vagrantfile.partial.pubKey.ruby" +load "#{File.dirname(__FILE__)}/../_shared/Vagrantfile.partial.pubKey.rb" Vagrant.configure("2") do |config| config.vm.provider "virtualbox" do |v| From b3b056df2006e0c37eb57ced0a276dd25f7987d6 Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Thu, 17 Sep 2020 13:43:38 +0800 Subject: [PATCH 02/29] test: Fix frontend CI (#752) --- .github/workflows/e2e-test.yaml | 10 +++++++--- scripts/wait_tiup_playground.sh | 22 ++++++++++++++++++++++ ui/dashboardApp/layout/signin/index.tsx | 1 + ui/tests/e2e/login.test.ts | 8 +++++++- 4 files changed, 37 insertions(+), 4 deletions(-) create mode 100755 scripts/wait_tiup_playground.sh diff --git a/.github/workflows/e2e-test.yaml b/.github/workflows/e2e-test.yaml index 4c55365be4..fb1b3f7469 100644 --- a/.github/workflows/e2e-test.yaml +++ b/.github/workflows/e2e-test.yaml @@ -46,8 +46,9 @@ jobs: run: | curl --proto '=https' --tlsv1.2 -sSf https://tiup-mirrors.pingcap.com/install.sh | sh source /home/runner/.profile - tiup update --nightly - tiup playground nightly --tiflash=0 & + tiup update playground + source /home/runner/.profile + tiup playground v4.0.6 --tiflash=0 & - name: Build UI run: | make ui @@ -55,6 +56,10 @@ jobs: NO_MINIMIZE: true CI: true REACT_APP_MIXPANEL_TOKEN: "" + - name: Wait TiUP Playground + run: | + chmod u+x scripts/wait_tiup_playground.sh + scripts/wait_tiup_playground.sh 15 20 - name: Debug TiUP run: | source /home/runner/.profile @@ -62,7 +67,6 @@ jobs: ls /home/runner/.tiup/components/playground/ DATA_PATH=$(ls /home/runner/.tiup/data/) echo $DATA_PATH - tiup playground display echo "==== TiDB Log ====" head -n 3 /home/runner/.tiup/data/$DATA_PATH/tidb-0/tidb.log echo "==== TiKV Log ====" diff --git a/scripts/wait_tiup_playground.sh b/scripts/wait_tiup_playground.sh new file mode 100755 index 0000000000..4c58bbed58 --- /dev/null +++ b/scripts/wait_tiup_playground.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash +# Wait unitl `tiup playground` command runs success + +INTERVAL=$1 +MAX_TIMES=$2 + +if ([ -z "${INTERVAL}" ] || [ -z "${MAX_TIMES}" ]); then + echo "Usage: command " + exit 1 +fi + +source /home/runner/.profile + +for ((i=0; i<${MAX_TIMES}; i++)); do + tiup playground display + if [ $? 
-eq 0 ]; then + exit 0 + fi + sleep ${INTERVAL} +done + +exit 1 diff --git a/ui/dashboardApp/layout/signin/index.tsx b/ui/dashboardApp/layout/signin/index.tsx index d5cf833698..733f9a3f5c 100644 --- a/ui/dashboardApp/layout/signin/index.tsx +++ b/ui/dashboardApp/layout/signin/index.tsx @@ -220,6 +220,7 @@ function TiDBSignInForm({ successRoute, onClickAlternative }) { } disabled /> { await ppExpect(page).toFill('input#tidb_signin_password', 'any') await ppExpect(page).toClick('button#signin_btn') - await ppExpect(page).toMatch('TiDB authentication failed') + + const failReason = await page.waitForSelector( + 'form#tidb_signin div[data-e2e="password"] div:last-child' + ) + const content = await failReason.evaluate((n) => n.innerText) + console.log('fail reason:', content) + expect(content).toContain('TiDB authentication failed') }, 10 * 1000 ) From 7a737493dc70c09a2fcf50ef49899f1286aefc29 Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Mon, 21 Sep 2020 18:00:31 +0800 Subject: [PATCH 03/29] ui: fix dayjs i18n (#755) --- ui/lib/utils/i18n.ts | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/ui/lib/utils/i18n.ts b/ui/lib/utils/i18n.ts index 001504d06b..0da831e337 100644 --- a/ui/lib/utils/i18n.ts +++ b/ui/lib/utils/i18n.ts @@ -1,5 +1,4 @@ -import 'dayjs/locale/en' -import 'dayjs/locale/zh-cn' +import 'dayjs/locale/zh' import dayjs from 'dayjs' import i18next from 'i18next' @@ -7,7 +6,6 @@ import LanguageDetector from 'i18next-browser-languagedetector' import { initReactI18next } from 'react-i18next' i18next.on('languageChanged', function (lng) { - console.log('Language', lng) dayjs.locale(lng.toLowerCase()) }) @@ -46,8 +44,8 @@ i18next .use(initReactI18next) .init({ resources: {}, // oh! this line is a big pitfall, we can't remove it, else it will cause strange crash! 
- fallbackLng: 'en', - whitelist: ['zh', 'en'], + fallbackLng: 'en', // fallbackLng won't change the detected language + whitelist: ['zh', 'en'], // whitelist will change the detected lanuage interpolation: { escapeValue: false, }, From 2bba342d22f90868c5eebca5319ae3c53912a1fe Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Tue, 22 Sep 2020 22:50:37 +0800 Subject: [PATCH 04/29] ui: handle error globally (#757) --- ui/.storybook/preview.js | 2 - ui/dashboardApp/index.ts | 9 +- ui/dashboardApp/layout/main/Sider/Banner.tsx | 4 +- ui/dashboardApp/layout/main/Sider/index.tsx | 28 ++-- ui/dashboardApp/layout/signin/index.tsx | 23 ++- ui/dashboardApp/layout/translations/en.yaml | 14 -- ui/dashboardApp/layout/translations/zh.yaml | 14 -- .../apps/ClusterInfo/components/HostTable.tsx | 5 +- .../ClusterInfo/components/InstanceTable.tsx | 15 +- .../ClusterInfo/components/StoreLocation.tsx | 8 +- ui/lib/apps/Configuration/InlineEditor.tsx | 11 +- ui/lib/apps/Configuration/index.tsx | 9 +- .../Diagnose/components/DiagnoseHistory.tsx | 5 +- ui/lib/apps/Diagnose/pages/DiagnoseStatus.tsx | 11 +- .../apps/InstanceProfiling/pages/Detail.tsx | 10 +- ui/lib/apps/InstanceProfiling/pages/List.tsx | 16 +-- ui/lib/apps/KeyViz/components/KeyViz.tsx | 16 ++- .../KeyViz/components/KeyVizSettingForm.tsx | 65 +++------ ui/lib/apps/KeyViz/heatmap/utils.ts | 4 +- ui/lib/apps/Overview/components/Instances.tsx | 12 +- .../apps/Overview/components/MonitorAlert.tsx | 8 +- ui/lib/apps/QueryEditor/index.tsx | 13 +- .../SearchLogs/components/SearchHeader.tsx | 14 +- .../SearchLogs/components/SearchResult.tsx | 33 +++-- .../apps/SearchLogs/pages/LogSearchDetail.tsx | 5 +- ui/lib/apps/SlowQuery/pages/Detail/index.tsx | 16 ++- ui/lib/apps/SlowQuery/utils/tableColumns.tsx | 13 -- ui/lib/apps/SlowQuery/utils/useSlowQuery.ts | 19 ++- .../Statement/pages/Detail/PlanDetail.tsx | 6 +- ui/lib/apps/Statement/pages/Detail/index.tsx | 9 +- .../pages/List/StatementSettingForm.tsx | 67 ++++----- ui/lib/apps/Statement/utils/useStatement.ts | 47 ++++--- ui/lib/apps/UserProfile/index.tsx | 17 +-- ui/lib/client/index.ts | 21 --- ui/lib/client/index.tsx | 131 ++++++++++++++++++ ui/lib/client/translations/en.yaml | 14 ++ ui/lib/client/translations/zh.yaml | 14 ++ ui/lib/components/ErrorBar/index.tsx | 4 +- ui/lib/components/InstanceSelect/index.tsx | 15 +- ui/lib/components/MetricChart/index.tsx | 6 +- ui/lib/utils/apiClient.ts | 64 --------- ui/lib/utils/useClientRequest.ts | 34 +++-- 42 files changed, 428 insertions(+), 423 deletions(-) delete mode 100644 ui/lib/client/index.ts create mode 100644 ui/lib/client/index.tsx create mode 100644 ui/lib/client/translations/en.yaml create mode 100644 ui/lib/client/translations/zh.yaml delete mode 100644 ui/lib/utils/apiClient.ts diff --git a/ui/.storybook/preview.js b/ui/.storybook/preview.js index 467451de28..472fca8790 100644 --- a/ui/.storybook/preview.js +++ b/ui/.storybook/preview.js @@ -2,12 +2,10 @@ import React, { useEffect } from 'react' import { addDecorator } from '@storybook/react' import { Root } from '@lib/components' import client from '@lib/client' -import * as apiClient from '@lib/utils/apiClient' import * as auth from '@lib/utils/auth' function StoryRoot({ children }) { useEffect(() => { - apiClient.init() client .getInstance() .userLogin({ diff --git a/ui/dashboardApp/index.ts b/ui/dashboardApp/index.ts index cd9bd6fcd9..79cbf76666 100644 --- a/ui/dashboardApp/index.ts +++ b/ui/dashboardApp/index.ts @@ -10,10 +10,9 @@ import AppRegistry from 
'@lib/utils/registry' import * as routing from '@lib/utils/routing' import * as auth from '@lib/utils/auth' import * as i18n from '@lib/utils/i18n' -import * as apiClient from '@lib/utils/apiClient' import { saveAppOptions, loadAppOptions } from '@lib/utils/appOptions' import * as telemetry from '@lib/utils/telemetry' -import client, { InfoInfoResponse } from '@lib/client' +import client, { ErrorStrategy, InfoInfoResponse } from '@lib/client' import LayoutMain from '@dashboard/layout/main' import LayoutSignIn from '@dashboard/layout/signin' @@ -46,12 +45,12 @@ async function main() { require.context('@dashboard/layout/translations/', false, /\.yaml$/) ) - apiClient.init() - let info: InfoInfoResponse try { - const i = await client.getInstance().infoGet() + const i = await client.getInstance().infoGet({ + errorStrategy: ErrorStrategy.Custom, + }) info = i.data } catch (e) { Modal.error({ diff --git a/ui/dashboardApp/layout/main/Sider/Banner.tsx b/ui/dashboardApp/layout/main/Sider/Banner.tsx index ed9e21ad34..4c9b85ad07 100644 --- a/ui/dashboardApp/layout/main/Sider/Banner.tsx +++ b/ui/dashboardApp/layout/main/Sider/Banner.tsx @@ -55,8 +55,8 @@ export default function ToggleBanner({ width: collapsed ? collapsedWidth : toggleWidth, }) - const { data, isLoading } = useClientRequest((cancelToken) => - client.getInstance().infoGet({ cancelToken }) + const { data, isLoading } = useClientRequest((reqConfig) => + client.getInstance().infoGet(reqConfig) ) const version = useMemo(() => { diff --git a/ui/dashboardApp/layout/main/Sider/index.tsx b/ui/dashboardApp/layout/main/Sider/index.tsx index 5a3679e2a6..3c222823ff 100644 --- a/ui/dashboardApp/layout/main/Sider/index.tsx +++ b/ui/dashboardApp/layout/main/Sider/index.tsx @@ -1,11 +1,11 @@ -import React, { useState, useEffect, useMemo } from 'react' +import React, { useState, useMemo } from 'react' import { ExperimentOutlined, BugOutlined } from '@ant-design/icons' import { Layout, Menu } from 'antd' import { Link } from 'react-router-dom' import { useEventListener } from '@umijs/hooks' import { useTranslation } from 'react-i18next' import { useSpring, animated } from 'react-spring' -import client, { InfoWhoAmIResponse } from '@lib/client' +import client from '@lib/client' import Banner from './Banner' import styles from './index.module.less' @@ -38,20 +38,6 @@ function useActiveAppId(registry) { return appId } -function useCurrentLogin() { - const [login, setLogin] = useState(null) - useEffect(() => { - async function fetch() { - const resp = await client.getInstance().infoWhoami() - if (resp.data) { - setLogin(resp.data) - } - } - fetch() - }, []) - return login -} - function Sider({ registry, fullWidth, @@ -63,10 +49,12 @@ function Sider({ }) { const { t } = useTranslation() const activeAppId = useActiveAppId(registry) - const currentLogin = useCurrentLogin() - const { data } = useClientRequest((cancelToken) => - client.getInstance().infoGet({ cancelToken }) + const { data: currentLogin } = useClientRequest((reqConfig) => + client.getInstance().infoWhoami(reqConfig) + ) + const { data: info } = useClientRequest((reqConfig) => + client.getInstance().infoGet(reqConfig) ) const debugSubMenuItems = [useAppMenuItem(registry, 'instance_profiling')] @@ -113,7 +101,7 @@ function Sider({ debugSubMenu, ] - if (data?.enable_experimental) { + if (info?.enable_experimental) { menuItems.push(experimentalSubMenu) } diff --git a/ui/dashboardApp/layout/signin/index.tsx b/ui/dashboardApp/layout/signin/index.tsx index 733f9a3f5c..9e291de79d 100644 --- 
a/ui/dashboardApp/layout/signin/index.tsx +++ b/ui/dashboardApp/layout/signin/index.tsx @@ -15,7 +15,7 @@ import { import { Form, Input, Button, message, Typography } from 'antd' import { useTranslation } from 'react-i18next' import LanguageDropdown from '@lib/components/LanguageDropdown' -import client, { UserAuthenticateForm } from '@lib/client' +import client, { ErrorStrategy, UserAuthenticateForm } from '@lib/client' import * as auth from '@lib/utils/auth' import { useMount } from 'react-use' import Flexbox from '@g07cha/flexbox-react' @@ -146,28 +146,23 @@ function useSignInSubmit( }, []) const handleSubmit = usePersistFn(async (form) => { - setLoading(true) - clearErrorMsg() - try { - const r = await client.getInstance().userLogin(fnLoginForm(form)) + clearErrorMsg() + setLoading(true) + const r = await client.getInstance().userLogin(fnLoginForm(form), { + errorStrategy: ErrorStrategy.Custom, + }) auth.setAuthToken(r.data.token) message.success(t('signin.message.success')) singleSpa.navigateToUrl(successRoute) } catch (e) { - console.log(e) if (!e.handled) { - let msg - if (e.response.data) { - msg = t(e.response.data.code) - } else { - msg = e.message - } - setError(t('signin.message.error', { msg })) + setError(t('signin.message.error', { msg: e.message })) onFailure() } + } finally { + setLoading(false) } - setLoading(false) }) return { handleSubmit, loading, errorMsg: error, clearErrorMsg } diff --git a/ui/dashboardApp/layout/translations/en.yaml b/ui/dashboardApp/layout/translations/en.yaml index b1b766195e..177a17b6d6 100644 --- a/ui/dashboardApp/layout/translations/en.yaml +++ b/ui/dashboardApp/layout/translations/en.yaml @@ -1,17 +1,3 @@ -error: - message: - network: Network connection error - unauthorized: Session is expired. Please sign in again. 
- tidb: - no_alive_tidb: No live TiDB instance in the cluster - pd_access_failed: Failed to access PD node - tidb_conn_failed: Failed to connect to TiDB - tidb_auth_failed: TiDB authentication failed - api: - user: - signin: - invalid_code: Authorization Code is invalid or expired - other: Other error signin: message: error: 'Sign in failed: {{ msg }}' diff --git a/ui/dashboardApp/layout/translations/zh.yaml b/ui/dashboardApp/layout/translations/zh.yaml index 21254f306d..2778644dbf 100644 --- a/ui/dashboardApp/layout/translations/zh.yaml +++ b/ui/dashboardApp/layout/translations/zh.yaml @@ -1,17 +1,3 @@ -error: - message: - network: 网络连接失败 - unauthorized: 会话已过期,请重新登录 - tidb: - no_alive_tidb: 集群未启动 TiDB 实例 - pd_access_failed: 无法访问 PD 节点 - tidb_conn_failed: 无法连接到 TiDB - tidb_auth_failed: TiDB 登录验证失败 - api: - user: - signin: - invalid_code: 授权码无效或已过期 - other: 其他错误 signin: message: error: '登录失败: {{ msg }}' diff --git a/ui/lib/apps/ClusterInfo/components/HostTable.tsx b/ui/lib/apps/ClusterInfo/components/HostTable.tsx index a655bcf7ee..634dbd996a 100644 --- a/ui/lib/apps/ClusterInfo/components/HostTable.tsx +++ b/ui/lib/apps/ClusterInfo/components/HostTable.tsx @@ -20,8 +20,8 @@ function filterUniquePartitions(items) { export default function HostTable() { const { t } = useTranslation() - const { data: tableData, isLoading } = useClientRequest((cancelToken) => - client.getInstance().getHostsInfo({ cancelToken }) + const { data: tableData, isLoading, error } = useClientRequest((reqConfig) => + client.getInstance().getHostsInfo(reqConfig) ) const columns = [ @@ -206,6 +206,7 @@ System: ${getValueFormat('percentunit')(system)}` loading={isLoading} columns={columns} items={tableData || []} + errors={[error]} /> ) } diff --git a/ui/lib/apps/ClusterInfo/components/InstanceTable.tsx b/ui/lib/apps/ClusterInfo/components/InstanceTable.tsx index f01ac9a6bc..47fe76f6c0 100644 --- a/ui/lib/apps/ClusterInfo/components/InstanceTable.tsx +++ b/ui/lib/apps/ClusterInfo/components/InstanceTable.tsx @@ -57,27 +57,30 @@ function StatusColumn({ export default function ListPage() { const { t } = useTranslation() + const { data: dataTiDB, isLoading: loadingTiDB, error: errTiDB, sendRequest, - } = useClientRequest((cancelToken) => - client.getInstance().getTiDBTopology({ cancelToken }) + } = useClientRequest((reqConfig) => + client.getInstance().getTiDBTopology(reqConfig) ) + const { data: dataStores, isLoading: loadingStores, error: errStores, - } = useClientRequest((cancelToken) => - client.getInstance().getStoreTopology({ cancelToken }) + } = useClientRequest((reqConfig) => + client.getInstance().getStoreTopology(reqConfig) ) + const { data: dataPD, isLoading: loadingPD, error: errPD, - } = useClientRequest((cancelToken) => - client.getInstance().getPDTopology({ cancelToken }) + } = useClientRequest((reqConfig) => + client.getInstance().getPDTopology(reqConfig) ) const [tableData, groupData] = useMemo( diff --git a/ui/lib/apps/ClusterInfo/components/StoreLocation.tsx b/ui/lib/apps/ClusterInfo/components/StoreLocation.tsx index 32a3fba94e..65dec6ff16 100644 --- a/ui/lib/apps/ClusterInfo/components/StoreLocation.tsx +++ b/ui/lib/apps/ClusterInfo/components/StoreLocation.tsx @@ -1,7 +1,7 @@ import React, { useMemo } from 'react' import { useClientRequest } from '@lib/utils/useClientRequest' import client, { TopologyStoreLocation } from '@lib/client' -import { ErrorBar, AnimatedSkeleton } from '@lib/components' +import { AnimatedSkeleton, ErrorBar } from '@lib/components' import StoreLocationTree from 
'./StoreLocationTree' type TreeNode = { @@ -44,14 +44,14 @@ function buildTreeData(data: TopologyStoreLocation | undefined): TreeNode { } export default function StoreLocation() { - const { data, isLoading, error } = useClientRequest((cancelToken) => - client.getInstance().getStoreLocationTopology({ cancelToken }) + const { data, isLoading, error } = useClientRequest((reqConfig) => + client.getInstance().getStoreLocationTopology(reqConfig) ) const treeData = useMemo(() => buildTreeData(data), [data]) return (
- + diff --git a/ui/lib/apps/Configuration/InlineEditor.tsx b/ui/lib/apps/Configuration/InlineEditor.tsx index e126203516..5cbe59c0a2 100644 --- a/ui/lib/apps/Configuration/InlineEditor.tsx +++ b/ui/lib/apps/Configuration/InlineEditor.tsx @@ -1,7 +1,7 @@ import { useState, useCallback, useEffect } from 'react' import React from 'react' import { EditOutlined } from '@ant-design/icons' -import { Input, Popover, Button, Space, Tooltip, Modal } from 'antd' +import { Input, Popover, Button, Space, Tooltip } from 'antd' import { usePersistFn } from '@umijs/hooks' interface IInlineEditorProps { @@ -60,8 +60,8 @@ function InlineEditor({ setIsVisible(false) return } - setIsPosting(true) try { + setIsPosting(true) // PD only accept modified config in the same value type, // i.e. true => false, but not true => "false" const r = await onSave(valueWithSameType(inputVal, value)) @@ -73,14 +73,11 @@ function InlineEditor({ setInputVal(displayValue) } } catch (e) { - Modal.error({ - content: e.message, - zIndex: 2000, // higher than Popover - }) setInputVal(displayValue) setIsVisible(false) + } finally { + setIsPosting(false) } - setIsPosting(false) }) const handleInputValueChange = useCallback((e) => { diff --git a/ui/lib/apps/Configuration/index.tsx b/ui/lib/apps/Configuration/index.tsx index cc653e3f1c..8540032dc4 100644 --- a/ui/lib/apps/Configuration/index.tsx +++ b/ui/lib/apps/Configuration/index.tsx @@ -39,11 +39,6 @@ function Value({ item, onSaved }: IValueProps) { }) } } catch (e) { - Modal.error({ - title: 'Edit configuration failed', - content:
{e?.response?.data?.message ?? e.message}
, - zIndex: 2000, // higher than Popover - }) return false } onSaved?.() @@ -89,8 +84,8 @@ export default function () { isLoading, error, sendRequest, - } = useClientRequest((cancelToken) => - client.getInstance().configurationGetAll({ cancelToken }) + } = useClientRequest((reqConfig) => + client.getInstance().configurationGetAll(reqConfig) ) const { t } = useTranslation() diff --git a/ui/lib/apps/Diagnose/components/DiagnoseHistory.tsx b/ui/lib/apps/Diagnose/components/DiagnoseHistory.tsx index fc20f2a8aa..1bd21b14ae 100644 --- a/ui/lib/apps/Diagnose/components/DiagnoseHistory.tsx +++ b/ui/lib/apps/Diagnose/components/DiagnoseHistory.tsx @@ -96,8 +96,8 @@ const tableColumns = (t: TFunction): IColumn[] => [ export default function DiagnoseHistory() { const navigate = useNavigate() const { t } = useTranslation() - const { data, isLoading } = useClientRequest((cancelToken) => - client.getInstance().diagnoseReportsGet({ cancelToken }) + const { data, isLoading, error } = useClientRequest((reqConfig) => + client.getInstance().diagnoseReportsGet(reqConfig) ) const columns = useMemo(() => tableColumns(t), [t]) @@ -113,6 +113,7 @@ export default function DiagnoseHistory() { loading={isLoading} items={data || []} columns={columns} + errors={[error]} onRowClicked={handleRowClick} /> ) diff --git a/ui/lib/apps/Diagnose/pages/DiagnoseStatus.tsx b/ui/lib/apps/Diagnose/pages/DiagnoseStatus.tsx index 6d22c6a5cd..f08bec87e8 100644 --- a/ui/lib/apps/Diagnose/pages/DiagnoseStatus.tsx +++ b/ui/lib/apps/Diagnose/pages/DiagnoseStatus.tsx @@ -6,7 +6,7 @@ import { ArrowLeftOutlined } from '@ant-design/icons' import client from '@lib/client' import publicPathPrefix from '@lib/utils/publicPathPrefix' -import { AnimatedSkeleton, DateTime, Head } from '@lib/components' +import { AnimatedSkeleton, DateTime, ErrorBar, Head } from '@lib/components' import { useClientRequestWithPolling } from '@lib/utils/useClientRequest' import useQueryParams from '@lib/utils/useQueryParams' @@ -14,13 +14,11 @@ function DiagnoseStatus() { const { t } = useTranslation() const { id } = useQueryParams() - const { data: report, isLoading } = useClientRequestWithPolling( - (cancelToken) => - client.getInstance().diagnoseReportsIdStatusGet(id, { cancelToken }), + const { data: report, isLoading, error } = useClientRequestWithPolling( + (reqConfig) => + client.getInstance().diagnoseReportsIdStatusGet(id, reqConfig), { shouldPoll: (data) => data?.progress! 
< 100, - pollingInterval: 1000, - immediate: true, } ) @@ -48,6 +46,7 @@ function DiagnoseStatus() { } > + {error && } {report && ( diff --git a/ui/lib/apps/InstanceProfiling/pages/Detail.tsx b/ui/lib/apps/InstanceProfiling/pages/Detail.tsx index ef3bbf64f5..51cce5c1a1 100644 --- a/ui/lib/apps/InstanceProfiling/pages/Detail.tsx +++ b/ui/lib/apps/InstanceProfiling/pages/Detail.tsx @@ -40,13 +40,10 @@ export default function Page() { const { t } = useTranslation() const { id } = useQueryParams() - const { data: respData, isLoading } = useClientRequestWithPolling( - (cancelToken) => - client.getInstance().getProfilingGroupDetail(id, { cancelToken }), + const { data: respData, isLoading, error } = useClientRequestWithPolling( + (reqConfig) => client.getInstance().getProfilingGroupDetail(id, reqConfig), { shouldPoll: (data) => !isFinished(data), - pollingInterval: 1000, - immediate: true, } ) @@ -147,9 +144,10 @@ export default function Page() { } /> diff --git a/ui/lib/apps/InstanceProfiling/pages/List.tsx b/ui/lib/apps/InstanceProfiling/pages/List.tsx index 63da1dea7c..0a5c0994bd 100644 --- a/ui/lib/apps/InstanceProfiling/pages/List.tsx +++ b/ui/lib/apps/InstanceProfiling/pages/List.tsx @@ -25,8 +25,9 @@ export default function Page() { const { data: historyTable, isLoading: listLoading, - } = useClientRequest((cancelToken) => - client.getInstance().getProfilingGroups({ cancelToken }) + error: historyError, + } = useClientRequest((reqConfig) => + client.getInstance().getProfilingGroups(reqConfig) ) const { t } = useTranslation() const navigate = useNavigate() @@ -47,7 +48,6 @@ export default function Page() { }) return } - setSubmitting(true) const targets: ModelRequestTargetNode[] = instanceSelect .current!.getInstanceByKeys(fieldsValue.instances) .map((instance) => { @@ -74,15 +74,12 @@ export default function Page() { duration_secs: fieldsValue.duration, } try { + setSubmitting(true) const res = await client.getInstance().startProfiling(req) navigate(`/instance_profiling/detail?id=${res.data.id}`) - } catch (e) { - // FIXME - Modal.error({ - content: e.message, - }) + } finally { + setSubmitting(false) } - setSubmitting(false) }, [navigate] ) @@ -204,6 +201,7 @@ export default function Page() { loading={listLoading} items={historyTable || []} columns={historyTableColumns} + errors={[historyError]} onRowClicked={handleRowClick} /> diff --git a/ui/lib/apps/KeyViz/components/KeyViz.tsx b/ui/lib/apps/KeyViz/components/KeyViz.tsx index de7cbd79c9..1bbbda7de1 100644 --- a/ui/lib/apps/KeyViz/components/KeyViz.tsx +++ b/ui/lib/apps/KeyViz/components/KeyViz.tsx @@ -101,8 +101,8 @@ const KeyViz = () => { const enabled = config?.auto_collection_disabled !== true const updateServiceStatus = useCallback(async function () { - setLoading(true) try { + setLoading(true) const resp = await client.getInstance().keyvisualConfigGet() const config = resp.data const enabled = config?.auto_collection_disabled !== true @@ -110,8 +110,9 @@ const KeyViz = () => { setAutoRefreshSeconds(0) } setConfig(config) - } catch (e) {} - setLoading(false) + } finally { + setLoading(false) + } // eslint-disable-next-line react-hooks/exhaustive-deps }, []) @@ -121,17 +122,18 @@ const KeyViz = () => { if (getAutoRefreshSeconds() > 0) { setRemainingRefreshSeconds(getAutoRefreshSeconds()) } - setLoading(true) - setOnBrush(false) try { + setLoading(true) + setOnBrush(false) const metricType = getMetricType() const data = await cache.fetch( getSelection() || getDateRange(), metricType ) setChartState({ heatmapData: data!, metricType 
}) - } catch (e) {} - setLoading(false) + } finally { + setLoading(false) + } // eslint-disable-next-line react-hooks/exhaustive-deps }, []) diff --git a/ui/lib/apps/KeyViz/components/KeyVizSettingForm.tsx b/ui/lib/apps/KeyViz/components/KeyVizSettingForm.tsx index 7c8bf2c658..eb4bca92e6 100644 --- a/ui/lib/apps/KeyViz/components/KeyVizSettingForm.tsx +++ b/ui/lib/apps/KeyViz/components/KeyVizSettingForm.tsx @@ -1,4 +1,4 @@ -import React, { useEffect, useState, useMemo, useCallback } from 'react' +import React, { useState, useMemo, useCallback } from 'react' import { Form, Skeleton, @@ -11,7 +11,9 @@ import { } from 'antd' import { ExclamationCircleOutlined } from '@ant-design/icons' import { useTranslation } from 'react-i18next' -import client, { ConfigKeyVisualConfig } from '@lib/client' +import client from '@lib/client' +import { useClientRequest } from '@lib/utils/useClientRequest' +import { ErrorBar } from '@lib/components' const policyConfigurable = process.env.NODE_ENV === 'development' @@ -60,50 +62,26 @@ function getPolicyOptions(t) { } function KeyVizSettingForm({ onClose, onConfigUpdated }: Props) { - const [loading, setLoading] = useState(true) const [submitting, setSubmitting] = useState(false) - const [config, setConfig] = useState(null) const { t } = useTranslation() - const onFetchServiceStatus = () => { - setLoading(true) - client - .getInstance() - .keyvisualConfigGet() - .then( - (r) => { - setConfig({ ...r.data }) - setLoading(false) - }, - () => { - setLoading(false) - } - ) - } - - const onSubmitted = () => { - client - .getInstance() - .keyvisualConfigGet() - .then( - (r) => { - setConfig({ ...r.data }) - setSubmitting(false) - onClose() - setTimeout(onConfigUpdated, 500) - }, - () => { - setSubmitting(false) - } - ) - } + const { + data: config, + isLoading: loading, + error, + } = useClientRequest((reqConfig) => + client.getInstance().keyvisualConfigGet(reqConfig) + ) - const onUpdateServiceStatus = (values) => { - setSubmitting(true) - client - .getInstance() - .keyvisualConfigPut(values) - .then(onSubmitted, onSubmitted) + const onUpdateServiceStatus = async (values) => { + try { + setSubmitting(true) + await client.getInstance().keyvisualConfigPut(values) + onClose() + onConfigUpdated() + } finally { + setSubmitting(false) + } } const onSubmit = (values) => { @@ -125,8 +103,6 @@ function KeyVizSettingForm({ onClose, onConfigUpdated }: Props) { } } - useEffect(onFetchServiceStatus, []) - const [form] = Form.useForm() const onValuesChange = useCallback( (changedValues, values) => { @@ -148,6 +124,7 @@ function KeyVizSettingForm({ onClose, onConfigUpdated }: Props) { return ( <> + {error && } {loading && } {!loading && config && (
{ try { let r = g.next() if (r.done) resolve() - } catch (ex) { - reject(ex) + } catch (e) { + reject(e) } setTimeout(advance, 0) } diff --git a/ui/lib/apps/Overview/components/Instances.tsx b/ui/lib/apps/Overview/components/Instances.tsx index a262fe99a4..793921f133 100644 --- a/ui/lib/apps/Overview/components/Instances.tsx +++ b/ui/lib/apps/Overview/components/Instances.tsx @@ -64,11 +64,11 @@ function ComponentItem(props: { export default function Nodes() { const { t } = useTranslation() - const tidbResp = useClientRequest((cancelToken) => - client.getInstance().getTiDBTopology({ cancelToken }) + const tidbResp = useClientRequest((reqConfig) => + client.getInstance().getTiDBTopology(reqConfig) ) - const storeResp = useClientRequest((cancelToken) => - client.getInstance().getStoreTopology({ cancelToken }) + const storeResp = useClientRequest((reqConfig) => + client.getInstance().getStoreTopology(reqConfig) ) const tiKVResp = { ...storeResp, @@ -78,8 +78,8 @@ export default function Nodes() { ...storeResp, data: storeResp.data?.tiflash, } - const pdResp = useClientRequest((cancelToken) => - client.getInstance().getPDTopology({ cancelToken }) + const pdResp = useClientRequest((reqConfig) => + client.getInstance().getPDTopology(reqConfig) ) return ( diff --git a/ui/lib/apps/Overview/components/MonitorAlert.tsx b/ui/lib/apps/Overview/components/MonitorAlert.tsx index 67b8f5bad3..7f907dedcb 100644 --- a/ui/lib/apps/Overview/components/MonitorAlert.tsx +++ b/ui/lib/apps/Overview/components/MonitorAlert.tsx @@ -15,14 +15,14 @@ export default function MonitorAlert() { const { data: amData, isLoading: amIsLoading, - } = useClientRequest((cancelToken) => - client.getInstance().getAlertManagerTopology({ cancelToken }) + } = useClientRequest((reqConfig) => + client.getInstance().getAlertManagerTopology(reqConfig) ) const { data: grafanaData, isLoading: grafanaIsLoading, - } = useClientRequest((cancelToken) => - client.getInstance().getGrafanaTopology({ cancelToken }) + } = useClientRequest((reqConfig) => + client.getInstance().getGrafanaTopology(reqConfig) ) useEffect(() => { diff --git a/ui/lib/apps/QueryEditor/index.tsx b/ui/lib/apps/QueryEditor/index.tsx index 74f313ba7f..650550e4f1 100644 --- a/ui/lib/apps/QueryEditor/index.tsx +++ b/ui/lib/apps/QueryEditor/index.tsx @@ -2,7 +2,7 @@ import React, { useState, useCallback, useRef } from 'react' import cx from 'classnames' import { Root, Card } from '@lib/components' import Split from 'react-split' -import { Button, Modal, Space, Typography } from 'antd' +import { Button, Space, Typography } from 'antd' import { CaretRightOutlined, LoadingOutlined, @@ -30,20 +30,17 @@ function App() { (!results.error_msg && (!results.column_names?.length || !results.rows)) const handleRun = useCallback(async () => { - setRunning(true) - setResults(undefined) try { + setRunning(true) + setResults(undefined) const resp = await client.getInstance().queryEditorRun({ max_rows: MAX_DISPLAY_ROWS, statements: editor.current?.editor.getValue(), }) setResults(resp.data) - } catch (ex) { - Modal.error({ - content: ex.message, - }) + } finally { + setRunning(false) } - setRunning(false) editor.current?.editor.focus() }, []) diff --git a/ui/lib/apps/SearchLogs/components/SearchHeader.tsx b/ui/lib/apps/SearchLogs/components/SearchHeader.tsx index 157e492220..e6f4a1947d 100644 --- a/ui/lib/apps/SearchLogs/components/SearchHeader.tsx +++ b/ui/lib/apps/SearchLogs/components/SearchHeader.tsx @@ -82,7 +82,6 @@ export default function SearchHeader({ taskGroupID }: Props) { }) return } 
- setSubmitting(true) const targets: ModelRequestTargetNode[] = instanceSelect .current!.getInstanceByKeys(fieldsValue.instances) @@ -120,18 +119,15 @@ export default function SearchHeader({ taskGroupID }: Props) { } try { + setSubmitting(true) const result = await client.getInstance().logsTaskgroupPut(req) const id = result?.data?.task_group?.id - if (!id) { - throw new Error('Invalid server response') + if (id) { + navigate(`/search_logs/detail?id=${id}`) } - navigate(`/search_logs/detail?id=${id}`) - } catch (e) { - Modal.error({ - content: e.message, - }) + } finally { + setSubmitting(false) } - setSubmitting(false) }, [navigate] ) diff --git a/ui/lib/apps/SearchLogs/components/SearchResult.tsx b/ui/lib/apps/SearchLogs/components/SearchResult.tsx index 544322e316..55a83715c4 100644 --- a/ui/lib/apps/SearchLogs/components/SearchResult.tsx +++ b/ui/lib/apps/SearchLogs/components/SearchResult.tsx @@ -69,23 +69,26 @@ export default function SearchResult({ patterns, taskGroupID, tasks }: Props) { return } - const res = await client - .getInstance() - .logsTaskgroupsIdPreviewGet(taskGroupID + '') - setData( - res.data.map( - (value, index): LogPreview => { - return { - key: index, - time: dayjs(value.time).format('YYYY-MM-DD HH:mm:ss'), - level: LogLevelText[value.level ?? 0], - component: getComponent(value.task_id), - log: value.message, + try { + const res = await client + .getInstance() + .logsTaskgroupsIdPreviewGet(taskGroupID + '') + setData( + res.data.map( + (value, index): LogPreview => { + return { + key: index, + time: dayjs(value.time).format('YYYY-MM-DD HH:mm:ss'), + level: LogLevelText[value.level ?? 0], + component: getComponent(value.task_id), + log: value.message, + } } - } + ) ) - ) - setLoading(false) + } finally { + setLoading(false) + } } if (tasks.length > 0 && taskGroupID !== tasks[0].task_group_id) { setLoading(true) diff --git a/ui/lib/apps/SearchLogs/pages/LogSearchDetail.tsx b/ui/lib/apps/SearchLogs/pages/LogSearchDetail.tsx index 5b5e60e110..e32353eecd 100644 --- a/ui/lib/apps/SearchLogs/pages/LogSearchDetail.tsx +++ b/ui/lib/apps/SearchLogs/pages/LogSearchDetail.tsx @@ -37,12 +37,9 @@ export default function LogSearchingDetail() { } const { data } = useClientRequestWithPolling( - (cancelToken) => - client.getInstance().logsTaskgroupsIdGet(id, { cancelToken }), + (reqConfig) => client.getInstance().logsTaskgroupsIdGet(id, reqConfig), { shouldPoll: (data) => !isFinished(data), - pollingInterval: 1000, - immediate: true, } ) diff --git a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx index b1bbacdced..dbfd02b104 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx @@ -1,5 +1,5 @@ import React from 'react' -import { Space, Alert } from 'antd' +import { Space } from 'antd' import { useTranslation } from 'react-i18next' import { useLocation, Link } from 'react-router-dom' import { ArrowLeftOutlined } from '@ant-design/icons' @@ -19,6 +19,7 @@ import { CopyLink, CardTabs, AnimatedSkeleton, + ErrorBar, } from '@lib/components' import TabBasic from './DetailTabBasic' import TabTime from './DetailTabTime' @@ -36,12 +37,15 @@ function DetailPage() { const { t } = useTranslation() - const { data, isLoading } = useClientRequest((cancelToken) => + const { data, isLoading, error } = useClientRequest((reqConfig) => client .getInstance() - .slowQueryDetailGet(query.connectId!, query.digest!, query.time!, { - cancelToken, - }) + .slowQueryDetailGet( + query.connectId!, + 
query.digest!, + query.time!, + reqConfig + ) ) const { state: sqlExpanded, toggle: toggleSqlExpanded } = useToggle(false) @@ -61,7 +65,7 @@ function DetailPage() { } > - {!data && } + {error && } {!!data && ( <> diff --git a/ui/lib/apps/SlowQuery/utils/tableColumns.tsx b/ui/lib/apps/SlowQuery/utils/tableColumns.tsx index acd42b9875..6e08efc0c8 100644 --- a/ui/lib/apps/SlowQuery/utils/tableColumns.tsx +++ b/ui/lib/apps/SlowQuery/utils/tableColumns.tsx @@ -28,19 +28,6 @@ function commonColumnName(fieldName: string): any { return } -// temporary not used -// function connectionIDColumn( -// _rows?: { connection_id?: number }[] // used for type check only -// ): IColumn { -// return { -// name: commonColumnName('connection_id'), -// key: 'connection_id', -// fieldName: 'connection_id', -// minWidth: 100, -// maxWidth: 120, -// } -// } - function sqlColumn( _rows?: { query?: string }[], // used for type check only showFullSQL?: boolean diff --git a/ui/lib/apps/SlowQuery/utils/useSlowQuery.ts b/ui/lib/apps/SlowQuery/utils/useSlowQuery.ts index 9c3754cd9d..23a9e4a8f3 100644 --- a/ui/lib/apps/SlowQuery/utils/useSlowQuery.ts +++ b/ui/lib/apps/SlowQuery/utils/useSlowQuery.ts @@ -1,7 +1,7 @@ import { useEffect, useMemo, useState } from 'react' import { useSessionStorageState } from '@umijs/hooks' -import client, { SlowqueryBase } from '@lib/client' +import client, { ErrorStrategy, SlowqueryBase } from '@lib/client' import { calcTimeRange, TimeRange } from '@lib/components' import useOrderState, { IOrderOptions } from '@lib/utils/useOrderState' @@ -81,10 +81,12 @@ export default function useSlowQuery( useEffect(() => { async function querySchemas() { try { - const res = await client.getInstance().infoListDatabases() + const res = await client.getInstance().infoListDatabases({ + errorStrategy: ErrorStrategy.Custom, + }) setAllSchemas(res?.data || []) - } catch (error) { - setErrors((prev) => [...prev, { ...error }]) + } catch (e) { + setErrors((prev) => [...prev, { ...e }]) } } querySchemas() @@ -105,12 +107,15 @@ export default function useSlowQuery( queryTimeRange.beginTime, orderOptions.orderBy, queryOptions.plans, - queryOptions.searchText + queryOptions.searchText, + { + errorStrategy: ErrorStrategy.Custom, + } ) setSlowQueries(res.data || []) setErrors([]) - } catch (error) { - setErrors((prev) => [...prev, { ...error }]) + } catch (e) { + setErrors((prev) => [...prev, { ...e }]) } setLoadingSlowQueries(false) } diff --git a/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx b/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx index e49028f030..ef16819a97 100644 --- a/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx +++ b/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx @@ -12,6 +12,7 @@ import { Expand, CopyLink, AnimatedSkeleton, + ErrorBar, } from '@lib/components' import { useClientRequest } from '@lib/utils/useClientRequest' import client from '@lib/client' @@ -35,7 +36,7 @@ export interface IPlanDetailProps { function PlanDetail({ query }: IPlanDetailProps) { const { t } = useTranslation() - const { data, isLoading } = useClientRequest((cancelToken) => + const { data, isLoading, error } = useClientRequest((reqConfig) => client .getInstance() .statementsPlanDetailGet( @@ -44,7 +45,7 @@ function PlanDetail({ query }: IPlanDetailProps) { query.endTime!, query.plans, query.schema!, - { cancelToken } + reqConfig ) ) const { state: sqlExpanded, toggle: toggleSqlExpanded } = useToggle(false) @@ -69,6 +70,7 @@ function PlanDetail({ query }: IPlanDetailProps) { })} > + {error && } {data && ( <> 
diff --git a/ui/lib/apps/Statement/pages/Detail/index.tsx b/ui/lib/apps/Statement/pages/Detail/index.tsx index 6a9ffc1621..858747e8b0 100644 --- a/ui/lib/apps/Statement/pages/Detail/index.tsx +++ b/ui/lib/apps/Statement/pages/Detail/index.tsx @@ -17,6 +17,7 @@ import { Head, HighlightSQL, TextWithInfo, + ErrorBar, } from '@lib/components' import CopyLink from '@lib/components/CopyLink' import formatSql from '@lib/utils/formatSql' @@ -35,7 +36,7 @@ export interface IPageQuery { function DetailPage() { const query = DetailPage.parseQuery(useLocation().search) - const { data: plans, isLoading } = useClientRequest((cancelToken) => + const { data: plans, isLoading, error } = useClientRequest((reqConfig) => client .getInstance() .statementsPlansGet( @@ -43,7 +44,7 @@ function DetailPage() { query.digest!, query.endTime!, query.schema!, - { cancelToken } + reqConfig ) ) const { t } = useTranslation() @@ -78,9 +79,7 @@ function DetailPage() { } > - {(!plans || plans.length === 0) && ( - - )} + {error && } {plans && plans.length > 0 && ( <> diff --git a/ui/lib/apps/Statement/pages/List/StatementSettingForm.tsx b/ui/lib/apps/Statement/pages/List/StatementSettingForm.tsx index bda3319b43..2d487b9ef8 100644 --- a/ui/lib/apps/Statement/pages/List/StatementSettingForm.tsx +++ b/ui/lib/apps/Statement/pages/List/StatementSettingForm.tsx @@ -1,4 +1,4 @@ -import React, { useEffect, useState } from 'react' +import React, { useMemo, useState } from 'react' import { Form, InputNumber, @@ -13,6 +13,8 @@ import { import { ExclamationCircleOutlined } from '@ant-design/icons' import { useTranslation } from 'react-i18next' import client, { StatementConfig } from '@lib/client' +import { useClientRequest } from '@lib/utils/useClientRequest' +import { ErrorBar } from '@lib/components' interface Props { onClose: () => void @@ -35,42 +37,38 @@ const REFRESH_INTERVAL_MARKS = convertArrToObj([1, 5, 15, 30, 60]) const KEEP_DURATION_MARKS = convertArrToObj([1, 2, 5, 10, 20, 30]) function StatementSettingForm({ onClose, onConfigUpdated }: Props) { - const [loading, setLoading] = useState(false) const [submitting, setSubmitting] = useState(false) - const [oriConfig, setOriConfig] = useState(null) - const [config, setConfig] = useState(null) const { t } = useTranslation() - useEffect(() => { - async function fetchConfig() { - setLoading(true) - const res = await client.getInstance().statementsConfigGet() - if (res?.data) { - const oriConfig = res.data - setOriConfig(oriConfig) + const { + data: oriConfig, + isLoading: loading, + error, + } = useClientRequest((reqConfig) => + client.getInstance().statementsConfigGet(reqConfig) + ) + + const config = useMemo(() => { + if (oriConfig) { + const refresh_interval = Math.ceil(oriConfig.refresh_interval! / 60) + const max_refresh_interval = Math.max(refresh_interval, 60) + const keep_duration = Math.ceil( + (oriConfig.refresh_interval! * oriConfig.history_size!) / (24 * 60 * 60) + ) + const max_keep_duration = Math.max(keep_duration, 30) - const refresh_interval = Math.ceil(oriConfig.refresh_interval! / 60) - const max_refresh_interval = Math.max(refresh_interval, 60) - const keep_duration = Math.ceil( - (oriConfig.refresh_interval! * oriConfig.history_size!) 
/ - (24 * 60 * 60) - ) - const max_keep_duration = Math.max(keep_duration, 30) - setConfig({ - ...oriConfig, - refresh_interval, - keep_duration, - max_refresh_interval, - max_keep_duration, - }) - } - setLoading(false) + return { + ...oriConfig, + refresh_interval, + keep_duration, + max_refresh_interval, + max_keep_duration, + } as InternalStatementConfig } - fetchConfig() - }, []) + return null + }, [oriConfig]) async function updateConfig(values) { - setSubmitting(true) const newConfig: StatementConfig = { enable: values.enable, refresh_interval: values.refresh_interval * 60, @@ -78,11 +76,13 @@ function StatementSettingForm({ onClose, onConfigUpdated }: Props) { (values.keep_duration * 24 * 60) / values.refresh_interval ), } - const res = await client.getInstance().statementsConfigPost(newConfig) - setSubmitting(false) - if (res) { + try { + setSubmitting(true) + await client.getInstance().statementsConfigPost(newConfig) onClose() onConfigUpdated() + } finally { + setSubmitting(false) } } @@ -105,6 +105,7 @@ function StatementSettingForm({ onClose, onConfigUpdated }: Props) { return ( <> + {error && } {loading && } {!loading && config && ( diff --git a/ui/lib/apps/Statement/utils/useStatement.ts b/ui/lib/apps/Statement/utils/useStatement.ts index 00eaea1e92..098b1e44bd 100644 --- a/ui/lib/apps/Statement/utils/useStatement.ts +++ b/ui/lib/apps/Statement/utils/useStatement.ts @@ -1,7 +1,11 @@ import { useEffect, useMemo, useState } from 'react' import { useSessionStorageState } from '@umijs/hooks' -import client, { StatementModel, StatementTimeRange } from '@lib/client' +import client, { + ErrorStrategy, + StatementModel, + StatementTimeRange, +} from '@lib/client' import useOrderState, { IOrderOptions } from '@lib/utils/useOrderState' import { @@ -86,37 +90,45 @@ export default function useStatement( useEffect(() => { async function queryStatementStatus() { try { - const res = await client.getInstance().statementsConfigGet() + const res = await client.getInstance().statementsConfigGet({ + errorStrategy: ErrorStrategy.Custom, + }) setEnable(res?.data.enable!) 
- } catch (error) { - setErrors((prev) => [...prev, { ...error }]) + } catch (e) { + setErrors((prev) => [...prev, { ...e }]) } } async function querySchemas() { try { - const res = await client.getInstance().infoListDatabases() + const res = await client.getInstance().infoListDatabases({ + errorStrategy: ErrorStrategy.Custom, + }) setAllSchemas(res?.data || []) - } catch (error) { - setErrors((prev) => [...prev, { ...error }]) + } catch (e) { + setErrors((prev) => [...prev, { ...e }]) } } async function queryTimeRanges() { try { - const res = await client.getInstance().statementsTimeRangesGet() + const res = await client.getInstance().statementsTimeRangesGet({ + errorStrategy: ErrorStrategy.Custom, + }) setAllTimeRanges(res?.data || []) - } catch (error) { - setErrors((prev) => [...prev, { ...error }]) + } catch (e) { + setErrors((prev) => [...prev, { ...e }]) } } async function queryStmtTypes() { try { - const res = await client.getInstance().statementsStmtTypesGet() + const res = await client.getInstance().statementsStmtTypesGet({ + errorStrategy: ErrorStrategy.Custom, + }) setAllStmtTypes(res?.data || []) - } catch (error) { - setErrors((prev) => [...prev, { ...error }]) + } catch (e) { + setErrors((prev) => [...prev, { ...e }]) } } @@ -143,12 +155,15 @@ export default function useStatement( validTimeRange.end_time!, queryOptions.schemas, queryOptions.stmtTypes, - queryOptions.searchText + queryOptions.searchText, + { + errorStrategy: ErrorStrategy.Custom, + } ) setStatements(res?.data || []) setErrors([]) - } catch (error) { - setErrors((prev) => [...prev, { ...error }]) + } catch (e) { + setErrors((prev) => [...prev, { ...e }]) } setLoadingStatements(false) } diff --git a/ui/lib/apps/UserProfile/index.tsx b/ui/lib/apps/UserProfile/index.tsx index c1f43d6dd7..cd15944b43 100644 --- a/ui/lib/apps/UserProfile/index.tsx +++ b/ui/lib/apps/UserProfile/index.tsx @@ -26,6 +26,7 @@ import { CopyLink, TextWithInfo, Pre, + ErrorBar, } from '@lib/components' import * as auth from '@lib/utils/auth' import { ALL_LANGUAGES } from '@lib/utils/i18n' @@ -44,8 +45,8 @@ function ShareSessionButton() { const [code, setCode] = useState(undefined) const [isCopied, setIsCopied] = useState(false) - const { data } = useClientRequest((cancelToken) => - client.getInstance().infoWhoami({ cancelToken }) + const { data } = useClientRequest((reqConfig) => + client.getInstance().infoWhoami(reqConfig) ) const handleOpen = useCallback(() => { @@ -60,17 +61,12 @@ function ShareSessionButton() { }, []) const handleFinish = useCallback(async (values) => { - setIsPosting(true) try { + setIsPosting(true) const r = await client.getInstance().userShareSession({ expire_in_sec: values.expire * 60 * 60, }) setCode(r.data.code) - } catch (e) { - // TODO: Extract to a common component - Modal.error({ - content:
{e?.response?.data?.message ?? e.message}
, - }) } finally { setIsPosting(false) } @@ -196,8 +192,8 @@ function App() { window.location.reload() }, []) - const { data: info, isLoading } = useClientRequest((cancelToken) => - client.getInstance().infoGet({ cancelToken }) + const { data: info, isLoading, error } = useClientRequest((reqConfig) => + client.getInstance().infoGet(reqConfig) ) return ( @@ -227,6 +223,7 @@ function App() { + {error && } {info && ( + API: {API} +
+ {content} + + ), + }) + } else if (['post', 'put', 'delete', 'patch'].includes(method)) { + Modal.error({ + title: i18next.t('error.title'), + content: content, + zIndex: 2000, // higher than popover + }) + } + err.handled = true + } + + return Promise.reject(err) + }) + + return instance +} + +function init() { + let apiPrefix + if (process.env.NODE_ENV === 'development') { + if (process.env.REACT_APP_DASHBOARD_API_URL) { + apiPrefix = `${process.env.REACT_APP_DASHBOARD_API_URL}/dashboard` + } else { + apiPrefix = 'http://127.0.0.1:12333/dashboard' + } + } else { + apiPrefix = publicPathPrefix + } + const apiUrl = `${apiPrefix}/api` + + const dashboardClient = new DefaultApi( + { + basePath: apiUrl, + apiKey: () => auth.getAuthTokenAsBearer() || '', + baseOptions: { + errorStrategy: ErrorStrategy.Default, + }, + }, + undefined, + initAxios() + ) + + save(apiUrl, dashboardClient) +} + +init() diff --git a/ui/lib/client/translations/en.yaml b/ui/lib/client/translations/en.yaml new file mode 100644 index 0000000000..cbd393ae3d --- /dev/null +++ b/ui/lib/client/translations/en.yaml @@ -0,0 +1,14 @@ +error: + title: Error + network: Network connection error + api: + unauthorized: Session is expired. Please sign in again. + user: + signin: + invalid_code: Authorization Code is invalid or expired + other: Other error + tidb: + no_alive_tidb: No live TiDB instance in the cluster + pd_access_failed: Failed to access PD node + tidb_conn_failed: Failed to connect to TiDB + tidb_auth_failed: TiDB authentication failed diff --git a/ui/lib/client/translations/zh.yaml b/ui/lib/client/translations/zh.yaml new file mode 100644 index 0000000000..f54f31e44d --- /dev/null +++ b/ui/lib/client/translations/zh.yaml @@ -0,0 +1,14 @@ +error: + title: 错误 + network: 网络连接失败 + api: + unauthorized: 会话已过期,请重新登录 + user: + signin: + invalid_code: 授权码无效或已过期 + other: 其他错误 + tidb: + no_alive_tidb: 集群未启动 TiDB 实例 + pd_access_failed: 无法访问 PD 节点 + tidb_conn_failed: 无法连接到 TiDB + tidb_auth_failed: TiDB 登录验证失败 diff --git a/ui/lib/components/ErrorBar/index.tsx b/ui/lib/components/ErrorBar/index.tsx index 52ef3f4dff..8ff81e1b2e 100644 --- a/ui/lib/components/ErrorBar/index.tsx +++ b/ui/lib/components/ErrorBar/index.tsx @@ -10,9 +10,7 @@ export default function ErrorBar({ errors }: IErrorBarProps) { // show at most 3 kinds of errors const errorMsgs = useMemo( () => - _.uniq( - _.map(errors, (err) => err?.response?.data?.message || err?.msg || '') - ) + _.uniq(_.map(errors, (err) => err?.message || '')) .filter((msg) => msg !== '') .slice(0, 3), [errors] diff --git a/ui/lib/components/InstanceSelect/index.tsx b/ui/lib/components/InstanceSelect/index.tsx index 9e672267e0..e78e965345 100644 --- a/ui/lib/components/InstanceSelect/index.tsx +++ b/ui/lib/components/InstanceSelect/index.tsx @@ -95,20 +95,17 @@ function InstanceSelect( const { data: dataTiDB, isLoading: loadingTiDB, - } = useClientRequest((cancelToken) => - client.getInstance().getTiDBTopology({ cancelToken }) + } = useClientRequest((reqConfig) => + client.getInstance().getTiDBTopology(reqConfig) ) const { data: dataStores, isLoading: loadingStores, - } = useClientRequest((cancelToken) => - client.getInstance().getStoreTopology({ cancelToken }) + } = useClientRequest((reqConfig) => + client.getInstance().getStoreTopology(reqConfig) ) - const { - data: dataPD, - isLoading: loadingPD, - } = useClientRequest((cancelToken) => - client.getInstance().getPDTopology({ cancelToken }) + const { data: dataPD, isLoading: loadingPD } = useClientRequest((reqConfig) => + 
client.getInstance().getPDTopology(reqConfig) ) const columns: IColumn[] = useMemo( diff --git a/ui/lib/components/MetricChart/index.tsx b/ui/lib/components/MetricChart/index.tsx index e2a7a2a3ad..98f1df4949 100644 --- a/ui/lib/components/MetricChart/index.tsx +++ b/ui/lib/components/MetricChart/index.tsx @@ -72,7 +72,7 @@ export default function MetricChart({ const timeParams = useRef(getTimeParams()) const { isLoading, data, error, sendRequest } = useBatchClientRequest( - series.map((s) => (cancelToken) => + series.map((s) => (reqConfig) => client .getInstance() .metricsQueryGet( @@ -80,9 +80,7 @@ export default function MetricChart({ s.query, timeParams.current.beginTimeSec, 30, - { - cancelToken, - } + reqConfig ) ) ) diff --git a/ui/lib/utils/apiClient.ts b/ui/lib/utils/apiClient.ts deleted file mode 100644 index 699830893d..0000000000 --- a/ui/lib/utils/apiClient.ts +++ /dev/null @@ -1,64 +0,0 @@ -import i18next from 'i18next' -import axios from 'axios' -import { message } from 'antd' -import * as singleSpa from 'single-spa' -import DashboardClient, { DefaultApi } from '@lib/client' -import * as auth from '@lib/utils/auth' -import * as routing from '@lib/utils/routing' -import publicPathPrefix from '@lib/utils/publicPathPrefix' - -function initAxios() { - const instance = axios.create() - - instance.interceptors.response.use(undefined, function (err) { - const { response } = err - // Handle unauthorized error in a unified way - if ( - response && - response.data && - response.data.code === 'error.api.unauthorized' - ) { - if (!routing.isLocationMatch('/') && !routing.isSignInPage()) { - message.error(i18next.t('error.message.unauthorized')) - } - auth.clearAuthToken() - singleSpa.navigateToUrl('#' + routing.signInRoute) - err.handled = true - } else if (err.message === 'Network Error') { - const content = i18next.t('error.message.network') - message.error({ content, key: 'network_error' }) // use the same key to avoid multiple message boxes - err.handled = true - err.msg = content // use `err.message = content` doesn't work - } - return Promise.reject(err) - }) - - return instance -} - -export function init() { - let apiPrefix - if (process.env.NODE_ENV === 'development') { - if (process.env.REACT_APP_DASHBOARD_API_URL) { - apiPrefix = `${process.env.REACT_APP_DASHBOARD_API_URL}/dashboard` - } else { - apiPrefix = 'http://127.0.0.1:12333/dashboard' - } - } else { - apiPrefix = publicPathPrefix - } - const apiUrl = `${apiPrefix}/api` - - console.log('API BasePath: %s', apiUrl) - - const dashboardClient = new DefaultApi( - { - basePath: apiUrl, - apiKey: () => auth.getAuthTokenAsBearer() || '', - }, - undefined, - initAxios() - ) - - DashboardClient.init(apiUrl, dashboardClient) -} diff --git a/ui/lib/utils/useClientRequest.ts b/ui/lib/utils/useClientRequest.ts index 26bc6aca7e..87e9dcf0a0 100644 --- a/ui/lib/utils/useClientRequest.ts +++ b/ui/lib/utils/useClientRequest.ts @@ -1,14 +1,20 @@ import { useMount, useUnmount, usePersistFn } from '@umijs/hooks' import { useState, useRef, useEffect } from 'react' -import { CancelToken, AxiosPromise, CancelTokenSource } from 'axios' -import axios from 'axios' +import axios, { CancelToken, AxiosPromise, CancelTokenSource } from 'axios' -interface RequestFactory { - (token: CancelToken): AxiosPromise +import { ErrorStrategy } from '@lib/client' + +export interface ReqConfig { + cancelToken: CancelToken + errorStrategy: ErrorStrategy +} + +export interface RequestFactory { + (reqConfig: ReqConfig): AxiosPromise } interface Options { - immediate: 
boolean + immediate?: boolean afterRequest?: () => void beforeRequest?: () => void } @@ -53,7 +59,11 @@ export function useClientRequest( })) try { - const resp = await reqFactory(cancelTokenSource.current.token) + const reqConfig: ReqConfig = { + cancelToken: cancelTokenSource.current.token, + errorStrategy: ErrorStrategy.Custom, // handle the error by component self + } + const resp = await reqFactory(reqConfig) if (mounted.current) { setState({ data: resp.data, @@ -119,9 +129,11 @@ export function useBatchClientRequest( const sendRequestEach = async (idx) => { try { - const resp = await reqFactories[idx]( - cancelTokenSource.current![idx].token - ) + const reqConfig: ReqConfig = { + cancelToken: cancelTokenSource.current![idx].token, + errorStrategy: ErrorStrategy.Custom, + } + const resp = await reqFactories[idx](reqConfig) if (mounted.current) { setState((s) => { s.data[idx] = resp.data @@ -191,8 +203,8 @@ export function useBatchClientRequest( } interface OptionsWithPolling extends Options { - pollingInterval: number - shouldPoll: ((data: T) => boolean) | null + pollingInterval?: number + shouldPoll?: ((data: T) => boolean) | null } export function useClientRequestWithPolling( From 0e53dcbc55700a29608fa8aee1f5534c1c8371bd Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Mon, 28 Sep 2020 10:04:18 +0800 Subject: [PATCH 05/29] statement, slow_query: support all fields in list page (#749) --- go.mod | 1 + go.sum | 2 + pkg/apiserver/slowquery/queries.go | 105 ++-- pkg/apiserver/slowquery/service.go | 2 +- pkg/apiserver/statement/queries.go | 33 +- pkg/apiserver/statement/statement.go | 14 +- .../Overview/components/RecentSlowQueries.tsx | 42 +- .../Overview/components/RecentStatements.tsx | 55 +- .../SlowQuery/components/SlowQueriesTable.tsx | 48 +- ui/lib/apps/SlowQuery/index.tsx | 4 +- .../SlowQuery/pages/Detail/DetailTabTxn.tsx | 2 +- ui/lib/apps/SlowQuery/pages/Detail/index.tsx | 4 +- ui/lib/apps/SlowQuery/pages/List/index.tsx | 73 +-- ui/lib/apps/SlowQuery/translations/en.yaml | 9 +- ui/lib/apps/SlowQuery/translations/zh.yaml | 9 +- ui/lib/apps/SlowQuery/utils/tableColumns.tsx | 346 +++-------- ...uery.ts => useSlowQueryTableController.ts} | 63 +- .../Statement/components/StatementsTable.tsx | 53 +- ui/lib/apps/Statement/index.tsx | 4 +- .../Statement/pages/Detail/SlowQueryTab.tsx | 29 +- ui/lib/apps/Statement/pages/List/index.tsx | 80 +-- ui/lib/apps/Statement/translations/en.yaml | 10 +- ui/lib/apps/Statement/translations/zh.yaml | 2 + ui/lib/apps/Statement/utils/tableColumns.tsx | 540 ++++++++---------- ...ment.ts => useStatementTableController.ts} | 58 +- ui/lib/components/ColumnsSelector/index.tsx | 41 +- ui/lib/utils/tableColumnFactory.tsx | 254 ++++++++ ui/lib/utils/tableColumns.tsx | 18 +- 28 files changed, 996 insertions(+), 905 deletions(-) rename ui/lib/apps/SlowQuery/utils/{useSlowQuery.ts => useSlowQueryTableController.ts} (61%) rename ui/lib/apps/Statement/utils/{useStatement.ts => useStatementTableController.ts} (71%) create mode 100644 ui/lib/utils/tableColumnFactory.tsx diff --git a/go.mod b/go.mod index 8dfbb9352d..947eb421dd 100644 --- a/go.mod +++ b/go.mod @@ -33,6 +33,7 @@ require ( github.com/stretchr/testify v1.5.1 github.com/swaggo/http-swagger v0.0.0-20200308142732-58ac5e232fba github.com/swaggo/swag v1.6.6-0.20200529100950-7c765ddd0476 + github.com/thoas/go-funk v0.7.0 github.com/vmihailenco/msgpack/v5 v5.0.0-beta.1 go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738 go.uber.org/atomic v1.5.0 diff --git 
a/go.sum b/go.sum index a5369d5a0d..5cb1fd340d 100644 --- a/go.sum +++ b/go.sum @@ -315,6 +315,8 @@ github.com/swaggo/swag v1.5.1/go.mod h1:1Bl9F/ZBpVWh22nY0zmYyASPO1lI/zIwRDrpZU+t github.com/swaggo/swag v1.6.3/go.mod h1:wcc83tB4Mb2aNiL/HP4MFeQdpHUrca+Rp/DRNgWAUio= github.com/swaggo/swag v1.6.6-0.20200529100950-7c765ddd0476 h1:UjnSXdNPIG+5FJ6xLQODEdk7gSnJlMldu3sPAxxCO+4= github.com/swaggo/swag v1.6.6-0.20200529100950-7c765ddd0476/go.mod h1:xDhTyuFIujYiN3DKWC/H/83xcfHp+UE/IzWWampG7Zc= +github.com/thoas/go-funk v0.7.0 h1:GmirKrs6j6zJbhJIficOsz2aAI7700KsU/5YrdHRM1Y= +github.com/thoas/go-funk v0.7.0/go.mod h1:+IWnUfUmFO1+WVYQWQtIJHeRRdaIyyYglZN7xzUPe4Q= github.com/tidwall/gjson v1.3.5 h1:2oW9FBNu8qt9jy5URgrzsVx/T/KSn3qn/smJQ0crlDQ= github.com/tidwall/gjson v1.3.5/go.mod h1:P256ACg0Mn+j1RXIDXoss50DeIABTYK1PULOJHhxOls= github.com/tidwall/match v1.0.1 h1:PnKP62LPNxHKTwvHHZZzdOAOCtsJTjo6dZLCwpKm5xc= diff --git a/pkg/apiserver/slowquery/queries.go b/pkg/apiserver/slowquery/queries.go index 9ed2b175da..4793d3c2ad 100644 --- a/pkg/apiserver/slowquery/queries.go +++ b/pkg/apiserver/slowquery/queries.go @@ -19,6 +19,7 @@ import ( "strings" "github.com/jinzhu/gorm" + "github.com/thoas/go-funk" ) const ( @@ -26,8 +27,7 @@ const ( SelectStmt = "*, (unix_timestamp(Time) + 0E0) as timestamp" ) -type Base struct { - // list fields +type SlowQuery struct { Digest string `gorm:"column:Digest" json:"digest"` Query string `gorm:"column:Query" json:"query"` @@ -36,19 +36,14 @@ type Base struct { ConnectionID uint `gorm:"column:Conn_ID" json:"connection_id"` Success int `gorm:"column:Succ" json:"success"` - Time string `gorm:"column:Time" json:"time_str"` // finish time - Timestamp float64 `gorm:"column:timestamp" json:"timestamp"` // unix_timestamp(Time) as timestamp - QueryTime float64 `gorm:"column:Query_time" json:"query_time"` // latency + Timestamp float64 `gorm:"column:timestamp" proj:"(unix_timestamp(Time) + 0E0)" json:"timestamp"` // finish time + QueryTime float64 `gorm:"column:Query_time" json:"query_time"` // latency ParseTime float64 `gorm:"column:Parse_time" json:"parse_time"` CompileTime float64 `gorm:"column:Compile_time" json:"compile_time"` ProcessTime float64 `gorm:"column:Process_time" json:"process_time"` MemoryMax int `gorm:"column:Mem_max" json:"memory_max"` TxnStartTS uint `gorm:"column:Txn_start_ts" json:"txn_start_ts"` -} - -type SlowQuery struct { - *Base `gorm:"embedded"` // Detail PrevStmt string `gorm:"column:Prev_stmt" json:"prev_stmt"` @@ -56,7 +51,7 @@ type SlowQuery struct { // Basic IsInternal int `gorm:"column:Is_internal" json:"is_internal"` - IndexNames string `gorm:"column:Index_name" json:"index_names"` + IndexNames string `gorm:"column:Index_names" json:"index_names"` Stats string `gorm:"column:Stats" json:"stats"` BackoffTypes string `gorm:"column:Backoff_types" json:"backoff_types"` @@ -106,46 +101,58 @@ type GetListRequest struct { // for showing slow queries in the statement detail page Plans []string `json:"plans" form:"plans"` Digest string `json:"digest" form:"digest"` -} -type GetDetailRequest struct { - Digest string `json:"digest" form:"digest"` - Time float64 `json:"time" form:"time"` - ConnectID int64 `json:"connect_id" form:"connect_id"` + Fields string `json:"fields" form:"fields"` // example: "Query,Digest" } -func getAllColumnNames() []string { - t := reflect.TypeOf(Base{}) +func getProjectionsByFields(jsonFields ...string) ([]string, error) { + fields := make(map[string]*reflect.StructField) + t := 
reflect.TypeOf(SlowQuery{}) fieldsNum := t.NumField() - ret := make([]string, 0, fieldsNum) for i := 0; i < fieldsNum; i++ { field := t.Field(i) - if s, ok := field.Tag.Lookup("gorm"); ok { - list := strings.Split(s, ":") - if len(list) < 1 { - panic(fmt.Sprintf("Unknown gorm tag field: %s", s)) - } - ret = append(ret, list[1]) + fields[strings.ToLower(field.Tag.Get("json"))] = &field + } + ret := make([]string, 0, len(jsonFields)) + for _, fieldName := range jsonFields { + field, ok := fields[strings.ToLower(fieldName)] + if !ok { + return nil, fmt.Errorf("unknown field %s", fieldName) + } + // ignore to check error because the field is defined by ourself + // we can confirm that it has "gorm" tag and fixed structure + s, _ := field.Tag.Lookup("gorm") + sourceField := strings.Split(s, ":")[1] + if proj, ok := field.Tag.Lookup("proj"); ok { + ret = append(ret, fmt.Sprintf("%s AS %s", proj, sourceField)) + } else { + ret = append(ret, sourceField) } } - ret = append(ret, "Time") - return ret + return ret, nil } -func isValidColumnName(name string) bool { - for _, item := range getAllColumnNames() { - if name == item { - return true - } - } - return false +type GetDetailRequest struct { + Digest string `json:"digest" form:"digest"` + Timestamp float64 `json:"timestamp" form:"timestamp"` + ConnectID int64 `json:"connect_id" form:"connect_id"` } -func QuerySlowLogList(db *gorm.DB, req *GetListRequest) ([]Base, error) { +func QuerySlowLogList(db *gorm.DB, req *GetListRequest) ([]SlowQuery, error) { + sqlFields := []string{"digest", "connection_id", "timestamp"} + if strings.TrimSpace(req.Fields) != "" { + sqlFields = append(sqlFields, strings.Split(req.Fields, ",")...) + sqlFields = funk.UniqString(sqlFields) + } + projections, err := getProjectionsByFields(sqlFields...) + if err != nil { + return nil, err + } + tx := db. Table(SlowQueryTable). - Select(SelectStmt). - Where("time between from_unixtime(?) and from_unixtime(?)", req.LogStartTS, req.LogEndTS). + Select(strings.Join(projections, ", ")). + Where("Time between from_unixtime(?) and from_unixtime(?)", req.LogStartTS, req.LogEndTS). Limit(req.Limit) if req.Text != "" { @@ -166,13 +173,19 @@ func QuerySlowLogList(db *gorm.DB, req *GetListRequest) ([]Base, error) { tx = tx.Where("DB IN (?)", req.DB) } - order := req.OrderBy - if isValidColumnName(order) { - if req.DESC { - tx = tx.Order(fmt.Sprintf("%s desc", order)) - } else { - tx = tx.Order(fmt.Sprintf("%s asc", order)) - } + order, err := getProjectionsByFields(req.OrderBy) + if err != nil { + return nil, err + } + // to handle the special case: timestamp + // if req.OrderBy is "timestamp", then the order is "(unix_timestamp(Time) + 0E0) AS timestamp" + if strings.Contains(order[0], " AS ") { + order[0] = req.OrderBy + } + if req.DESC { + tx = tx.Order(fmt.Sprintf("%s desc", order[0])) + } else { + tx = tx.Order(fmt.Sprintf("%s asc", order[0])) } if len(req.Plans) > 0 { @@ -183,8 +196,8 @@ func QuerySlowLogList(db *gorm.DB, req *GetListRequest) ([]Base, error) { tx = tx.Where("Digest = ?", req.Digest) } - var results []Base - err := tx.Find(&results).Error + var results []SlowQuery + err = tx.Find(&results).Error if err != nil { return nil, err } @@ -197,7 +210,7 @@ func QuerySlowLogDetail(db *gorm.DB, req *GetDetailRequest) (*SlowQuery, error) Table(SlowQueryTable). Select(SelectStmt). Where("Digest = ?", req.Digest). - Where("Time = from_unixtime(?)", req.Time). + Where("Time = from_unixtime(?)", req.Timestamp). Where("Conn_id = ?", req.ConnectID). 
First(&result).Error if err != nil { diff --git a/pkg/apiserver/slowquery/service.go b/pkg/apiserver/slowquery/service.go index c807e2735c..9f04fcd304 100644 --- a/pkg/apiserver/slowquery/service.go +++ b/pkg/apiserver/slowquery/service.go @@ -47,7 +47,7 @@ func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { // @Summary List all slow queries // @Param q query GetListRequest true "Query" -// @Success 200 {array} Base +// @Success 200 {array} SlowQuery // @Router /slow_query/list [get] // @Security JwtAuth // @Failure 401 {object} utils.APIError "Unauthorized failure" diff --git a/pkg/apiserver/statement/queries.go b/pkg/apiserver/statement/queries.go index d2b4e24bd4..d8c07b8eb6 100644 --- a/pkg/apiserver/statement/queries.go +++ b/pkg/apiserver/statement/queries.go @@ -20,6 +20,7 @@ import ( "strings" "github.com/jinzhu/gorm" + "github.com/thoas/go-funk" ) const ( @@ -128,35 +129,19 @@ func QueryStmtTypes(db *gorm.DB) (result []string, err error) { // endTime: 1586845800 // schemas: ["tpcc", "test"] // stmtTypes: ["select", "update"] +// fields: ["digest_text", "sum_latency"] func QueryStatementsOverview( db *gorm.DB, beginTime, endTime int, schemas, stmtTypes []string, - text string) (result []Model, err error) { - fields := getAggrFields( - "plan_count", - "table_names", - "schema_name", - "digest", - "digest_text", - "sum_latency", - "exec_count", - "max_latency", - "avg_latency", - "min_latency", - "avg_mem", - "max_mem", - "sum_errors", - "sum_warnings", - "avg_parse_latency", - "max_parse_latency", - "avg_compile_latency", - "max_compile_latency", - "avg_cop_process_time", - "max_cop_process_time") - // `table_names` is used to populate `related_schemas`. + text string, + fields []string, +) (result []Model, err error) { + fields = funk.UniqString(append(fields, "schema_name", "digest", "sum_latency")) // "schema_name", "digest" for group, "sum_latency" for order + aggrFields := getAggrFields(fields...) + query := db. - Select(strings.Join(fields, ", ")). + Select(strings.Join(aggrFields, ", ")). Table(statementsTable). Where("summary_begin_time >= FROM_UNIXTIME(?) AND summary_end_time <= FROM_UNIXTIME(?)", beginTime, endTime). Group("schema_name, digest"). 
diff --git a/pkg/apiserver/statement/statement.go b/pkg/apiserver/statement/statement.go index f6e845eece..2290dff902 100644 --- a/pkg/apiserver/statement/statement.go +++ b/pkg/apiserver/statement/statement.go @@ -15,6 +15,7 @@ package statement import ( "net/http" + "strings" "github.com/gin-gonic/gin" "go.uber.org/fx" @@ -122,6 +123,7 @@ type GetStatementsRequest struct { BeginTime int `json:"begin_time" form:"begin_time"` EndTime int `json:"end_time" form:"end_time"` Text string `json:"text" form:"text"` + Fields string `json:"fields" form:"fields"` } // @Summary Get a list of statement overviews @@ -137,7 +139,17 @@ func (s *Service) overviewsHandler(c *gin.Context) { return } db := utils.GetTiDBConnection(c) - overviews, err := QueryStatementsOverview(db, req.BeginTime, req.EndTime, req.Schemas, req.StmtTypes, req.Text) + fields := []string{} + if strings.TrimSpace(req.Fields) != "" { + fields = strings.Split(req.Fields, ",") + } + overviews, err := QueryStatementsOverview( + db, + req.BeginTime, req.EndTime, + req.Schemas, + req.StmtTypes, + req.Text, + fields) if err != nil { _ = c.Error(err) return diff --git a/ui/lib/apps/Overview/components/RecentSlowQueries.tsx b/ui/lib/apps/Overview/components/RecentSlowQueries.tsx index 6b348f5948..7567b80d14 100644 --- a/ui/lib/apps/Overview/components/RecentSlowQueries.tsx +++ b/ui/lib/apps/Overview/components/RecentSlowQueries.tsx @@ -4,33 +4,28 @@ import { useTranslation } from 'react-i18next' import { Link } from 'react-router-dom' import { DateTime } from '@lib/components' -import { SlowQueriesTable, useSlowQuery } from '@lib/apps/SlowQuery' -import { defSlowQueryColumnKeys } from '@lib/apps/SlowQuery/pages/List' -import { DEF_SLOW_QUERY_OPTIONS } from '@lib/apps/SlowQuery/utils/useSlowQuery' +import { + SlowQueriesTable, + useSlowQueryTableController, + DEF_SLOW_QUERY_COLUMN_KEYS, + DEF_SLOW_QUERY_OPTIONS, +} from '@lib/apps/SlowQuery' export default function RecentSlowQueries() { const { t } = useTranslation() + const controller = useSlowQueryTableController( + DEF_SLOW_QUERY_COLUMN_KEYS, + false, + { ...DEF_SLOW_QUERY_OPTIONS, limit: 10 }, + false + ) const { - orderOptions, - changeOrder, - - loadingSlowQueries, - slowQueries, - queryTimeRange, - - errors, - } = useSlowQuery({ ...DEF_SLOW_QUERY_OPTIONS, limit: 10 }, false) + queryTimeRange: { beginTime, endTime }, + } = controller return ( {t('overview.recent_slow_query.title')} @@ -38,11 +33,8 @@ export default function RecentSlowQueries() { } subTitle={ - {' '} - ~{' '} - + ~{' '} + } /> diff --git a/ui/lib/apps/Overview/components/RecentStatements.tsx b/ui/lib/apps/Overview/components/RecentStatements.tsx index 850fe875c5..59d4e63c29 100644 --- a/ui/lib/apps/Overview/components/RecentStatements.tsx +++ b/ui/lib/apps/Overview/components/RecentStatements.tsx @@ -3,40 +3,36 @@ import React from 'react' import { useTranslation } from 'react-i18next' import { Link } from 'react-router-dom' -import { StatementsTable, useStatement } from '@lib/apps/Statement' -import { DateTime } from '@lib/components' +import { + StatementsTable, + useStatementTableController, +} from '@lib/apps/Statement' +import { DateTime, IColumnKeys } from '@lib/components' + +const visibleColumnKeys: IColumnKeys = { + digest_text: true, + sum_latency: true, + avg_latency: true, + related_schemas: true, +} export default function RecentStatements() { const { t } = useTranslation() + const controller = useStatementTableController( + visibleColumnKeys, + false, + undefined, + false + ) const { - orderOptions, - 
changeOrder, - allTimeRanges, - validTimeRange, - loadingStatements, - statements, - - errors, - } = useStatement(undefined, false) + validTimeRange: { begin_time, end_time }, + } = controller return ( {t('overview.top_statements.title')} @@ -45,13 +41,8 @@ export default function RecentStatements() { subTitle={ allTimeRanges.length > 0 && ( - {' '} - ~{' '} - + ~{' '} + ) } diff --git a/ui/lib/apps/SlowQuery/components/SlowQueriesTable.tsx b/ui/lib/apps/SlowQuery/components/SlowQueriesTable.tsx index 19663a7edf..6c195f845e 100644 --- a/ui/lib/apps/SlowQuery/components/SlowQueriesTable.tsx +++ b/ui/lib/apps/SlowQuery/components/SlowQueriesTable.tsx @@ -1,46 +1,35 @@ import { usePersistFn } from '@umijs/hooks' -import { IColumn } from 'office-ui-fabric-react/lib/DetailsList' -import React, { useCallback, useEffect, useMemo } from 'react' +import React, { useCallback } from 'react' import { useNavigate } from 'react-router-dom' -import { SlowqueryBase } from '@lib/client' import { CardTable, ICardTableProps } from '@lib/components' import openLink from '@lib/utils/openLink' import DetailPage from '../pages/Detail' -import { slowQueryColumns } from '../utils/tableColumns' +import { ISlowQueryTableController } from '../utils/useSlowQueryTableController' interface Props extends Partial { - loading: boolean - slowQueries: SlowqueryBase[] - showFullSQL?: boolean - onGetColumns?: (columns: IColumn[]) => void + controller: ISlowQueryTableController } -function SlowQueriesTable({ - loading, - slowQueries, - showFullSQL, - onGetColumns, - ...restProps -}: Props) { - const navigate = useNavigate() - - const columns = useMemo(() => slowQueryColumns(slowQueries, showFullSQL), [ +function SlowQueriesTable({ controller, ...restProps }: Props) { + const { + loadingSlowQueries, + tableColumns, slowQueries, - showFullSQL, - ]) - - useEffect(() => { - onGetColumns && onGetColumns(columns || []) - }, [onGetColumns, columns]) + orderOptions: { orderBy, desc }, + changeOrder, + errors, + visibleColumnKeys, + } = controller + const navigate = useNavigate() const handleRowClick = usePersistFn( (rec, _idx, ev: React.MouseEvent) => { const qs = DetailPage.buildQuery({ digest: rec.digest, connectId: rec.connection_id, - time: rec.timestamp, + timestamp: rec.timestamp, }) openLink(`/slow_query/detail?${qs}`, ev, navigate) } @@ -51,9 +40,14 @@ function SlowQueriesTable({ return ( diff --git a/ui/lib/apps/SlowQuery/index.tsx b/ui/lib/apps/SlowQuery/index.tsx index 9345ed5e5a..f9c706edf9 100644 --- a/ui/lib/apps/SlowQuery/index.tsx +++ b/ui/lib/apps/SlowQuery/index.tsx @@ -2,7 +2,6 @@ import React from 'react' import { Root } from '@lib/components' import { HashRouter as Router, Route, Routes } from 'react-router-dom' import { List, Detail } from './pages' -import useSlowQuery from './utils/useSlowQuery' export default function () { return ( @@ -19,4 +18,5 @@ export default function () { export * from './components' export * from './pages' -export { useSlowQuery } +export * from './utils/useSlowQueryTableController' +export { default as useSlowQueryTableController } from './utils/useSlowQueryTableController' diff --git a/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTxn.tsx b/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTxn.tsx index e44a0e7b12..714b5e2765 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTxn.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTxn.tsx @@ -24,7 +24,7 @@ export default function TabCopr({ data }: ITabTxnProps) { value: getValueFormat('bytes')(data.write_size || 0, 1), }, { - key: 
'prewrite_regions', + key: 'prewrite_region', value: , }, { diff --git a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx index dbfd02b104..5ffe5a5650 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx @@ -29,7 +29,7 @@ import TabTxn from './DetailTabTxn' export interface IPageQuery { connectId?: number digest?: string - time?: number + timestamp?: number } function DetailPage() { @@ -43,7 +43,7 @@ function DetailPage() { .slowQueryDetailGet( query.connectId!, query.digest!, - query.time!, + query.timestamp!, reqConfig ) ) diff --git a/ui/lib/apps/SlowQuery/pages/List/index.tsx b/ui/lib/apps/SlowQuery/pages/List/index.tsx index 5558d8363d..379d27c373 100644 --- a/ui/lib/apps/SlowQuery/pages/List/index.tsx +++ b/ui/lib/apps/SlowQuery/pages/List/index.tsx @@ -1,62 +1,51 @@ -import React, { useState } from 'react' +import React from 'react' import { useTranslation } from 'react-i18next' import { Select, Space, Tooltip, Input, Checkbox } from 'antd' import { ReloadOutlined, LoadingOutlined } from '@ant-design/icons' import { ScrollablePane } from 'office-ui-fabric-react/lib/ScrollablePane' -import { IColumn } from 'office-ui-fabric-react/lib/DetailsList' import { useLocalStorageState } from '@umijs/hooks' import { Card, ColumnsSelector, - IColumnKeys, TimeRangeSelector, Toolbar, MultiSelect, } from '@lib/components' + import SlowQueriesTable from '../../components/SlowQueriesTable' -import useSlowQuery from '../../utils/useSlowQuery' +import useSlowQueryTableController, { + DEF_SLOW_QUERY_COLUMN_KEYS, +} from '../../utils/useSlowQueryTableController' const { Option } = Select const { Search } = Input -const VISIBLE_COLUMN_KEYS = 'slow_query.visible_column_keys' -const SHOW_FULL_SQL = 'slow_query.show_full_sql' +const SLOW_QUERY_VISIBLE_COLUMN_KEYS = 'slow_query.visible_column_keys' +const SLOW_QUERY_SHOW_FULL_SQL = 'slow_query.show_full_sql' const LIMITS = [100, 200, 500, 1000] -export const defSlowQueryColumnKeys: IColumnKeys = { - sql: true, - Time: true, - Query_time: true, - Mem_max: true, -} - function List() { const { t } = useTranslation() + const [visibleColumnKeys, setVisibleColumnKeys] = useLocalStorageState( + SLOW_QUERY_VISIBLE_COLUMN_KEYS, + DEF_SLOW_QUERY_COLUMN_KEYS + ) + const [showFullSQL, setShowFullSQL] = useLocalStorageState( + SLOW_QUERY_SHOW_FULL_SQL, + false + ) + + const controller = useSlowQueryTableController(visibleColumnKeys, showFullSQL) const { queryOptions, setQueryOptions, - orderOptions, - changeOrder, refresh, - allSchemas, loadingSlowQueries, - slowQueries, - - errors, - } = useSlowQuery() - - const [columns, setColumns] = useState([]) - const [visibleColumnKeys, setVisibleColumnKeys] = useLocalStorageState( - VISIBLE_COLUMN_KEYS, - defSlowQueryColumnKeys - ) - const [showFullSQL, setShowFullSQL] = useLocalStorageState( - SHOW_FULL_SQL, - false - ) + tableColumns, + } = controller return (
@@ -66,7 +55,10 @@ function List() { - setQueryOptions({ ...queryOptions, timeRange }) + setQueryOptions({ + ...queryOptions, + timeRange, + }) } /> - {columns.length > 0 && ( + {tableColumns.length > 0 && ( - +
diff --git a/ui/lib/apps/SlowQuery/translations/en.yaml b/ui/lib/apps/SlowQuery/translations/en.yaml index 8a691a4d0f..46d1e20805 100644 --- a/ui/lib/apps/SlowQuery/translations/en.yaml +++ b/ui/lib/apps/SlowQuery/translations/en.yaml @@ -6,6 +6,7 @@ slow_query: connection_id: Connection ID connection_id_tooltip: Unique connection ID of the query sql: Query + query: Query timestamp: Finish Time timestamp_tooltip: The time this query finished execution query_time: Latency @@ -60,8 +61,14 @@ slow_query: txn_start_ts_tooltip: Transaction start timestamp, a.k.a. Transaction ID write_keys: Write Keys write_size: Write Size - prewrite_regions: Prewrite Regions + prewrite_region: Prewrite Regions txn_retry: Transaction Retries + + prev_stmt: Previous Query + plan: Execution Plan + + cop_proc_avg: Mean Cop Proc # ? + cop_wait_avg: Mean Cop Wait # ? common: status: success: Success diff --git a/ui/lib/apps/SlowQuery/translations/zh.yaml b/ui/lib/apps/SlowQuery/translations/zh.yaml index 8efea0f50e..39b50c6801 100644 --- a/ui/lib/apps/SlowQuery/translations/zh.yaml +++ b/ui/lib/apps/SlowQuery/translations/zh.yaml @@ -6,6 +6,7 @@ slow_query: connection_id: 连接号 connection_id_tooltip: SQL 查询客户端连接 ID sql: SQL + query: SQL sql_tooltip: SQL timestamp: 结束运行时间 timestamp_tooltip: 该 SQL 查询结束运行时的时间 @@ -64,8 +65,14 @@ slow_query: txn_start_ts_tooltip: 事务开始的时间戳,也即是事务号 write_keys: 写入 Key 个数 write_size: 写入数据量 - prewrite_regions: Prewrite 涉及 Regions 个数 + prewrite_region: Prewrite 涉及 Regions 个数 txn_retry: 事务重试次数 + + prev_stmt: 前一条 SQL 查询 + plan: 执行计划 + + cop_proc_avg: 平均处理 # ? + cop_wait_avg: 平均等待 # ? common: status: success: 成功 diff --git a/ui/lib/apps/SlowQuery/utils/tableColumns.tsx b/ui/lib/apps/SlowQuery/utils/tableColumns.tsx index 6e08efc0c8..d3ca0e48bc 100644 --- a/ui/lib/apps/SlowQuery/utils/tableColumns.tsx +++ b/ui/lib/apps/SlowQuery/utils/tableColumns.tsx @@ -1,21 +1,12 @@ -import { Badge, Tooltip } from 'antd' -import { max } from 'lodash' -import { - IColumn, - ColumnActionsMode, -} from 'office-ui-fabric-react/lib/DetailsList' +import { Badge } from 'antd' +import { IColumn } from 'office-ui-fabric-react/lib/DetailsList' import React from 'react' import { useTranslation } from 'react-i18next' -import { getValueFormat } from '@baurine/grafana-value-formats' -import { SlowqueryBase } from '@lib/client' -import { - Bar, - DateTime, - HighlightSQL, - TextWithInfo, - TextWrap, -} from '@lib/components' +import { SlowquerySlowQuery } from '@lib/client' +import { TableColumnFactory } from '@lib/utils/tableColumnFactory' + +////////////////////////////////////////// function ResultStatusBadge({ status }: { status: 'success' | 'error' }) { const { t } = useTranslation() @@ -24,251 +15,96 @@ function ResultStatusBadge({ status }: { status: 'success' | 'error' }) { ) } -function commonColumnName(fieldName: string): any { - return -} - -function sqlColumn( - _rows?: { query?: string }[], // used for type check only - showFullSQL?: boolean -): IColumn { - return { - name: commonColumnName('sql'), - key: 'sql', - fieldName: 'sql', - minWidth: 200, - maxWidth: 500, - onRender: (rec) => - showFullSQL ? 
( - - - - ) : ( - } - placement="right" - > - - - - - ), - } -} - -function digestColumn( - _rows?: { digest?: string }[] // used for type check only -): IColumn { - return { - name: commonColumnName('digest'), - key: 'Digest', - fieldName: 'digest', - minWidth: 100, - maxWidth: 150, - onRender: (rec) => ( - - {rec.digest} - - ), - } -} - -function instanceColumn( - _rows?: { instance?: string }[] // used for type check only -): IColumn { - return { - name: commonColumnName('instance'), - key: 'instance', - fieldName: 'instance', - minWidth: 100, - maxWidth: 150, - onRender: (rec) => ( - - {rec.instance} - - ), - } -} - -function dbColumn( - _rows?: { db?: string }[] // used for type check only -): IColumn { - return { - name: commonColumnName('db'), - key: 'DB', - fieldName: 'db', - minWidth: 100, - maxWidth: 150, - onRender: (rec) => ( - - {rec.db} - - ), - } -} - -function successColumn( - _rows?: { success?: number }[] // used for type check only -): IColumn { - // !! Don't call `useTranslation()` directly to avoid this method become the custom hook - // !! So we can use this inside the useMemo(), useEffect() and useState(()=>{...}) - // const { t } = useTranslation() - return { - name: commonColumnName('result'), - key: 'Succ', - fieldName: 'success', - minWidth: 50, - maxWidth: 100, - onRender: (rec) => ( - - ), - } -} - -function timestampColumn( - _rows?: { timestamp?: number }[] // used for type check only -): IColumn { - const key = 'Time' - return { - name: commonColumnName('timestamp'), - key, - fieldName: 'timestamp', - minWidth: 100, - maxWidth: 150, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => ( - - - - ), - } -} - -function queryTimeColumn(rows?: { query_time?: number }[]): IColumn { - const capacity = rows ? max(rows.map((v) => v.query_time)) ?? 0 : 0 - const key = 'Query_time' - return { - name: commonColumnName('query_time'), - key, - fieldName: 'query_time', - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => ( - - {getValueFormat('s')(rec.query_time, 1)} - - ), - } -} - -function parseTimeColumn(rows?: { parse_time?: number }[]): IColumn { - const capacity = rows ? max(rows.map((v) => v.parse_time)) ?? 0 : 0 - const key = 'Parse_time' - return { - name: commonColumnName('parse_time'), - key, - fieldName: 'parse_time', - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => ( - - {getValueFormat('s')(rec.parse_time, 1)} - - ), - } -} - -function compileTimeColumn(rows?: { compile_time?: number }[]): IColumn { - const capacity = rows ? max(rows.map((v) => v.compile_time)) ?? 0 : 0 - const key = 'Compile_time' - return { - name: commonColumnName('compile_time'), - key, - fieldName: 'compile_time', - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => ( - - {getValueFormat('s')(rec.compile_time, 1)} - - ), - } -} - -function processTimeColumn(rows?: { process_time?: number }[]): IColumn { - const capacity = rows ? max(rows.map((v) => v.process_time)) ?? 0 : 0 - const key = 'Process_time' - return { - name: commonColumnName('process_time'), - key, - fieldName: 'process_time', - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => ( - - {getValueFormat('s')(rec.process_time, 1)} - - ), - } -} - -function memoryColumn(rows?: { memory_max?: number }[]): IColumn { - const capacity = rows ? max(rows.map((v) => v.memory_max)) ?? 
0 : 0 - const key = 'Mem_max' - return { - name: commonColumnName('memory_max'), - key, - fieldName: 'memory_max', - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => ( - - {getValueFormat('bytes')(rec.memory_max, 1)} - - ), - } -} - -function txnStartTsColumn( - _rows?: { txn_start_ts?: number }[] // used for type check only -): IColumn { - return { - name: commonColumnName('txn_start_ts'), - key: 'Txn_start_ts', - fieldName: 'txn_start_ts', - minWidth: 100, - maxWidth: 150, - onRender: (rec) => ( - - {rec.txn_start_ts} - - ), - } -} - ////////////////////////////////////////// +const TRANS_KEY_PREFIX = 'slow_query.fields' export function slowQueryColumns( - rows: SlowqueryBase[], + rows: SlowquerySlowQuery[], showFullSQL?: boolean ): IColumn[] { + const tcf = new TableColumnFactory(TRANS_KEY_PREFIX) return [ - sqlColumn(rows, showFullSQL), - digestColumn(rows), - instanceColumn(rows), - dbColumn(rows), - timestampColumn(rows), - queryTimeColumn(rows), - parseTimeColumn(rows), - compileTimeColumn(rows), - processTimeColumn(rows), - memoryColumn(rows), - txnStartTsColumn(rows), - successColumn(rows), + tcf.sqlText('query', showFullSQL, rows), + tcf.textWithTooltip('digest', rows), + tcf.textWithTooltip('instance', rows), + tcf.textWithTooltip('db', rows), + tcf.textWithTooltip('connection_id', rows), + tcf.timestamp('timestamp', rows), + + tcf.bar.single('query_time', 's', rows), + tcf.bar.single('parse_time', 's', rows), + tcf.bar.single('compile_time', 's', rows), + tcf.bar.single('process_time', 's', rows), + tcf.bar.single('memory_max', 'bytes', rows), + + tcf.textWithTooltip('txn_start_ts', rows), + // success columnn + { + ...tcf.textWithTooltip('success', rows), + name: tcf.columnName('result'), + minWidth: 50, + maxWidth: 100, + onRender: (rec) => ( + + ), + }, + + // basic + // is_internal column + { + ...tcf.textWithTooltip('is_internal', rows), + minWidth: 50, + maxWidth: 100, + onRender: (rec) => (rec.is_internal === 1 ? 'Yes' : 'No'), + }, + tcf.textWithTooltip('index_names', rows), + tcf.textWithTooltip('stats', rows), + tcf.textWithTooltip('backoff_types', rows), + // connection + tcf.textWithTooltip('user', rows), + tcf.textWithTooltip('host', rows), + // time + tcf.bar.single('wait_time', 'ns', rows), + tcf.bar.single('backoff_time', 'ns', rows), + tcf.bar.single('get_commit_ts_time', 'ns', rows), + tcf.bar.single('local_latch_wait_time', 'ns', rows), + tcf.bar.single('prewrite_time', 'ns', rows), + tcf.bar.single('commit_time', 'ns', rows), + tcf.bar.single('commit_backoff_time', 'ns', rows), + tcf.bar.single('resolve_lock_time', 'ns', rows), + // cop + tcf.bar.multiple( + { + bars: [ + { mean: 'cop_proc_avg' }, + { max: 'cop_proc_max' }, + { p90: 'cop_proc_p90' }, + ], + }, + 'ns', + rows + ), + tcf.bar.multiple( + { + bars: [ + { mean: 'cop_wait_avg' }, + { max: 'cop_wait_avg' }, + { p90: 'cop_wait_avg' }, + ], + }, + 'ns', + rows + ), + // transaction + tcf.bar.single('write_keys', 'short', rows), + tcf.bar.single('write_size', 'bytes', rows), + tcf.bar.single('prewrite_region', 'short', rows), + tcf.bar.single('txn_retry', 'short', rows), + // cop? 
+ tcf.bar.single('request_count', 'short', rows), + tcf.bar.single('process_keys', 'short', rows), + tcf.bar.single('total_keys', 'short', rows), + tcf.textWithTooltip('cop_proc_addr', rows), + tcf.textWithTooltip('cop_wait_addr', rows), ] } diff --git a/ui/lib/apps/SlowQuery/utils/useSlowQuery.ts b/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts similarity index 61% rename from ui/lib/apps/SlowQuery/utils/useSlowQuery.ts rename to ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts index 23a9e4a8f3..9e35fdc9f2 100644 --- a/ui/lib/apps/SlowQuery/utils/useSlowQuery.ts +++ b/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts @@ -1,14 +1,25 @@ import { useEffect, useMemo, useState } from 'react' import { useSessionStorageState } from '@umijs/hooks' +import { IColumn } from 'office-ui-fabric-react/lib/DetailsList' -import client, { ErrorStrategy, SlowqueryBase } from '@lib/client' -import { calcTimeRange, TimeRange } from '@lib/components' +import client, { ErrorStrategy, SlowquerySlowQuery } from '@lib/client' +import { calcTimeRange, TimeRange, IColumnKeys } from '@lib/components' import useOrderState, { IOrderOptions } from '@lib/utils/useOrderState' +import { slowQueryColumns } from './tableColumns' +import { getSelectedFields } from '@lib/utils/tableColumnFactory' + +export const DEF_SLOW_QUERY_COLUMN_KEYS: IColumnKeys = { + query: true, + timestamp: true, + query_time: true, + memory_max: true, +} + const QUERY_OPTIONS = 'slow_query.query_options' const DEF_ORDER_OPTIONS: IOrderOptions = { - orderBy: 'Time', + orderBy: 'timestamp', desc: true, } @@ -32,10 +43,30 @@ export const DEF_SLOW_QUERY_OPTIONS: ISlowQueryOptions = { plans: [], } -export default function useSlowQuery( +export interface ISlowQueryTableController { + queryOptions: ISlowQueryOptions + setQueryOptions: (options: ISlowQueryOptions) => void + orderOptions: IOrderOptions + changeOrder: (orderBy: string, desc: boolean) => void + refresh: () => void + + allSchemas: string[] + loadingSlowQueries: boolean + slowQueries: SlowquerySlowQuery[] + queryTimeRange: { beginTime: number; endTime: number } + + errors: Error[] + + tableColumns: IColumn[] + visibleColumnKeys: IColumnKeys +} + +export default function useSlowQueryTableController( + visibleColumnKeys: IColumnKeys, + showFullSQL: boolean, options?: ISlowQueryOptions, needSave: boolean = true -) { +): ISlowQueryTableController { const { orderOptions, changeOrder } = useOrderState( 'slow_query', needSave, @@ -60,7 +91,7 @@ export default function useSlowQuery( const [allSchemas, setAllSchemas] = useState([]) const [loadingSlowQueries, setLoadingSlowQueries] = useState(true) - const [slowQueries, setSlowQueries] = useState([]) + const [slowQueries, setSlowQueries] = useState([]) const [refreshTimes, setRefreshTimes] = useState(0) function setQueryOptions(newOptions: ISlowQueryOptions) { @@ -71,7 +102,7 @@ export default function useSlowQuery( } } - const [errors, setErrors] = useState([]) + const [errors, setErrors] = useState([]) function refresh() { setErrors([]) @@ -92,6 +123,18 @@ export default function useSlowQuery( querySchemas() }, []) + // Notice: slowQueries, tableColumns, selectedFields make loop dependencies + const tableColumns = useMemo( + () => slowQueryColumns(slowQueries, showFullSQL), + [slowQueries, showFullSQL] + ) + // make selectedFields as a string instead of an array to avoid infinite loop + // I have verified that it will cause infinite loop if we return selectedFields as an array + // so it is better to use the basic type 
(string, number...) instead of object as the dependency + const selectedFields = useMemo( + () => getSelectedFields(visibleColumnKeys, tableColumns).join(','), + [visibleColumnKeys, tableColumns] + ) useEffect(() => { async function getSlowQueryList() { setLoadingSlowQueries(true) @@ -102,6 +145,7 @@ export default function useSlowQuery( queryOptions.schemas, orderOptions.desc, queryOptions.digest, + selectedFields, queryOptions.limit, queryTimeRange.endTime, queryTimeRange.beginTime, @@ -121,7 +165,7 @@ export default function useSlowQuery( } getSlowQueryList() - }, [queryOptions, orderOptions, queryTimeRange, refreshTimes]) + }, [queryOptions, orderOptions, queryTimeRange, refreshTimes, selectedFields]) return { queryOptions, @@ -136,5 +180,8 @@ export default function useSlowQuery( queryTimeRange, errors, + + tableColumns, + visibleColumnKeys, } } diff --git a/ui/lib/apps/Statement/components/StatementsTable.tsx b/ui/lib/apps/Statement/components/StatementsTable.tsx index 05aec25e9e..a2b7444c84 100644 --- a/ui/lib/apps/Statement/components/StatementsTable.tsx +++ b/ui/lib/apps/Statement/components/StatementsTable.tsx @@ -1,49 +1,37 @@ import { usePersistFn } from '@umijs/hooks' -import { IColumn } from 'office-ui-fabric-react/lib/DetailsList' -import React, { useCallback, useEffect, useMemo } from 'react' +import React, { useCallback } from 'react' import { useNavigate } from 'react-router-dom' -import { StatementModel, StatementTimeRange } from '@lib/client' import { CardTable, ICardTableProps } from '@lib/components' import openLink from '@lib/utils/openLink' import DetailPage from '../pages/Detail' -import { statementColumns } from '../utils/tableColumns' +import { IStatementTableController } from '../utils/useStatementTableController' interface Props extends Partial { - loading: boolean - statements: StatementModel[] - timeRange: StatementTimeRange - showFullSQL?: boolean - onGetColumns?: (columns: IColumn[]) => void + controller: IStatementTableController } -export default function StatementsTable({ - loading, - statements, - timeRange, - showFullSQL, - onGetColumns, - ...restPrpos -}: Props) { - const navigate = useNavigate() - - const columns = useMemo(() => statementColumns(statements, showFullSQL), [ +export default function StatementsTable({ controller, ...restPrpos }: Props) { + const { + orderOptions, + changeOrder, + validTimeRange: { begin_time, end_time }, + loadingStatements, statements, - showFullSQL, - ]) - - useEffect(() => { - onGetColumns && onGetColumns(columns || []) - }, [onGetColumns, columns]) + errors, + tableColumns, + visibleColumnKeys, + } = controller + const navigate = useNavigate() const handleRowClick = usePersistFn( (rec, _idx, ev: React.MouseEvent) => { const qs = DetailPage.buildQuery({ digest: rec.digest, schema: rec.schema_name, - beginTime: timeRange.begin_time, - endTime: timeRange.end_time, + beginTime: begin_time, + endTime: end_time, }) openLink(`/statement/detail?${qs}`, ev, navigate) } @@ -54,9 +42,14 @@ export default function StatementsTable({ return ( diff --git a/ui/lib/apps/Statement/index.tsx b/ui/lib/apps/Statement/index.tsx index fa78c872ac..792f4e7e57 100644 --- a/ui/lib/apps/Statement/index.tsx +++ b/ui/lib/apps/Statement/index.tsx @@ -3,7 +3,6 @@ import { HashRouter as Router, Routes, Route } from 'react-router-dom' import { Root } from '@lib/components' import { List, Detail } from './pages' -import useStatement from './utils/useStatement' export default function () { return ( @@ -20,4 +19,5 @@ export default function () { export 
* from './components' export * from './pages' -export { useStatement } +export * from './utils/useStatementTableController' +export { default as useStatementTableController } from './utils/useStatementTableController' diff --git a/ui/lib/apps/Statement/pages/Detail/SlowQueryTab.tsx b/ui/lib/apps/Statement/pages/Detail/SlowQueryTab.tsx index c1a99e532e..c84b48d279 100644 --- a/ui/lib/apps/Statement/pages/Detail/SlowQueryTab.tsx +++ b/ui/lib/apps/Statement/pages/Detail/SlowQueryTab.tsx @@ -1,23 +1,19 @@ import React from 'react' import SlowQueriesTable from '@lib/apps/SlowQuery/components/SlowQueriesTable' import { IQuery } from './PlanDetail' -import useSlowQuery, { +import useSlowQueryTableController, { DEF_SLOW_QUERY_OPTIONS, -} from '@lib/apps/SlowQuery/utils/useSlowQuery' -import { defSlowQueryColumnKeys } from '@lib/apps/SlowQuery/pages/List' + DEF_SLOW_QUERY_COLUMN_KEYS, +} from '@lib/apps/SlowQuery/utils/useSlowQueryTableController' export interface ISlowQueryTabProps { query: IQuery } export default function SlowQueryTab({ query }: ISlowQueryTabProps) { - const { - orderOptions, - changeOrder, - - slowQueries, - loadingSlowQueries, - } = useSlowQuery( + const controller = useSlowQueryTableController( + DEF_SLOW_QUERY_COLUMN_KEYS, + false, { ...DEF_SLOW_QUERY_OPTIONS, timeRange: { @@ -32,16 +28,5 @@ export default function SlowQueryTab({ query }: ISlowQueryTabProps) { false ) - return ( - - ) + return } diff --git a/ui/lib/apps/Statement/pages/List/index.tsx b/ui/lib/apps/Statement/pages/List/index.tsx index 7d8d54e50f..7590375686 100644 --- a/ui/lib/apps/Statement/pages/List/index.tsx +++ b/ui/lib/apps/Statement/pages/List/index.tsx @@ -7,65 +7,47 @@ import { LoadingOutlined, } from '@ant-design/icons' import { ScrollablePane } from 'office-ui-fabric-react/lib/ScrollablePane' -import { IColumn } from 'office-ui-fabric-react/lib/DetailsList' import { useTranslation } from 'react-i18next' -import { - Card, - ColumnsSelector, - IColumnKeys, - Toolbar, - MultiSelect, -} from '@lib/components' + +import { Card, ColumnsSelector, Toolbar, MultiSelect } from '@lib/components' + import { StatementsTable } from '../../components' import StatementSettingForm from './StatementSettingForm' import TimeRangeSelector from './TimeRangeSelector' -import useStatement from '../../utils/useStatement' +import useStatementTableController, { + DEF_STMT_COLUMN_KEYS, +} from '../../utils/useStatementTableController' const { Search } = Input -const VISIBLE_COLUMN_KEYS = 'statement.visible_column_keys' -const SHOW_FULL_SQL = 'statement.show_full_sql' - -const defColumnKeys: IColumnKeys = { - digest_text: true, - sum_latency: true, - avg_latency: true, - exec_count: true, - plan_count: true, - related_schemas: true, -} +const STMT_VISIBLE_COLUMN_KEYS = 'statement.visible_column_keys' +const STMT_SHOW_FULL_SQL = 'statement.show_full_sql' export default function StatementsOverview() { const { t } = useTranslation() + const [showSettings, setShowSettings] = useState(false) + const [visibleColumnKeys, setVisibleColumnKeys] = useLocalStorageState( + STMT_VISIBLE_COLUMN_KEYS, + DEF_STMT_COLUMN_KEYS + ) + const [showFullSQL, setShowFullSQL] = useLocalStorageState( + STMT_SHOW_FULL_SQL, + false + ) + + const controller = useStatementTableController(visibleColumnKeys, showFullSQL) const { queryOptions, setQueryOptions, - orderOptions, - changeOrder, refresh, - enable, allTimeRanges, allSchemas, allStmtTypes, - validTimeRange, loadingStatements, - statements, - - errors, - } = useStatement() - - const [columns, 
setColumns] = useState([]) - const [showSettings, setShowSettings] = useState(false) - const [visibleColumnKeys, setVisibleColumnKeys] = useLocalStorageState( - VISIBLE_COLUMN_KEYS, - defColumnKeys - ) - const [showFullSQL, setShowFullSQL] = useLocalStorageState( - SHOW_FULL_SQL, - false - ) + tableColumns, + } = controller return (
@@ -127,11 +109,11 @@ export default function StatementsOverview() { - {columns.length > 0 && ( + {tableColumns.length > 0 && ( - +
) : ( diff --git a/ui/lib/apps/Statement/translations/en.yaml b/ui/lib/apps/Statement/translations/en.yaml index 837e8c4481..f91128ac54 100644 --- a/ui/lib/apps/Statement/translations/en.yaml +++ b/ui/lib/apps/Statement/translations/en.yaml @@ -90,6 +90,8 @@ statement: compile_latency_tooltip: Time consumed when optimizing the query wait_time: Coprocessor Wait Time process_time: Coprocessor Execution Time + total_process_time: Total Execution Time + total_wait_time: Total Wait Time backoff_time: Backoff Retry Time backoff_time_tooltip: The waiting time before retry when a query encounters errors that require a retry get_commit_ts_time: Get Commit Ts Time @@ -102,11 +104,11 @@ statement: query_time2: Query Time query_time2_tooltip: The execution time of a query (due to the parallel execution, it may be significantly smaller than the above time) sum_cop_task_num: Total Coprocessor Tasks - avg_processed_keys: Mean Visible Versions per query - max_processed_keys: Max Visible Versions per query - avg_total_keys: Mean Meet Versions per query + avg_processed_keys: Mean Visible Versions Per Query + max_processed_keys: Max Visible Versions Per Query + avg_total_keys: Mean Meet Versions Per Query avg_total_keys_tooltip: Meet versions contains overwritten or deleted versions - max_total_keys: Max Meet Versions per query + max_total_keys: Max Meet Versions Per Query avg_affected_rows: Mean Affected Rows sum_backoff_times: Total Backoff Count avg_write_keys: Mean Written Keys diff --git a/ui/lib/apps/Statement/translations/zh.yaml b/ui/lib/apps/Statement/translations/zh.yaml index 6455567230..2602746f16 100644 --- a/ui/lib/apps/Statement/translations/zh.yaml +++ b/ui/lib/apps/Statement/translations/zh.yaml @@ -92,6 +92,8 @@ statement: wait_time_tooltip: SQL 查询在 TiKV Coprocessor 上被等待执行的耗时,单个 SQL 查询所有 Coprocessor 任务累计后计算 process_time: Coprocessor 执行耗时 process_time_tooltip: SQL 查询在 TiKV Coprocessor 上的执行耗时,单个 SQL 查询所有 Coprocessor 任务累计后计算 + total_process_time: 所有执行耗时 + total_wait_time: 所有等待耗时 backoff_time: 重试等待耗时 backoff_time_tooltip: 单个 SQL 查询所有重试累计后计算 get_commit_ts_time: 取 Commit Ts 耗时 diff --git a/ui/lib/apps/Statement/utils/tableColumns.tsx b/ui/lib/apps/Statement/utils/tableColumns.tsx index a249e8610b..ea901ead38 100644 --- a/ui/lib/apps/Statement/utils/tableColumns.tsx +++ b/ui/lib/apps/Statement/utils/tableColumns.tsx @@ -6,194 +6,59 @@ import { } from 'office-ui-fabric-react/lib/DetailsList' import React from 'react' import { orange, red } from '@ant-design/colors' -import { getValueFormat } from '@baurine/grafana-value-formats' import { StatementModel } from '@lib/client' -import { Bar, HighlightSQL, Pre, TextWithInfo, TextWrap } from '@lib/components' - -function commonColumnName(fieldName: string): any { - return -} - -function planCountColumn( - _rows?: { plan_count?: number }[] // used for type check only -): IColumn { - return { - name: commonColumnName('plan_count'), - key: 'plan_count', - fieldName: 'plan_count', - minWidth: 100, - maxWidth: 300, - columnActionsMode: ColumnActionsMode.clickable, - } -} - -function planDigestColumn( - _rows?: { plan_digest?: string }[] // used for type check only -): IColumn { - return { - name: commonColumnName('plan_digest'), - key: 'plan_digest', - fieldName: 'plan_digest', - minWidth: 100, - maxWidth: 300, - onRender: (rec) => ( - - {rec.plan_digest || '(none)'} - - ), - } -} +import { Bar, Pre } from '@lib/components' +import { + TableColumnFactory, + formatVal, + IColumnWithSourceFields, +} from '@lib/utils/tableColumnFactory' -function digestColumn( - 
_rows?: { digest_text?: string }[], // used for type check only - showFullSQL?: boolean -): IColumn { - return { - name: commonColumnName('digest_text'), - key: 'digest_text', - fieldName: 'digest_text', - minWidth: 100, - maxWidth: 500, - isMultiline: showFullSQL, - onRender: (rec) => - showFullSQL ? ( - - - - ) : ( - } - placement="right" - > - - - - - ), - } -} - -function sumLatencyColumn(rows?: { sum_latency?: number }[]): IColumn { - const capacity = rows ? max(rows.map((v) => v.sum_latency)) ?? 0 : 0 - const key = 'sum_latency' - return { - name: commonColumnName(key), - key, - fieldName: key, - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => ( - - {getValueFormat('ns')(rec.sum_latency, 1)} - - ), - } -} +/////////////////////////////////////// +// statements order list in local by fieldName of IColumn +// slow query order list in backend by key of IColumn +const TRANS_KEY_PREFIX = 'statement.fields' function avgMinMaxLatencyColumn( + tcf: TableColumnFactory, rows?: { max_latency?: number; min_latency?: number; avg_latency?: number }[] ): IColumn { - const capacity = rows ? max(rows.map((v) => v.max_latency)) ?? 0 : 0 - const key = 'avg_latency' - return { - name: commonColumnName(key), - key, - fieldName: key, - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => { - const tooltipContent = ` -Mean: ${getValueFormat('ns')(rec.avg_latency, 1)} -Min: ${getValueFormat('ns')(rec.min_latency, 1)} -Max: ${getValueFormat('ns')(rec.max_latency, 1)}` - return ( - {tooltipContent.trim()}}> - - {getValueFormat('ns')(rec.avg_latency, 1)} - - - ) - }, - } -} - -function execCountColumn(rows?: { exec_count?: number }[]): IColumn { - const capacity = rows ? max(rows.map((v) => v.exec_count)) ?? 0 : 0 - const key = 'exec_count' - return { - name: commonColumnName(key), - key, - fieldName: key, - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => ( - - {getValueFormat('short')(rec.exec_count, 0, 1)} - - ), - } -} - -function avgMaxMemColumn( - rows?: { avg_mem?: number; max_mem?: number }[] -): IColumn { - const capacity = rows ? max(rows.map((v) => v.max_mem)) ?? 0 : 0 - const key = 'avg_mem' - return { - name: commonColumnName(key), - key, - fieldName: key, - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => { - const tooltipContent = ` -Mean: ${getValueFormat('bytes')(rec.avg_mem, 1)} -Max: ${getValueFormat('bytes')(rec.max_mem, 1)}` - return ( - {tooltipContent.trim()}}> - - {getValueFormat('bytes')(rec.avg_mem, 1)} - - - ) + return tcf.bar.multiple( + { + bars: [ + { mean: 'avg_latency' }, + { max: 'max_latency' }, + { min: 'min_latency' }, + ], }, - } + 'ns', + rows + ) } function errorsWarningsColumn( + tcf: TableColumnFactory, rows?: { sum_errors?: number; sum_warnings?: number }[] -): IColumn { +): IColumnWithSourceFields { const capacity = rows ? max(rows.map((v) => v.sum_errors! + v.sum_warnings!)) ?? 
0 : 0 const key = 'sum_errors' return { - name: commonColumnName('errors_warnings'), + name: tcf.columnName('errors_warnings'), key, fieldName: key, + sourceFields: ['sum_errors', 'sum_warnings'], minWidth: 140, maxWidth: 200, columnActionsMode: ColumnActionsMode.clickable, onRender: (rec) => { + const errorsFmtVal = formatVal(rec.sum_errors, 'short') + const warningsFmtVal = formatVal(rec.sum_warnings, 'short') const tooltipContent = ` -Errors: ${getValueFormat('short')(rec.sum_errors, 0, 1)} -Warnings: ${getValueFormat('short')(rec.sum_warnings, 0, 1)}` +Errors: ${errorsFmtVal} +Warnings: ${warningsFmtVal}` return ( {tooltipContent.trim()}}> - {getValueFormat('short')(rec.sum_errors, 0, 1)} - {' / '} - {getValueFormat('short')(rec.sum_warnings, 0, 1)} - - - ) - }, - } -} - -function avgParseLatencyColumn( - rows?: { avg_parse_latency?: number; max_parse_latency?: number }[] -): IColumn { - const capacity = rows ? max(rows.map((v) => v.max_parse_latency)) ?? 0 : 0 - const key = 'avg_parse_latency' - return { - name: commonColumnName('parse_latency'), - key, - fieldName: key, - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => { - const tooltipContent = ` -Mean: ${getValueFormat('ns')(rec.avg_parse_latency, 1)} -Max: ${getValueFormat('ns')(rec.max_parse_latency, 1)}` - return ( - {tooltipContent.trim()}}> - - {getValueFormat('ns')(rec.avg_parse_latency, 1)} + {`${errorsFmtVal} / ${warningsFmtVal}`} ) @@ -244,84 +75,25 @@ Max: ${getValueFormat('ns')(rec.max_parse_latency, 1)}` } } -function avgCompileLatencyColumn( - rows?: { avg_compile_latency?: number; max_compile_latency?: number }[] -): IColumn { - const capacity = rows ? max(rows.map((v) => v.max_compile_latency)) ?? 0 : 0 - const key = 'avg_compile_latency' - return { - name: commonColumnName('compile_latency'), - key, - fieldName: key, - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => { - const tooltipContent = ` -Mean: ${getValueFormat('ns')(rec.avg_compile_latency, 1)} -Max: ${getValueFormat('ns')(rec.max_compile_latency, 1)}` - return ( - {tooltipContent.trim()}}> - - {getValueFormat('ns')(rec.avg_compile_latency, 1)} - - - ) - }, - } -} +//////////////////////////////////////////////// +// util methods -function avgCoprColumn( - rows?: { avg_cop_process_time?: number; max_cop_process_time?: number }[] +function avgMaxColumn( + tcf: TableColumnFactory, + avgKey: keyof T, + maxKey: keyof T, + displayTransKey: string, + unit: string, + rows?: T[] ): IColumn { - const capacity = rows ? max(rows.map((v) => v.max_cop_process_time)) ?? 
0 : 0 - const key = 'avg_cop_process_time' - return { - name: commonColumnName('process_time'), - key, - fieldName: key, - minWidth: 140, - maxWidth: 200, - columnActionsMode: ColumnActionsMode.clickable, - onRender: (rec) => { - const tooltipContent = ` -Mean: ${getValueFormat('ns')(rec.avg_cop_process_time, 1)} -Max: ${getValueFormat('ns')(rec.max_cop_process_time, 1)}` - return ( - {tooltipContent.trim()}}> - - {getValueFormat('ns')(rec.avg_cop_process_time, 1)} - - - ) + return tcf.bar.multiple( + { + displayTransKey, + bars: [{ mean: avgKey }, { max: maxKey }], }, - } -} - -function relatedSchemasColumn( - _rows?: { related_schemas?: string }[] // used for type check only -): IColumn { - return { - name: commonColumnName('related_schemas'), - key: 'related_schemas', - minWidth: 160, - maxWidth: 240, - onRender: (rec) => ( - - {rec.related_schemas} - - ), - } + unit, + rows + ) } //////////////////////////////////////////////// @@ -329,28 +101,214 @@ function relatedSchemasColumn( export function statementColumns( rows: StatementModel[], showFullSQL?: boolean -): IColumn[] { +): IColumnWithSourceFields[] { + const tcf = new TableColumnFactory(TRANS_KEY_PREFIX) + return [ - digestColumn(rows, showFullSQL), - sumLatencyColumn(rows), - avgMinMaxLatencyColumn(rows), - execCountColumn(rows), - planCountColumn(rows), - avgMaxMemColumn(rows), - errorsWarningsColumn(rows), - avgParseLatencyColumn(rows), - avgCompileLatencyColumn(rows), - avgCoprColumn(rows), - relatedSchemasColumn(rows), + tcf.sqlText('digest_text', showFullSQL, rows), + tcf.textWithTooltip('digest', rows), + tcf.bar.single('sum_latency', 'ns', rows), + avgMinMaxLatencyColumn(tcf, rows), + tcf.bar.single('exec_count', 'short', rows), + { + ...tcf.textWithTooltip('plan_count', rows), + minWidth: 100, + maxWidth: 300, + columnActionsMode: ColumnActionsMode.clickable, + }, + avgMaxColumn(tcf, 'avg_mem', 'max_mem', 'avg_mem', 'bytes', rows), + errorsWarningsColumn(tcf, rows), + avgMaxColumn( + tcf, + 'avg_parse_latency', + 'max_parse_latency', + 'parse_latency', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_compile_latency', + 'max_compile_latency', + 'compile_latency', + 'ns', + rows + ), + tcf.bar.single('sum_cop_task_num', 'short', rows), + avgMaxColumn( + tcf, + 'avg_cop_process_time', + 'max_cop_process_time', + 'process_time', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_cop_wait_time', + 'max_cop_wait_time', + 'wait_time', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_process_time', + 'max_process_time', + 'total_process_time', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_wait_time', + 'max_wait_time', + 'total_wait_time', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_backoff_time', + 'max_backoff_time', + 'backoff_time', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_write_keys', + 'max_write_keys', + 'avg_write_keys', + 'short', + rows + ), + avgMaxColumn( + tcf, + 'avg_processed_keys', + 'max_processed_keys', + 'avg_processed_keys', + 'short', + rows + ), + avgMaxColumn( + tcf, + 'avg_total_keys', + 'max_total_keys', + 'avg_total_keys', + 'short', + rows + ), + avgMaxColumn( + tcf, + 'avg_prewrite_time', + 'max_prewrite_time', + 'prewrite_time', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_commit_time', + 'max_commit_time', + 'commit_time', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_get_commit_ts_time', + 'max_get_commit_ts_time', + 'get_commit_ts_time', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_commit_backoff_time', + 'max_commit_backoff_time', + 'commit_backoff_time', + 
'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_resolve_lock_time', + 'max_resolve_lock_time', + 'resolve_lock_time', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_local_latch_wait_time', + 'max_local_latch_wait_time', + 'local_latch_wait_time', + 'ns', + rows + ), + avgMaxColumn( + tcf, + 'avg_write_size', + 'max_write_size', + 'avg_write_size', + 'bytes', + rows + ), + avgMaxColumn( + tcf, + 'avg_prewrite_regions', + 'max_prewrite_regions', + 'avg_prewrite_regions', + 'short', + rows + ), + avgMaxColumn( + tcf, + 'avg_txn_retry', + 'max_txn_retry', + 'avg_txn_retry', + 'short', + rows + ), + + tcf.bar.single('sum_backoff_times', 'short', rows), + tcf.bar.single('avg_affected_rows', 'short', rows), + + tcf.timestamp('first_seen', rows), + tcf.timestamp('last_seen', rows), + tcf.textWithTooltip('sample_user', rows), + + tcf.sqlText('query_sample_text', showFullSQL, rows), + tcf.sqlText('prev_sample_text', showFullSQL, rows), + + tcf.textWithTooltip('schema_name', rows), + tcf.textWithTooltip('table_names', rows), + tcf.textWithTooltip('index_names', rows), + + tcf.textWithTooltip('plan_digest', rows), + + { + ...tcf.textWithTooltip('related_schemas', rows), + minWidth: 160, + maxWidth: 240, + sourceFields: ['table_names'], + }, ] } export function planColumns(rows: StatementModel[]): IColumn[] { + const tcf = new TableColumnFactory(TRANS_KEY_PREFIX) + return [ - planDigestColumn(rows), - sumLatencyColumn(rows), - avgMinMaxLatencyColumn(rows), - execCountColumn(rows), - avgMaxMemColumn(rows), + { + ...tcf.textWithTooltip('plan_digest'), + minWidth: 100, + maxWidth: 300, + }, + tcf.bar.single('sum_latency', 'ns', rows), + avgMinMaxLatencyColumn(tcf, rows), + tcf.bar.single('exec_count', 'short', rows), + avgMaxColumn(tcf, 'avg_mem', 'max_mem', 'avg_mem', 'bytes', rows), ] } diff --git a/ui/lib/apps/Statement/utils/useStatement.ts b/ui/lib/apps/Statement/utils/useStatementTableController.ts similarity index 71% rename from ui/lib/apps/Statement/utils/useStatement.ts rename to ui/lib/apps/Statement/utils/useStatementTableController.ts index 098b1e44bd..876549dc02 100644 --- a/ui/lib/apps/Statement/utils/useStatement.ts +++ b/ui/lib/apps/Statement/utils/useStatementTableController.ts @@ -1,11 +1,13 @@ import { useEffect, useMemo, useState } from 'react' import { useSessionStorageState } from '@umijs/hooks' +import { IColumn } from 'office-ui-fabric-react/lib/DetailsList' import client, { ErrorStrategy, StatementModel, StatementTimeRange, } from '@lib/client' +import { IColumnKeys } from '@lib/components' import useOrderState, { IOrderOptions } from '@lib/utils/useOrderState' import { @@ -13,6 +15,17 @@ import { DEFAULT_TIME_RANGE, TimeRange, } from '../pages/List/TimeRangeSelector' +import { statementColumns } from './tableColumns' +import { getSelectedFields } from '@lib/utils/tableColumnFactory' + +export const DEF_STMT_COLUMN_KEYS: IColumnKeys = { + digest_text: true, + sum_latency: true, + avg_latency: true, + exec_count: true, + plan_count: true, + related_schemas: true, +} const QUERY_OPTIONS = 'statement.query_options' @@ -35,10 +48,33 @@ export const DEF_STMT_QUERY_OPTIONS: IStatementQueryOptions = { searchText: '', } -export default function useStatement( +export interface IStatementTableController { + queryOptions: IStatementQueryOptions + setQueryOptions: (options: IStatementQueryOptions) => void + orderOptions: IOrderOptions + changeOrder: (orderBy: string, desc: boolean) => void + refresh: () => void + + enable: boolean + allTimeRanges: StatementTimeRange[] + allSchemas: 
string[] + allStmtTypes: string[] + validTimeRange: StatementTimeRange + loadingStatements: boolean + statements: StatementModel[] + + errors: Error[] + + tableColumns: IColumn[] + visibleColumnKeys: IColumnKeys +} + +export default function useStatementTableController( + visibleColumnKeys: IColumnKeys, + showFullSQL: boolean, options?: IStatementQueryOptions, needSave: boolean = true -) { +): IStatementTableController { const { orderOptions, changeOrder } = useOrderState( 'statement', needSave, @@ -138,6 +174,18 @@ export default function useStatement( queryStmtTypes() }, [refreshTimes]) + // Notice: statements, tableColumns, selectedFields make loop dependencies + const tableColumns = useMemo( + () => statementColumns(statements, showFullSQL), + [statements, showFullSQL] + ) + // make selectedFields as a string instead of an array to avoid infinite loop + // I have verified that it will cause infinite loop if we return selectedFields as an array + // so it is better to use the basic type (string, number...) instead of object as the dependency + const selectedFields = useMemo( + () => getSelectedFields(visibleColumnKeys, tableColumns).join(','), + [visibleColumnKeys, tableColumns] + ) useEffect(() => { async function queryStatementList() { if (allTimeRanges.length === 0) { @@ -153,6 +201,7 @@ export default function useStatement( .statementsOverviewsGet( validTimeRange.begin_time!, validTimeRange.end_time!, + selectedFields, queryOptions.schemas, queryOptions.stmtTypes, queryOptions.searchText, @@ -169,7 +218,7 @@ export default function useStatement( } queryStatementList() - }, [queryOptions, allTimeRanges, validTimeRange]) + }, [queryOptions, allTimeRanges, validTimeRange, selectedFields]) return { queryOptions, @@ -187,5 +236,8 @@ export default function useStatement( statements, errors, + + tableColumns, + visibleColumnKeys, } } diff --git a/ui/lib/components/ColumnsSelector/index.tsx b/ui/lib/components/ColumnsSelector/index.tsx index 185cf8bf1e..fa243f85e3 100644 --- a/ui/lib/components/ColumnsSelector/index.tsx +++ b/ui/lib/components/ColumnsSelector/index.tsx @@ -35,7 +35,7 @@ export interface IColumnKeys { export interface IColumnsSelectorProps { columns: IColumn[] visibleColumnKeys?: IColumnKeys - resetColumnKeys?: IColumnKeys + defaultVisibleColumnKeys?: IColumnKeys onChange?: (visibleKeys: IColumnKeys) => void foot?: ReactNode } @@ -43,7 +43,7 @@ export interface IColumnsSelectorProps { export default function ColumnsSelector({ columns, visibleColumnKeys, - resetColumnKeys, + defaultVisibleColumnKeys, onChange, foot, }: IColumnsSelectorProps) { @@ -107,10 +107,10 @@ export default function ColumnsSelector({ > {t('component.columnsSelector.select')} - {resetColumnKeys && ( + {defaultVisibleColumnKeys && ( @@ -119,19 +119,28 @@ export default function ColumnsSelector({ ) const content = ( - - {filteredColumns.map((column) => ( - handleCheckChange(e, column)} - > - {column.name} - - ))} - +
+ + {filteredColumns.map((column) => ( + handleCheckChange(e, column)} + > + {column.name} + + ))} + {foot &&
{foot}
} - +
) return ( diff --git a/ui/lib/utils/tableColumnFactory.tsx b/ui/lib/utils/tableColumnFactory.tsx new file mode 100644 index 0000000000..ba43d41ba7 --- /dev/null +++ b/ui/lib/utils/tableColumnFactory.tsx @@ -0,0 +1,254 @@ +import { Tooltip } from 'antd' +import { max as _max, capitalize } from 'lodash' +import { + IColumn, + ColumnActionsMode, +} from 'office-ui-fabric-react/lib/DetailsList' +import React from 'react' +import { getValueFormat } from '@baurine/grafana-value-formats' + +import { + Bar, + Pre, + TextWithInfo, + TextWrap, + DateTime, + HighlightSQL, + IColumnKeys, +} from '@lib/components' + +type Bar = { [key: string]: keyof T } +type BarsConfig = { + displayTransKey?: string // it is same as avg field name default + bars: [Bar, Bar, Bar?] // [avg, max, min?] +} + +export type IColumnWithSourceFields = IColumn & { + sourceFields?: string[] +} + +export function formatVal(val: number, unit: string, decimals: number = 1) { + const formatFn = getValueFormat(unit) + return unit === 'short' ? formatFn(val, 0, decimals) : formatFn(val, decimals) +} + +export function commonColumnName(transPrefix: string, fieldName: string): any { + const fullTransKey = `${transPrefix}.${fieldName}` + return +} + +export class TableColumnFactory { + transPrefix: string + bar: BarColumn + + constructor(transKeyPrefix: string) { + this.transPrefix = transKeyPrefix + this.bar = new BarColumn(this) + } + + columnName(fieldName: string): any { + return commonColumnName(this.transPrefix, fieldName) + } + + columnFromField(fieldName: string) { + return { + name: this.columnName(fieldName), + key: fieldName, + fieldName: fieldName, + } + } + + textWithTooltip( + fieldName: T, + _rows?: U[] + ): IColumnWithSourceFields { + return { + ...this.columnFromField(fieldName), + minWidth: 100, + maxWidth: 150, + onRender: (rec: U) => ( + + {rec[fieldName]} + + ), + } + } + + singleBar( + fieldName: T, + unit: string, + rows?: U[] + ): IColumnWithSourceFields { + const capacity = rows ? _max(rows.map((v) => v[fieldName])) ?? 0 : 0 + return { + ...this.columnFromField(fieldName), + minWidth: 140, + maxWidth: 200, + columnActionsMode: ColumnActionsMode.clickable, + onRender: (rec: U) => { + const fmtVal = formatVal(rec[fieldName]!, unit) + return ( + + {fmtVal} + + ) + }, + } + } + + multipleBar( + barsConfig: BarsConfig, + unit: string, + rows?: T[] + ): IColumnWithSourceFields { + const { + displayTransKey, + bars: [avg_, max_, min_], + } = barsConfig + + const tooltioPrefixLens: number[] = [] + const avg = { + fieldName: Object.values(avg_)[0], + tooltipPrefix: Object.keys(avg_)[0], + } + tooltioPrefixLens.push(avg.tooltipPrefix.length) + const max = { + fieldName: Object.values(max_)[0], + tooltipPrefix: Object.keys(max_)[0], + } + tooltioPrefixLens.push(max.tooltipPrefix.length) + let min + if (min_) { + min = { + fieldName: Object.values(min_)[0], + tooltipPrefix: Object.keys(min_)[0], + } + tooltioPrefixLens.push(min.tooltipPrefix.length) + } else { + min = undefined + } + const maxTooltipPrefixLen = _max(tooltioPrefixLens) || 0 + + const capacity = rows ? _max(rows.map((v) => v[max.fieldName])) ?? 
0 : 0 + let sourceFields = [avg.fieldName, max.fieldName] as string[] + if (min) { + sourceFields.push(min.fieldName) + } + return { + ...this.columnFromField(avg.fieldName as string), + name: this.columnName((displayTransKey || avg.fieldName) as string), + sourceFields, + minWidth: 140, + maxWidth: 200, + columnActionsMode: ColumnActionsMode.clickable, + onRender: (rec) => { + const avgVal = rec[avg.fieldName] + const maxVal = rec[max.fieldName] + const minVal = min ? rec[min.fieldName] : undefined + const tooltips = [avg, min, max] + .filter((el) => el !== undefined) + .map((bar) => { + const prefix = capitalize(bar!.tooltipPrefix + ':').padEnd( + maxTooltipPrefixLen + 2 + ) + const fmtVal = formatVal(rec[bar!.fieldName], unit) + return `${prefix}${fmtVal}` + }) + .join('\n') + return ( + {tooltips.trim()}}> + + {formatVal(avgVal, unit)} + + + ) + }, + } + } + + timestamp( + fieldName: T, + _rows?: U[] + ): IColumnWithSourceFields { + return { + ...this.columnFromField(fieldName), + minWidth: 100, + maxWidth: 150, + columnActionsMode: ColumnActionsMode.clickable, + onRender: (rec: U) => ( + + + + ), + } + } + + sqlText( + fieldName: T, + showFullSQL?: boolean, + _rows?: U[] + ): IColumnWithSourceFields { + return { + ...this.columnFromField(fieldName), + minWidth: 100, + maxWidth: 500, + isMultiline: showFullSQL, + onRender: (rec: U) => + showFullSQL ? ( + + + + ) : ( + } + placement="right" + > + + + + + ), + } + } +} + +export class BarColumn { + constructor(public factory: TableColumnFactory) {} + + single( + fieldName: T, + unit: string, + rows?: U[] + ) { + return this.factory.singleBar(fieldName, unit, rows) + } + + multiple(bars: BarsConfig, unit: string, rows?: T[]) { + return this.factory.multipleBar(bars, unit, rows) + } +} + +//////////////////////////////////////////// + +export function getSelectedFields( + visibleColumnKeys: IColumnKeys, + columns: IColumnWithSourceFields[] +) { + let fields: string[] = [] + columns.forEach((c) => { + if (visibleColumnKeys[c.key] === true) { + if (c.sourceFields !== undefined) { + fields = fields.concat(c.sourceFields) + } else { + fields.push(c.key) + } + } + }) + return fields +} diff --git a/ui/lib/utils/tableColumns.tsx b/ui/lib/utils/tableColumns.tsx index 581613a542..b37a7479dc 100644 --- a/ui/lib/utils/tableColumns.tsx +++ b/ui/lib/utils/tableColumns.tsx @@ -5,8 +5,9 @@ import React from 'react' import { useTranslation } from 'react-i18next' import { getValueFormat } from '@baurine/grafana-value-formats' -import { Bar, Pre, TextWithInfo } from '@lib/components' +import { Bar, Pre } from '@lib/components' import { addTranslationResource } from './i18n' +import { commonColumnName } from './tableColumnFactory' const translations = { en: { @@ -49,15 +50,12 @@ function TransText({ return {t(transKey, opt)} } -function commonColumnName(fieldName: string): any { - return ( - - ) -} +//////////////////////////////////// +const TRANS_KEY_PREFIX = 'component.commonColumn' function fieldsKeyColumn(transKeyPrefix: string): IColumn { return { - name: commonColumnName('name'), + name: commonColumnName(TRANS_KEY_PREFIX, 'name'), key: 'key', minWidth: 150, maxWidth: 250, @@ -72,7 +70,7 @@ function fieldsKeyColumn(transKeyPrefix: string): IColumn { function fieldsValueColumn(): IColumn { return { - name: commonColumnName('value'), + name: commonColumnName(TRANS_KEY_PREFIX, 'value'), key: 'value', fieldName: 'value', minWidth: 150, @@ -87,7 +85,7 @@ function fieldsTimeValueColumn( ? max(rows.map((v) => max([v.max, v.min, v.avg, v.value]))) ?? 
0 : 0 return { - name: commonColumnName('time'), + name: commonColumnName(TRANS_KEY_PREFIX, 'time'), key: 'time', minWidth: 150, maxWidth: 200, @@ -130,7 +128,7 @@ function fieldsTimeValueColumn( function fieldsDescriptionColumn(transKeyPrefix: string): IColumn { return { - name: commonColumnName('desc'), + name: commonColumnName(TRANS_KEY_PREFIX, 'desc'), key: 'description', minWidth: 150, maxWidth: 300, From 5952b7ad6a56111271559b07c6981d5f86eea520 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=9C=C9=B4=D0=B2=CA=8F=D1=82=E1=B4=87?= Date: Tue, 13 Oct 2020 14:53:12 +0800 Subject: [PATCH 06/29] ui: improve the expansion and collapse of sider menu (#767) --- ui/dashboardApp/layout/main/Sider/Banner.tsx | 22 +++---- .../layout/main/Sider/index.module.less | 7 +- ui/dashboardApp/layout/main/Sider/index.tsx | 55 +++++++++------- ui/dashboardApp/layout/main/index.module.less | 24 ++++--- ui/dashboardApp/layout/main/index.tsx | 65 +------------------ 5 files changed, 60 insertions(+), 113 deletions(-) diff --git a/ui/dashboardApp/layout/main/Sider/Banner.tsx b/ui/dashboardApp/layout/main/Sider/Banner.tsx index 4c9b85ad07..25ec615a30 100644 --- a/ui/dashboardApp/layout/main/Sider/Banner.tsx +++ b/ui/dashboardApp/layout/main/Sider/Banner.tsx @@ -1,8 +1,7 @@ import React, { useMemo } from 'react' -import { MenuUnfoldOutlined, MenuFoldOutlined } from '@ant-design/icons' +import { MenuFoldOutlined, MenuUnfoldOutlined } from '@ant-design/icons' import { useSize } from '@umijs/hooks' import Flexbox from '@g07cha/flexbox-react' -import { useSpring, animated } from 'react-spring' import { useClientRequest } from '@lib/utils/useClientRequest' import client, { InfoInfoResponse } from '@lib/client' @@ -46,14 +45,14 @@ export default function ToggleBanner({ onToggle, }) { const [bannerSize, bannerRef] = useSize() - const transBanner = useSpring({ + const bannerStyle = { opacity: collapsed ? 0 : 1, height: collapsed ? toggleHeight : bannerSize.height || 0, - }) - const transButton = useSpring({ + } + const buttonStyle = { left: collapsed ? 0 : fullWidth - toggleWidth, width: collapsed ? collapsedWidth : toggleWidth, - }) + } const { data, isLoading } = useClientRequest((reqConfig) => client.getInstance().infoGet(reqConfig) @@ -68,10 +67,7 @@ export default function ToggleBanner({ return (
- +
- - +
+
{collapsed ? ( ) : ( )} - +
) } diff --git a/ui/dashboardApp/layout/main/Sider/index.module.less b/ui/dashboardApp/layout/main/Sider/index.module.less index 67dbe8cd8b..5d4843a92a 100644 --- a/ui/dashboardApp/layout/main/Sider/index.module.less +++ b/ui/dashboardApp/layout/main/Sider/index.module.less @@ -2,10 +2,11 @@ @sider-background: #f7f7fa; +.wrapper { + transition: width 444ms ease; +} + .sider { - position: fixed; - left: 0; - top: 0; height: 100%; z-index: 1; background: linear-gradient(@sider-background, #ebeffa); diff --git a/ui/dashboardApp/layout/main/Sider/index.tsx b/ui/dashboardApp/layout/main/Sider/index.tsx index 3c222823ff..68fe971363 100644 --- a/ui/dashboardApp/layout/main/Sider/index.tsx +++ b/ui/dashboardApp/layout/main/Sider/index.tsx @@ -1,10 +1,9 @@ -import React, { useState, useMemo } from 'react' -import { ExperimentOutlined, BugOutlined } from '@ant-design/icons' +import React, { useCallback, useMemo, useState } from 'react' +import { BugOutlined, ExperimentOutlined } from '@ant-design/icons' import { Layout, Menu } from 'antd' import { Link } from 'react-router-dom' import { useEventListener } from '@umijs/hooks' import { useTranslation } from 'react-i18next' -import { useSpring, animated } from 'react-spring' import client from '@lib/client' import Banner from './Banner' @@ -18,10 +17,9 @@ function useAppMenuItem(registry, appId, title?: string) { return null } return ( - + : null}> - {app.icon ? : null} - {title ? title : t(`${appId}.nav_title`, appId)} + {title ? title : t(`${appId}.nav_title`, appId)} ) @@ -38,6 +36,12 @@ function useActiveAppId(registry) { return appId } +function triggerResizeEvent() { + const event = document.createEvent('HTMLEvents') + event.initEvent('resize', true, false) + window.dispatchEvent(event) +} + function Sider({ registry, fullWidth, @@ -61,12 +65,8 @@ function Sider({ const debugSubMenu = ( - - {t('nav.sider.debug')} - - } + icon={} + title={t('nav.sider.debug')} > {debugSubMenuItems} @@ -79,12 +79,8 @@ function Sider({ const experimentalSubMenu = ( - - {t('nav.sider.experimental')} - - } + icon={} + title={t('nav.sider.experimental')} > {experimentalSubMenuItems} @@ -115,9 +111,9 @@ function Sider({ useAppMenuItem(registry, 'user_profile', displayName), ] - const transSider = useSpring({ + const siderStyle = { width: collapsed ? collapsedWidth : fullWidth, - }) + } const defaultOpenKeys = useMemo(() => { if (defaultCollapsed) { @@ -127,8 +123,17 @@ function Sider({ } }, [defaultCollapsed]) + const wrapperRef = useCallback((wrapper) => { + if (wrapper !== null) { + wrapper.addEventListener('transitionend', (e) => { + if (e.target !== wrapper || e.propertyName !== 'width') return + triggerResizeEvent() + }) + } + }, []) + return ( - +
{extraMenuItems} - +
) } diff --git a/ui/dashboardApp/layout/main/index.module.less b/ui/dashboardApp/layout/main/index.module.less index 4c050027a6..05709a45e0 100644 --- a/ui/dashboardApp/layout/main/index.module.less +++ b/ui/dashboardApp/layout/main/index.module.less @@ -1,14 +1,28 @@ @import '~antd/es/style/themes/default.less'; .container { + display: flex; + position: fixed; + top: 0; + bottom: 0; + right: 0; + left: 0; height: 100vh; + width: 100vw; } .content { position: relative; + + flex: 1; + z-index: 3; background: #fff; min-height: 100vh; + box-shadow: 0 0 30px rgba(#000, 0.15); + + overflow-x: hidden; + overflow-y: auto; &:before, &:after { @@ -17,13 +31,3 @@ display: table; } } - -.contentBack { - position: fixed; - z-index: 2; - background: #fff; - top: 0; - height: 100%; - right: 0; - box-shadow: 0 0 30px rgba(#000, 0.15); -} diff --git a/ui/dashboardApp/layout/main/index.tsx b/ui/dashboardApp/layout/main/index.tsx index 5a23210942..594a8702da 100644 --- a/ui/dashboardApp/layout/main/index.tsx +++ b/ui/dashboardApp/layout/main/index.tsx @@ -1,45 +1,14 @@ -import React, { useState, useCallback, useEffect } from 'react' +import React, { useCallback, useState } from 'react' import { Root } from '@lib/components' import { useLocalStorageState } from '@umijs/hooks' import { HashRouter as Router } from 'react-router-dom' -import { useSpring, animated } from 'react-spring' +import { animated, useSpring } from 'react-spring' import Sider from './Sider' import styles from './index.module.less' const siderWidth = 260 const siderCollapsedWidth = 80 -const collapsedContentOffset = siderCollapsedWidth - siderWidth -const contentOffsetTrigger = collapsedContentOffset * 0.99 - -function triggerResizeEvent() { - const event = document.createEvent('HTMLEvents') - event.initEvent('resize', true, false) - window.dispatchEvent(event) -} - -const useContentLeftOffset = (collapsed) => { - const [offset, setOffset] = useState(siderWidth) - const onAnimationStart = useCallback(() => { - if (!collapsed) { - setOffset(siderWidth) - } - }, [collapsed]) - const onAnimationFrame = useCallback( - ({ x }) => { - if (collapsed && x < contentOffsetTrigger) { - setOffset(siderCollapsedWidth) - } - }, - [collapsed] - ) - useEffect(triggerResizeEvent, [offset]) - return { - contentLeftOffset: offset, - onAnimationStart, - onAnimationFrame, - } -} export default function App({ registry }) { const [collapsed, setCollapsed] = useLocalStorageState( @@ -47,16 +16,6 @@ export default function App({ registry }) { false ) const [defaultCollapsed] = useState(collapsed) - const { - contentLeftOffset, - onAnimationStart, - onAnimationFrame, - } = useContentLeftOffset(collapsed) - const transContentBack = useSpring({ - x: collapsed ? collapsedContentOffset : 0, - onStart: onAnimationStart, - onFrame: onAnimationFrame, - }) const transContainer = useSpring({ opacity: 1, from: { opacity: 0 }, @@ -84,27 +43,9 @@ export default function App({ registry }) { collapsedWidth={siderCollapsedWidth} animationDelay={0} /> - `translate3d(${x}px, 0, 0)` - ), - }} - > )} -
+
From 088efcf30b0151303e32ceacf7fecc0d15bb82c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=9C=C9=B4=D0=B2=CA=8F=D1=82=E1=B4=87?= Date: Thu, 22 Oct 2020 11:14:49 +0800 Subject: [PATCH 07/29] ui: memorize expand/collapse full text in detail pages (#775) --- ui/lib/apps/SlowQuery/pages/Detail/index.tsx | 66 +++++++++++-------- .../Statement/pages/Detail/PlanDetail.tsx | 60 ++++++++++------- ui/lib/apps/Statement/pages/Detail/index.tsx | 14 ++-- 3 files changed, 85 insertions(+), 55 deletions(-) diff --git a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx index 5ffe5a5650..1f47b2e777 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx @@ -1,25 +1,25 @@ import React from 'react' import { Space } from 'antd' import { useTranslation } from 'react-i18next' -import { useLocation, Link } from 'react-router-dom' +import { Link, useLocation } from 'react-router-dom' import { ArrowLeftOutlined } from '@ant-design/icons' -import { useToggle } from '@umijs/hooks' +import { useLocalStorageState } from '@umijs/hooks' import client from '@lib/client' import { useClientRequest } from '@lib/utils/useClientRequest' -import { parseQueryFn, buildQueryFn } from '@lib/utils/query' +import { buildQueryFn, parseQueryFn } from '@lib/utils/query' import formatSql from '@lib/utils/formatSql' import { - Head, - Descriptions, - TextWithInfo, - Pre, - HighlightSQL, - Expand, - CopyLink, - CardTabs, AnimatedSkeleton, + CardTabs, + CopyLink, + Descriptions, ErrorBar, + Expand, + Head, + HighlightSQL, + Pre, + TextWithInfo, } from '@lib/components' import TabBasic from './DetailTabBasic' import TabTime from './DetailTabTime' @@ -32,6 +32,8 @@ export interface IPageQuery { timestamp?: number } +const SLOW_QUERY_DETAIL_EXPAND = 'slow_query.detail_expand' + function DetailPage() { const query = DetailPage.parseQuery(useLocation().search) @@ -48,11 +50,21 @@ function DetailPage() { ) ) - const { state: sqlExpanded, toggle: toggleSqlExpanded } = useToggle(false) - const { state: prevSqlExpanded, toggle: togglePrevSqlExpanded } = useToggle( - false + const [detailExpand, setDetailExpand] = useLocalStorageState( + SLOW_QUERY_DETAIL_EXPAND, + { + prev_query: false, + query: false, + plan: false, + } ) - const { state: planExpanded, toggle: togglePlanExpanded } = useToggle(false) + + const togglePrevQuery = () => + setDetailExpand((prev) => ({ ...prev, prev_query: !prev.prev_query })) + const toggleQuery = () => + setDetailExpand((prev) => ({ ...prev, query: !prev.query })) + const togglePlan = () => + setDetailExpand((prev) => ({ ...prev, plan: !prev.plan })) return (
@@ -71,20 +83,20 @@ function DetailPage() { toggleSqlExpanded()} + expanded={detailExpand.query} + onClick={toggleQuery} /> } > } @@ -97,20 +109,20 @@ function DetailPage() { return ( togglePrevSqlExpanded()} + expanded={detailExpand.prev_query} + onClick={togglePrevQuery} /> } > } @@ -122,19 +134,19 @@ function DetailPage() { })()} togglePlanExpanded()} + expanded={detailExpand.plan} + onClick={togglePlan} /> } > - +
{data.plan}
diff --git a/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx b/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx index ef16819a97..b686c63345 100644 --- a/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx +++ b/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx @@ -1,18 +1,18 @@ import React from 'react' import { Space } from 'antd' -import { useToggle } from '@umijs/hooks' +import { useLocalStorageState } from '@umijs/hooks' import { useTranslation } from 'react-i18next' import { + AnimatedSkeleton, Card, - Descriptions, - HighlightSQL, - TextWithInfo, - Pre, CardTabs, - Expand, CopyLink, - AnimatedSkeleton, + Descriptions, ErrorBar, + Expand, + HighlightSQL, + Pre, + TextWithInfo, } from '@lib/components' import { useClientRequest } from '@lib/utils/useClientRequest' import client from '@lib/client' @@ -34,6 +34,8 @@ export interface IPlanDetailProps { query: IQuery } +const STMT_DETAIL_PLAN_EXPAND = 'statement.detail_plan_expand' + function PlanDetail({ query }: IPlanDetailProps) { const { t } = useTranslation() const { data, isLoading, error } = useClientRequest((reqConfig) => @@ -48,11 +50,22 @@ function PlanDetail({ query }: IPlanDetailProps) { reqConfig ) ) - const { state: sqlExpanded, toggle: toggleSqlExpanded } = useToggle(false) - const { state: prevSqlExpanded, toggle: togglePrevSqlExpanded } = useToggle( - false + + const [detailExpand, setDetailExpand] = useLocalStorageState( + STMT_DETAIL_PLAN_EXPAND, + { + prev_query: false, + query: false, + plan: false, + } ) - const { state: planExpanded, toggle: togglePlanExpanded } = useToggle(false) + + const togglePrevQuery = () => + setDetailExpand((prev) => ({ ...prev, prev_query: !prev.prev_query })) + const toggleQuery = () => + setDetailExpand((prev) => ({ ...prev, query: !prev.query })) + const togglePlan = () => + setDetailExpand((prev) => ({ ...prev, plan: !prev.plan })) let title_key if (query.allPlans === 1) { @@ -62,7 +75,6 @@ function PlanDetail({ query }: IPlanDetailProps) { } else { title_key = 'some' } - return ( toggleSqlExpanded()} + expanded={detailExpand.query} + onClick={toggleQuery} /> } > } @@ -100,20 +112,20 @@ function PlanDetail({ query }: IPlanDetailProps) { {data.prev_sample_text ? ( togglePrevSqlExpanded()} + expanded={detailExpand.prev_query} + onClick={togglePrevQuery} /> } > } @@ -124,19 +136,19 @@ function PlanDetail({ query }: IPlanDetailProps) { ) : null} togglePlanExpanded()} + expanded={detailExpand.plan} + onClick={togglePlan} /> } > - +
{data.plan}
diff --git a/ui/lib/apps/Statement/pages/Detail/index.tsx b/ui/lib/apps/Statement/pages/Detail/index.tsx index 858747e8b0..8f97175764 100644 --- a/ui/lib/apps/Statement/pages/Detail/index.tsx +++ b/ui/lib/apps/Statement/pages/Detail/index.tsx @@ -5,7 +5,7 @@ import React, { useEffect, useMemo, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' import { Link, useLocation } from 'react-router-dom' import { ArrowLeftOutlined } from '@ant-design/icons' -import { useToggle } from '@umijs/hooks' +import { useLocalStorageState } from '@umijs/hooks' import client, { StatementModel } from '@lib/client' import { @@ -13,11 +13,11 @@ import { CardTable, DateTime, Descriptions, + ErrorBar, Expand, Head, HighlightSQL, TextWithInfo, - ErrorBar, } from '@lib/components' import CopyLink from '@lib/components/CopyLink' import formatSql from '@lib/utils/formatSql' @@ -34,6 +34,8 @@ export interface IPageQuery { endTime?: number } +const STMT_DETAIL_EXPAND = 'statement.detail_expand' + function DetailPage() { const query = DetailPage.parseQuery(useLocation().search) const { data: plans, isLoading, error } = useClientRequest((reqConfig) => @@ -60,7 +62,11 @@ function DetailPage() { }) ) - const { state: sqlExpanded, toggle: toggleSqlExpanded } = useToggle(false) + const [sqlExpanded, setSqlExpanded] = useLocalStorageState( + STMT_DETAIL_EXPAND, + false + ) + const toggleSqlExpanded = () => setSqlExpanded((prev) => !prev) useEffect(() => { if (plans && plans.length > 0) { @@ -91,7 +97,7 @@ function DetailPage() { toggleSqlExpanded()} + onClick={toggleSqlExpanded} /> From 6570ddb98c2d95797717d83b21211b2eace856c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=9C=C9=B4=D0=B2=CA=8F=D1=82=E1=B4=87?= Date: Thu, 22 Oct 2020 11:35:05 +0800 Subject: [PATCH 08/29] ui: break loop dependencies (#771) --- ui/lib/apps/SlowQuery/utils/tableColumns.tsx | 39 ++- .../utils/useSlowQueryTableController.ts | 16 +- ui/lib/apps/Statement/utils/tableColumns.tsx | 260 ++++++------------ .../utils/useStatementTableController.ts | 16 +- ui/lib/utils/tableColumnFactory.tsx | 123 +++++---- 5 files changed, 188 insertions(+), 266 deletions(-) diff --git a/ui/lib/apps/SlowQuery/utils/tableColumns.tsx b/ui/lib/apps/SlowQuery/utils/tableColumns.tsx index d3ca0e48bc..72e11399b3 100644 --- a/ui/lib/apps/SlowQuery/utils/tableColumns.tsx +++ b/ui/lib/apps/SlowQuery/utils/tableColumns.tsx @@ -18,6 +18,21 @@ function ResultStatusBadge({ status }: { status: 'success' | 'error' }) { ////////////////////////////////////////// const TRANS_KEY_PREFIX = 'slow_query.fields' +export const derivedFields = { + cop_proc_avg: [ + { tooltipPrefix: 'mean', fieldName: 'cop_proc_avg' }, + { tooltipPrefix: 'max', fieldName: 'cop_proc_max' }, + { tooltipPrefix: 'p90', fieldName: 'cop_proc_p90' }, + ], + cop_wait_avg: [ + { tooltipPrefix: 'mean', fieldName: 'cop_wait_avg' }, + { tooltipPrefix: 'max', fieldName: 'cop_wait_max' }, + { tooltipPrefix: 'p90', fieldName: 'cop_wait_p90' }, + ], +} + +////////////////////////////////////////// + export function slowQueryColumns( rows: SlowquerySlowQuery[], showFullSQL?: boolean @@ -73,28 +88,8 @@ export function slowQueryColumns( tcf.bar.single('commit_backoff_time', 'ns', rows), tcf.bar.single('resolve_lock_time', 'ns', rows), // cop - tcf.bar.multiple( - { - bars: [ - { mean: 'cop_proc_avg' }, - { max: 'cop_proc_max' }, - { p90: 'cop_proc_p90' }, - ], - }, - 'ns', - rows - ), - tcf.bar.multiple( - { - bars: [ - { mean: 'cop_wait_avg' }, - { max: 'cop_wait_avg' }, - { p90: 'cop_wait_avg' }, - ], 
- }, - 'ns', - rows - ), + tcf.bar.multiple({ sources: derivedFields.cop_proc_avg }, 'ns', rows), + tcf.bar.multiple({ sources: derivedFields.cop_wait_avg }, 'ns', rows), // transaction tcf.bar.single('write_keys', 'short', rows), tcf.bar.single('write_size', 'bytes', rows), diff --git a/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts b/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts index 9e35fdc9f2..dcb4370bc7 100644 --- a/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts +++ b/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts @@ -6,7 +6,7 @@ import client, { ErrorStrategy, SlowquerySlowQuery } from '@lib/client' import { calcTimeRange, TimeRange, IColumnKeys } from '@lib/components' import useOrderState, { IOrderOptions } from '@lib/utils/useOrderState' -import { slowQueryColumns } from './tableColumns' +import { derivedFields, slowQueryColumns } from './tableColumns' import { getSelectedFields } from '@lib/utils/tableColumnFactory' export const DEF_SLOW_QUERY_COLUMN_KEYS: IColumnKeys = { @@ -123,18 +123,16 @@ export default function useSlowQueryTableController( querySchemas() }, []) - // Notice: slowQueries, tableColumns, selectedFields make loop dependencies + const selectedFields = useMemo( + () => getSelectedFields(visibleColumnKeys, derivedFields).join(','), + [visibleColumnKeys] + ) + const tableColumns = useMemo( () => slowQueryColumns(slowQueries, showFullSQL), [slowQueries, showFullSQL] ) - // make selectedFields as a string instead of an array to avoid infinite loop - // I have verified that it will cause infinite loop if we return selectedFields as an array - // so it is better to use the basic type (string, number...) instead of object as the dependency - const selectedFields = useMemo( - () => getSelectedFields(visibleColumnKeys, tableColumns).join(','), - [visibleColumnKeys, tableColumns] - ) + useEffect(() => { async function getSlowQueryList() { setLoadingSlowQueries(true) diff --git a/ui/lib/apps/Statement/utils/tableColumns.tsx b/ui/lib/apps/Statement/utils/tableColumns.tsx index ea901ead38..94bdab4875 100644 --- a/ui/lib/apps/Statement/utils/tableColumns.tsx +++ b/ui/lib/apps/Statement/utils/tableColumns.tsx @@ -10,9 +10,9 @@ import { orange, red } from '@ant-design/colors' import { StatementModel } from '@lib/client' import { Bar, Pre } from '@lib/components' import { - TableColumnFactory, formatVal, - IColumnWithSourceFields, + genDerivedBarSources, + TableColumnFactory, } from '@lib/utils/tableColumnFactory' /////////////////////////////////////// @@ -20,27 +20,76 @@ import { // slow query order list in backend by key of IColumn const TRANS_KEY_PREFIX = 'statement.fields' +export const derivedFields = { + avg_latency: genDerivedBarSources( + 'avg_latency', + 'max_latency', + 'min_latency' + ), + parse_latency: genDerivedBarSources('avg_parse_latency', 'max_parse_latency'), + compile_latency: genDerivedBarSources( + 'avg_compile_latency', + 'max_compile_latency' + ), + process_time: genDerivedBarSources( + 'avg_cop_process_time', + 'max_cop_process_time' + ), + wait_time: genDerivedBarSources('avg_cop_wait_time', 'max_cop_wait_time'), + total_process_time: genDerivedBarSources( + 'avg_process_time', + 'max_process_time' + ), + total_wait_time: genDerivedBarSources('avg_wait_time', 'max_wait_time'), + backoff_time: genDerivedBarSources('avg_backoff_time', 'max_backoff_time'), + avg_write_keys: genDerivedBarSources('avg_write_keys', 'max_write_keys'), + avg_processed_keys: genDerivedBarSources( + 'avg_processed_keys', + 
'max_processed_keys' + ), + avg_total_keys: genDerivedBarSources('avg_total_keys', 'max_total_keys'), + prewrite_time: genDerivedBarSources('avg_prewrite_time', 'max_prewrite_time'), + commit_time: genDerivedBarSources('avg_commit_time', 'max_commit_time'), + get_commit_ts_time: genDerivedBarSources( + 'avg_get_commit_ts_time', + 'max_get_commit_ts_time' + ), + commit_backoff_time: genDerivedBarSources( + 'avg_commit_backoff_time', + 'max_commit_backoff_time' + ), + resolve_lock_time: genDerivedBarSources( + 'avg_resolve_lock_time', + 'max_resolve_lock_time' + ), + local_latch_wait_time: genDerivedBarSources( + 'avg_local_latch_wait_time', + 'max_local_latch_wait_time' + ), + avg_write_size: genDerivedBarSources('avg_write_size', 'max_write_size'), + avg_prewrite_regions: genDerivedBarSources( + 'avg_prewrite_regions', + 'max_prewrite_regions' + ), + avg_txn_retry: genDerivedBarSources('avg_txn_retry', 'max_txn_retry'), + avg_mem: genDerivedBarSources('avg_mem', 'max_mem'), + sum_errors: ['sum_errors', 'sum_warnings'], + related_schemas: ['table_names'], +} + +////////////////////////////////////////// + function avgMinMaxLatencyColumn( tcf: TableColumnFactory, rows?: { max_latency?: number; min_latency?: number; avg_latency?: number }[] ): IColumn { - return tcf.bar.multiple( - { - bars: [ - { mean: 'avg_latency' }, - { max: 'max_latency' }, - { min: 'min_latency' }, - ], - }, - 'ns', - rows - ) + return tcf.bar.multiple({ sources: derivedFields.avg_latency }, 'ns', rows) } function errorsWarningsColumn( tcf: TableColumnFactory, rows?: { sum_errors?: number; sum_warnings?: number }[] -): IColumnWithSourceFields { +): IColumn { const capacity = rows ? max(rows.map((v) => v.sum_errors! + v.sum_warnings!)) ?? 0 : 0 @@ -49,7 +98,6 @@ function errorsWarningsColumn( name: tcf.columnName('errors_warnings'), key, fieldName: key, - sourceFields: ['sum_errors', 'sum_warnings'], minWidth: 140, maxWidth: 200, columnActionsMode: ColumnActionsMode.clickable, @@ -80,8 +128,6 @@ Warnings: ${warningsFmtVal}` function avgMaxColumn( tcf: TableColumnFactory, - avgKey: keyof T, - maxKey: keyof T, displayTransKey: string, unit: string, rows?: T[] @@ -89,7 +135,7 @@ function avgMaxColumn( return tcf.bar.multiple( { displayTransKey, - bars: [{ mean: avgKey }, { max: maxKey }], + sources: derivedFields[displayTransKey], }, unit, rows @@ -101,7 +147,7 @@ function avgMaxColumn( export function statementColumns( rows: StatementModel[], showFullSQL?: boolean -): IColumnWithSourceFields[] { +): IColumn[] { const tcf = new TableColumnFactory(TRANS_KEY_PREFIX) return [ @@ -116,161 +162,28 @@ export function statementColumns( maxWidth: 300, columnActionsMode: ColumnActionsMode.clickable, }, - avgMaxColumn(tcf, 'avg_mem', 'max_mem', 'avg_mem', 'bytes', rows), + avgMaxColumn(tcf, 'avg_mem', 'bytes', rows), errorsWarningsColumn(tcf, rows), - avgMaxColumn( - tcf, - 'avg_parse_latency', - 'max_parse_latency', - 'parse_latency', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_compile_latency', - 'max_compile_latency', - 'compile_latency', - 'ns', - rows - ), + avgMaxColumn(tcf, 'parse_latency', 'ns', rows), + avgMaxColumn(tcf, 'compile_latency', 'ns', rows), tcf.bar.single('sum_cop_task_num', 'short', rows), - avgMaxColumn( - tcf, - 'avg_cop_process_time', - 'max_cop_process_time', - 'process_time', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_cop_wait_time', - 'max_cop_wait_time', - 'wait_time', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_process_time', - 'max_process_time', - 'total_process_time', - 'ns', - rows 
- ), - avgMaxColumn( - tcf, - 'avg_wait_time', - 'max_wait_time', - 'total_wait_time', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_backoff_time', - 'max_backoff_time', - 'backoff_time', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_write_keys', - 'max_write_keys', - 'avg_write_keys', - 'short', - rows - ), - avgMaxColumn( - tcf, - 'avg_processed_keys', - 'max_processed_keys', - 'avg_processed_keys', - 'short', - rows - ), - avgMaxColumn( - tcf, - 'avg_total_keys', - 'max_total_keys', - 'avg_total_keys', - 'short', - rows - ), - avgMaxColumn( - tcf, - 'avg_prewrite_time', - 'max_prewrite_time', - 'prewrite_time', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_commit_time', - 'max_commit_time', - 'commit_time', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_get_commit_ts_time', - 'max_get_commit_ts_time', - 'get_commit_ts_time', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_commit_backoff_time', - 'max_commit_backoff_time', - 'commit_backoff_time', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_resolve_lock_time', - 'max_resolve_lock_time', - 'resolve_lock_time', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_local_latch_wait_time', - 'max_local_latch_wait_time', - 'local_latch_wait_time', - 'ns', - rows - ), - avgMaxColumn( - tcf, - 'avg_write_size', - 'max_write_size', - 'avg_write_size', - 'bytes', - rows - ), - avgMaxColumn( - tcf, - 'avg_prewrite_regions', - 'max_prewrite_regions', - 'avg_prewrite_regions', - 'short', - rows - ), - avgMaxColumn( - tcf, - 'avg_txn_retry', - 'max_txn_retry', - 'avg_txn_retry', - 'short', - rows - ), + avgMaxColumn(tcf, 'process_time', 'ns', rows), + avgMaxColumn(tcf, 'wait_time', 'ns', rows), + avgMaxColumn(tcf, 'total_process_time', 'ns', rows), + avgMaxColumn(tcf, 'total_wait_time', 'ns', rows), + avgMaxColumn(tcf, 'backoff_time', 'ns', rows), + avgMaxColumn(tcf, 'avg_write_keys', 'short', rows), + avgMaxColumn(tcf, 'avg_processed_keys', 'short', rows), + avgMaxColumn(tcf, 'avg_total_keys', 'short', rows), + avgMaxColumn(tcf, 'prewrite_time', 'ns', rows), + avgMaxColumn(tcf, 'commit_time', 'ns', rows), + avgMaxColumn(tcf, 'get_commit_ts_time', 'ns', rows), + avgMaxColumn(tcf, 'commit_backoff_time', 'ns', rows), + avgMaxColumn(tcf, 'resolve_lock_time', 'ns', rows), + avgMaxColumn(tcf, 'local_latch_wait_time', 'ns', rows), + avgMaxColumn(tcf, 'avg_write_size', 'bytes', rows), + avgMaxColumn(tcf, 'avg_prewrite_regions', 'short', rows), + avgMaxColumn(tcf, 'avg_txn_retry', 'short', rows), tcf.bar.single('sum_backoff_times', 'short', rows), tcf.bar.single('avg_affected_rows', 'short', rows), @@ -292,7 +205,6 @@ export function statementColumns( ...tcf.textWithTooltip('related_schemas', rows), minWidth: 160, maxWidth: 240, - sourceFields: ['table_names'], }, ] } @@ -309,6 +221,6 @@ export function planColumns(rows: StatementModel[]): IColumn[] { tcf.bar.single('sum_latency', 'ns', rows), avgMinMaxLatencyColumn(tcf, rows), tcf.bar.single('exec_count', 'short', rows), - avgMaxColumn(tcf, 'avg_mem', 'max_mem', 'avg_mem', 'bytes', rows), + avgMaxColumn(tcf, 'avg_mem', 'bytes', rows), ] } diff --git a/ui/lib/apps/Statement/utils/useStatementTableController.ts b/ui/lib/apps/Statement/utils/useStatementTableController.ts index 876549dc02..ddfbf41b0d 100644 --- a/ui/lib/apps/Statement/utils/useStatementTableController.ts +++ b/ui/lib/apps/Statement/utils/useStatementTableController.ts @@ -15,7 +15,7 @@ import { DEFAULT_TIME_RANGE, TimeRange, } from '../pages/List/TimeRangeSelector' -import { statementColumns } from './tableColumns' +import { 
derivedFields, statementColumns } from './tableColumns' import { getSelectedFields } from '@lib/utils/tableColumnFactory' export const DEF_STMT_COLUMN_KEYS: IColumnKeys = { @@ -174,18 +174,16 @@ export default function useStatementTableController( queryStmtTypes() }, [refreshTimes]) - // Notice: statements, tableColumns, selectedFields make loop dependencies + const selectedFields = useMemo( + () => getSelectedFields(visibleColumnKeys, derivedFields).join(','), + [visibleColumnKeys] + ) + const tableColumns = useMemo( () => statementColumns(statements, showFullSQL), [statements, showFullSQL] ) - // make selectedFields as a string instead of an array to avoid infinite loop - // I have verified that it will cause infinite loop if we return selectedFields as an array - // so it is better to use the basic type (string, number...) instead of object as the dependency - const selectedFields = useMemo( - () => getSelectedFields(visibleColumnKeys, tableColumns).join(','), - [visibleColumnKeys, tableColumns] - ) + useEffect(() => { async function queryStatementList() { if (allTimeRanges.length === 0) { diff --git a/ui/lib/utils/tableColumnFactory.tsx b/ui/lib/utils/tableColumnFactory.tsx index ba43d41ba7..9abb88f56a 100644 --- a/ui/lib/utils/tableColumnFactory.tsx +++ b/ui/lib/utils/tableColumnFactory.tsx @@ -17,15 +17,17 @@ import { IColumnKeys, } from '@lib/components' -type Bar = { [key: string]: keyof T } -type BarsConfig = { +export type DerivedField = { displayTransKey?: string // it is same as avg field name default - bars: [Bar, Bar, Bar?] // [avg, max, min?] + sources: T[] } -export type IColumnWithSourceFields = IColumn & { - sourceFields?: string[] -} +export type DerivedBar = DerivedField<{ + tooltipPrefix: string + fieldName: string +}> + +export type DerivedCol = DerivedField export function formatVal(val: number, unit: string, decimals: number = 1) { const formatFn = getValueFormat(unit) @@ -61,7 +63,7 @@ export class TableColumnFactory { textWithTooltip( fieldName: T, _rows?: U[] - ): IColumnWithSourceFields { + ): IColumn { return { ...this.columnFromField(fieldName), minWidth: 100, @@ -78,7 +80,7 @@ export class TableColumnFactory { fieldName: T, unit: string, rows?: U[] - ): IColumnWithSourceFields { + ): IColumn { const capacity = rows ? _max(rows.map((v) => v[fieldName])) ?? 0 : 0 return { ...this.columnFromField(fieldName), @@ -96,48 +98,27 @@ export class TableColumnFactory { } } - multipleBar( - barsConfig: BarsConfig, - unit: string, - rows?: T[] - ): IColumnWithSourceFields { + multipleBar(barsConfig: DerivedBar, unit: string, rows?: T[]): IColumn { const { displayTransKey, - bars: [avg_, max_, min_], + sources: [avg, max, min], } = barsConfig - const tooltioPrefixLens: number[] = [] - const avg = { - fieldName: Object.values(avg_)[0], - tooltipPrefix: Object.keys(avg_)[0], - } - tooltioPrefixLens.push(avg.tooltipPrefix.length) - const max = { - fieldName: Object.values(max_)[0], - tooltipPrefix: Object.keys(max_)[0], - } - tooltioPrefixLens.push(max.tooltipPrefix.length) - let min - if (min_) { - min = { - fieldName: Object.values(min_)[0], - tooltipPrefix: Object.keys(min_)[0], - } - tooltioPrefixLens.push(min.tooltipPrefix.length) - } else { - min = undefined - } - const maxTooltipPrefixLen = _max(tooltioPrefixLens) || 0 + const tooltipPrefixLens: number[] = [] - const capacity = rows ? _max(rows.map((v) => v[max.fieldName])) ?? 
0 : 0 - let sourceFields = [avg.fieldName, max.fieldName] as string[] + tooltipPrefixLens.push(avg.tooltipPrefix.length) + tooltipPrefixLens.push(max.tooltipPrefix.length) if (min) { - sourceFields.push(min.fieldName) + tooltipPrefixLens.push(min.tooltipPrefix.length) } + + const maxTooltipPrefixLen = _max(tooltipPrefixLens) || 0 + + const capacity = rows ? _max(rows.map((v) => v[max.fieldName])) ?? 0 : 0 + return { - ...this.columnFromField(avg.fieldName as string), - name: this.columnName((displayTransKey || avg.fieldName) as string), - sourceFields, + ...this.columnFromField(avg.fieldName), + name: this.columnName(displayTransKey || avg.fieldName), minWidth: 140, maxWidth: 200, columnActionsMode: ColumnActionsMode.clickable, @@ -175,7 +156,7 @@ export class TableColumnFactory { timestamp( fieldName: T, _rows?: U[] - ): IColumnWithSourceFields { + ): IColumn { return { ...this.columnFromField(fieldName), minWidth: 100, @@ -193,7 +174,7 @@ export class TableColumnFactory { fieldName: T, showFullSQL?: boolean, _rows?: U[] - ): IColumnWithSourceFields { + ): IColumn { return { ...this.columnFromField(fieldName), minWidth: 100, @@ -229,26 +210,64 @@ export class BarColumn { return this.factory.singleBar(fieldName, unit, rows) } - multiple(bars: BarsConfig, unit: string, rows?: T[]) { + multiple(bars: DerivedBar, unit: string, rows?: T[]) { return this.factory.multipleBar(bars, unit, rows) } } //////////////////////////////////////////// +export type DerivedFields = Record< + string, + DerivedBar['sources'] | DerivedCol['sources'] +> + +export function genDerivedBarSources( + avg: string, + max: string, + min?: string +): DerivedBar['sources'] { + const res = [ + { + tooltipPrefix: 'mean', + fieldName: avg, + }, + { + tooltipPrefix: 'max', + fieldName: max, + }, + ] + if (min) { + res.push({ + tooltipPrefix: 'min', + fieldName: min, + }) + } + return res +} + +function isDerivedBarSources(v: any): v is DerivedBar['sources'] { + return !!v[0].fieldName +} + export function getSelectedFields( visibleColumnKeys: IColumnKeys, - columns: IColumnWithSourceFields[] + derivedFields: DerivedFields ) { let fields: string[] = [] - columns.forEach((c) => { - if (visibleColumnKeys[c.key] === true) { - if (c.sourceFields !== undefined) { - fields = fields.concat(c.sourceFields) + let sources: DerivedFields[keyof DerivedFields] + for (const columnKey in visibleColumnKeys) { + if (visibleColumnKeys[columnKey]) { + if ((sources = derivedFields[columnKey])) { + if (isDerivedBarSources(sources)) { + fields.push(...sources.map((b) => b.fieldName)) + } else { + fields.push(...sources) + } } else { - fields.push(c.key) + fields.push(columnKey) } } - }) + } return fields } From aed52102b2e79036bc614397561919ab9163f31b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=9C=C9=B4=D0=B2=CA=8F=D1=82=E1=B4=87?= Date: Thu, 22 Oct 2020 21:58:42 +0800 Subject: [PATCH 09/29] ui: fix browser compatibility check (#776) - remove experimental API in checkBrowser.js - polyfill for Object.entries --- ui/gulpfile.esm.js | 2 +- ui/public/{checkBrowser.js => compat.js} | 19 ++++++++++++++++++- ui/public/diagnoseReport.html | 2 +- ui/public/index.html | 2 +- 4 files changed, 21 insertions(+), 4 deletions(-) rename ui/public/{checkBrowser.js => compat.js} (82%) diff --git a/ui/gulpfile.esm.js b/ui/gulpfile.esm.js index 8b0b3ab89d..6af6beb95c 100644 --- a/ui/gulpfile.esm.js +++ b/ui/gulpfile.esm.js @@ -59,7 +59,7 @@ function updateBrowserList() { } task('gen:browserlist', () => { - return src('public/checkBrowser.js') + return 
src('public/compat.js') .pipe(updateBrowserList()) .pipe(dest('public', { overwrite: true })) }) diff --git a/ui/public/checkBrowser.js b/ui/public/compat.js similarity index 82% rename from ui/public/checkBrowser.js rename to ui/public/compat.js index 786e2c6604..ba54664c7a 100644 --- a/ui/public/checkBrowser.js +++ b/ui/public/compat.js @@ -40,8 +40,25 @@ function checkBrowser() { d.getElementsByTagName('a')[0].onclick = function () { d.getElementsByTagName('div')[0].style.top = '-60px' } - document.body.prepend(d) + document.body.insertBefore(d, document.body.firstChild) } } checkBrowser() + +// Dealing with compatibility issues manually for special cases + +// Object.entries +// see https://github.com/pingcap-incubator/tidb-dashboard/issues/770 +// polyfill from https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/entries +if (!Object.entries) { + Object.entries = function( obj ){ + var ownProps = Object.keys( obj ), + i = ownProps.length, + resArray = new Array(i); // preallocate the Array + while (i--) + resArray[i] = [ownProps[i], obj[ownProps[i]]]; + + return resArray; + }; +} diff --git a/ui/public/diagnoseReport.html b/ui/public/diagnoseReport.html index 6114d4ae8a..aed243ffda 100644 --- a/ui/public/diagnoseReport.html +++ b/ui/public/diagnoseReport.html @@ -10,7 +10,7 @@ - +
diff --git a/ui/public/index.html b/ui/public/index.html index 08a29e991d..750c0f265c 100644 --- a/ui/public/index.html +++ b/ui/public/index.html @@ -91,7 +91,7 @@ - +
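(For context on the compat.js change in this commit: the snippet below is a rough, self-contained illustration of the behaviour the Object.entries fallback is expected to preserve. The sample object and its values are invented for the example and are not part of the patch.)

    // Illustrative only: with or without the polyfill, Object.entries should
    // yield the same key/value pairs for a plain object.
    const visibleColumnKeys = { digest_text: true, exec_count: false }
    const pairs = Object.entries(visibleColumnKeys)
    // -> [['digest_text', true], ['exec_count', false]]
    for (const [key, visible] of pairs) {
      console.log(key, visible)
    }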
From a37d7b8418338f58ebda86b114b69e7a00e5e90f Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Tue, 27 Oct 2020 20:22:58 +0800 Subject: [PATCH 10/29] ui: Refine store location, add zoom and pan (#772) --- ui/.storybook/main.js | 5 +- .../StoreLocationTree/index.stories.tsx | 4 +- .../components/StoreLocationTree/index.tsx | 151 ++++++++++++++---- 3 files changed, 123 insertions(+), 37 deletions(-) diff --git a/ui/.storybook/main.js b/ui/.storybook/main.js index 106e4b05bb..e92052f8cd 100644 --- a/ui/.storybook/main.js +++ b/ui/.storybook/main.js @@ -22,7 +22,10 @@ function includeMorePaths(config) { const custom = require('../config-overrides') module.exports = { - stories: ['../lib/components/**/*.stories.@(ts|tsx|js|jsx)'], + stories: [ + '../lib/components/**/*.stories.@(ts|tsx|js|jsx)', + '../lib/apps/**/*.stories.@(ts|tsx|js|jsx)', + ], addons: [ '@storybook/preset-create-react-app', '@storybook/addon-actions', diff --git a/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.stories.tsx b/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.stories.tsx index 9997f80e92..ad09b200e3 100644 --- a/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.stories.tsx +++ b/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.stories.tsx @@ -6,7 +6,7 @@ export default { } const dataSource1 = { - name: 'labels', + name: 'Stores', children: [ { name: 'sh', @@ -65,7 +65,7 @@ const dataSource1 = { export const onlyName = () => const dataSource2 = { - name: 'labels', + name: 'Stores', value: '', children: [ { diff --git a/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx b/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx index 996a9578ca..4fcdbefac5 100644 --- a/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx +++ b/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx @@ -1,83 +1,139 @@ import React, { useRef, useEffect } from 'react' import * as d3 from 'd3' +import { + ZoomInOutlined, + ZoomOutOutlined, + ReloadOutlined, + QuestionCircleOutlined, +} from '@ant-design/icons' +import { Space, Tooltip } from 'antd' export interface IStoreLocationProps { dataSource: any } -const margin = { top: 40, right: 120, bottom: 10, left: 80 } -const width = 954 +const margin = { left: 60, right: 40, top: 60, bottom: 100 } const dx = 40 -const dy = width / 6 - -const tree = d3.tree().nodeSize([dx, dy]) const diagonal = d3 .linkHorizontal() .x((d: any) => d.y) .y((d: any) => d.x) +function calcHeight(root) { + let x0 = Infinity + let x1 = -x0 + root.each((d) => { + if (d.x > x1) x1 = d.x + if (d.x < x0) x0 = d.x + }) + return x1 - x0 +} + export default function StoreLocationTree({ dataSource }: IStoreLocationProps) { - const ref = useRef(null) + const divRef = useRef(null) useEffect(() => { + let divWidth = divRef.current?.clientWidth || 0 const root = d3.hierarchy(dataSource) as any - root.x0 = dy / 2 - root.y0 = 0 root.descendants().forEach((d, i) => { d.id = i d._children = d.children // collapse all nodes default // if (d.depth) d.children = null }) + const dy = divWidth / (root.height + 2) + let tree = d3.tree().nodeSize([dx, dy]) - const svg = d3.select(ref.current) - svg.selectAll('g').remove() - svg - .attr('viewBox', [-margin.left, -margin.top, width, dx] as any) - .style('font', '16px sans-serif') + const div = d3.select(divRef.current) + div.select('svg#slt').remove() + const svg = div + .append('svg') + .attr('id', 'slt') + .attr('width', divWidth) + .attr('height', dx + margin.top + 
margin.bottom) + .style('font', '14px sans-serif') .style('user-select', 'none') - const gLink = svg + const bound = svg + .append('g') + .attr('transform', `translate(${margin.left}, ${margin.top})`) + const gLink = bound .append('g') .attr('fill', 'none') .attr('stroke', '#555') .attr('stroke-opacity', 0.4) .attr('stroke-width', 2) - - const gNode = svg + const gNode = bound .append('g') .attr('cursor', 'pointer') .attr('pointer-events', 'all') + // zoom + const zoom = d3 + .zoom() + .scaleExtent([0.1, 5]) + .filter(function () { + // ref: https://godbasin.github.io/2018/02/07/d3-tree-notes-4-zoom-amd-drag/ + // only zoom when pressing CTRL + const isWheelEvent = d3.event instanceof WheelEvent + return !isWheelEvent || (isWheelEvent && d3.event.ctrlKey) + }) + .on('zoom', () => { + const t = d3.event.transform + bound.attr( + 'transform', + `translate(${t.x + margin.left}, ${t.y + margin.top}) scale(${t.k})` + ) + + // this will cause unexpected result when dragging + // svg.attr('transform', d3.event.transform) + }) + svg.call(zoom as any) + + // zoom actions + d3.select('#slt-zoom-in').on('click', function () { + zoom.scaleBy(svg.transition().duration(500) as any, 1.2) + }) + d3.select('#slt-zoom-out').on('click', function () { + zoom.scaleBy(svg.transition().duration(500) as any, 0.8) + }) + d3.select('#slt-zoom-reset').on('click', function () { + // https://stackoverflow.com/a/51981636/2998877 + svg + .transition() + .duration(500) + .call(zoom.transform as any, d3.zoomIdentity) + }) + + update(root) + function update(source) { - const duration = d3.event && d3.event.altKey ? 2500 : 250 + // use altKey to slow down the animation, interesting! + const duration = d3.event && d3.event.altKey ? 2500 : 500 const nodes = root.descendants().reverse() const links = root.links() // compute the new tree layout // it modifies root self tree(root) - - let left = root - let right = root - root.eachBefore((node) => { - if (node.x < left.x) left = node - if (node.x > right.x) right = node + const boundHeight = calcHeight(root) + // node.x represent the y axes position actually + // [root.y, root.x] is [0, 0], we need to move it to [0, boundHeight/2] + root.descendants().forEach((d, i) => { + d.x += boundHeight / 2 }) - - const height = right.x - left.x + margin.top + margin.bottom + if (root.x0 === undefined) { + // initial root.x0, root.y0, only need to set it once + root.x0 = root.x + root.y0 = root.y + } const transition = svg .transition() .duration(duration) - .attr('viewBox', [ - -margin.left, - left.x - margin.top, - width, - height, - ] as any) - .tween('resize', () => () => svg.dispatch('toggle')) + .attr('width', divWidth) + .attr('height', boundHeight + margin.top + margin.bottom) // update the nodes const node = gNode.selectAll('g').data(nodes, (d: any) => d.id) @@ -169,10 +225,37 @@ export default function StoreLocationTree({ dataSource }: IStoreLocationProps) { }) } - update(root) + function resizeHandler() { + divWidth = divRef.current?.clientWidth || 0 + const dy = divWidth / (root.height + 2) + tree = d3.tree().nodeSize([dx, dy]) + update(root) + } + + window.addEventListener('resize', resizeHandler) + return () => { + window.removeEventListener('resize', resizeHandler) + } }, [dataSource]) - return + return ( +
+ + + + + + + + +
+ ) } // refs: From de46b00b3c65362a28804853a3761c6f29eba2a9 Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Wed, 28 Oct 2020 10:35:07 +0800 Subject: [PATCH 11/29] ui: show disk usage information for statement and slow query (#777) --- pkg/apiserver/slowquery/queries.go | 1 + pkg/apiserver/statement/models.go | 2 ++ ui/lib/apps/SlowQuery/pages/Detail/DetailTabBasic.tsx | 4 ++++ ui/lib/apps/SlowQuery/translations/en.yaml | 2 ++ ui/lib/apps/SlowQuery/translations/zh.yaml | 2 ++ ui/lib/apps/SlowQuery/utils/tableColumns.tsx | 1 + ui/lib/apps/Statement/pages/Detail/PlanDetailTabBasic.tsx | 8 ++++++++ ui/lib/apps/Statement/translations/en.yaml | 4 ++++ ui/lib/apps/Statement/translations/zh.yaml | 4 ++++ ui/lib/apps/Statement/utils/tableColumns.tsx | 2 ++ 10 files changed, 30 insertions(+) diff --git a/pkg/apiserver/slowquery/queries.go b/pkg/apiserver/slowquery/queries.go index 4793d3c2ad..0ac04185bd 100644 --- a/pkg/apiserver/slowquery/queries.go +++ b/pkg/apiserver/slowquery/queries.go @@ -43,6 +43,7 @@ type SlowQuery struct { ProcessTime float64 `gorm:"column:Process_time" json:"process_time"` MemoryMax int `gorm:"column:Mem_max" json:"memory_max"` + DiskMax int `gorm:"column:Disk_max" json:"disk_max"` TxnStartTS uint `gorm:"column:Txn_start_ts" json:"txn_start_ts"` // Detail diff --git a/pkg/apiserver/statement/models.go b/pkg/apiserver/statement/models.go index a32e32ada8..3352700056 100644 --- a/pkg/apiserver/statement/models.go +++ b/pkg/apiserver/statement/models.go @@ -84,6 +84,8 @@ type Model struct { AggSumBackoffTimes int `json:"sum_backoff_times" agg:"SUM(sum_backoff_times)"` AggAvgMem int `json:"avg_mem" agg:"ROUND(SUM(exec_count * avg_mem) / SUM(exec_count))"` AggMaxMem int `json:"max_mem" agg:"MAX(max_mem)"` + AggAvgDisk int `json:"avg_disk" agg:"ROUND(SUM(exec_count * avg_disk) / SUM(exec_count))"` + AggMaxDisk int `json:"max_disk" agg:"MAX(max_disk)"` AggAvgAffectedRows int `json:"avg_affected_rows" agg:"ROUND(SUM(exec_count * avg_affected_rows) / SUM(exec_count))"` AggFirstSeen int `json:"first_seen" agg:"UNIX_TIMESTAMP(MIN(first_seen))"` AggLastSeen int `json:"last_seen" agg:"UNIX_TIMESTAMP(MAX(last_seen))"` diff --git a/ui/lib/apps/SlowQuery/pages/Detail/DetailTabBasic.tsx b/ui/lib/apps/SlowQuery/pages/Detail/DetailTabBasic.tsx index 0421e81ea3..359289f28c 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/DetailTabBasic.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/DetailTabBasic.tsx @@ -29,6 +29,10 @@ export default function TabBasic({ data }: ITabBasicProps) { key: 'memory_max', value: getValueFormat('bytes')(data.memory_max || 0, 1), }, + { + key: 'disk_max', + value: getValueFormat('bytes')(data.disk_max || 0, 1), + }, { key: 'instance', value: data.instance }, { key: 'connection_id', value: data.connection_id }, { key: 'user', value: data.user }, diff --git a/ui/lib/apps/SlowQuery/translations/en.yaml b/ui/lib/apps/SlowQuery/translations/en.yaml index 46d1e20805..9ec99334f3 100644 --- a/ui/lib/apps/SlowQuery/translations/en.yaml +++ b/ui/lib/apps/SlowQuery/translations/en.yaml @@ -13,6 +13,8 @@ slow_query: query_time_tooltip: Execution time of the query memory_max: Max Memory memory_max_tooltip: Maximum memory usage of the query + disk_max: Max Disk + disk_max_tooltip: Maximum disk usage of the query digest: Query Template ID digest_tooltip: a.k.a. Query digest is_internal: Is Internal? 
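(A rough usage sketch for how the new disk fields in this commit are rendered with getValueFormat('bytes'), following the calls in DetailTabBasic.tsx and PlanDetailTabBasic.tsx above. The numeric inputs are invented for illustration, and the exact output strings depend on the @baurine/grafana-value-formats package.)

    import { getValueFormat } from '@baurine/grafana-value-formats'

    // The 'bytes' formatter appears to use binary (1024-based) units; the
    // second argument is the number of decimals, as in the detail tabs above.
    const formatBytes = getValueFormat('bytes')
    formatBytes(0, 1)         // e.g. "0 B"
    formatBytes(734003200, 1) // e.g. "700.0 MiB" (734003200 = 700 * 1024 * 1024)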
diff --git a/ui/lib/apps/SlowQuery/translations/zh.yaml b/ui/lib/apps/SlowQuery/translations/zh.yaml index 39b50c6801..f59e140159 100644 --- a/ui/lib/apps/SlowQuery/translations/zh.yaml +++ b/ui/lib/apps/SlowQuery/translations/zh.yaml @@ -14,6 +14,8 @@ slow_query: query_time_tooltip: 该 SQL 查询总的执行时间 memory_max: 最大内存 memory_max_tooltip: 该 SQL 查询执行时占用的最大内存空间 + disk_max: 最大磁盘空间 + disk_max_tooltip: 该 SQL 查询执行时占用的最大磁盘空间 digest: SQL 模板 ID digest_tooltip: SQL 模板的唯一标识(SQL 指纹) is_internal: 是否为内部 SQL 查询 diff --git a/ui/lib/apps/SlowQuery/utils/tableColumns.tsx b/ui/lib/apps/SlowQuery/utils/tableColumns.tsx index 72e11399b3..fc65a4819c 100644 --- a/ui/lib/apps/SlowQuery/utils/tableColumns.tsx +++ b/ui/lib/apps/SlowQuery/utils/tableColumns.tsx @@ -51,6 +51,7 @@ export function slowQueryColumns( tcf.bar.single('compile_time', 's', rows), tcf.bar.single('process_time', 's', rows), tcf.bar.single('memory_max', 'bytes', rows), + tcf.bar.single('disk_max', 'bytes', rows), tcf.textWithTooltip('txn_start_ts', rows), // success columnn diff --git a/ui/lib/apps/Statement/pages/Detail/PlanDetailTabBasic.tsx b/ui/lib/apps/Statement/pages/Detail/PlanDetailTabBasic.tsx index d0115bc1e6..f4a11813a9 100644 --- a/ui/lib/apps/Statement/pages/Detail/PlanDetailTabBasic.tsx +++ b/ui/lib/apps/Statement/pages/Detail/PlanDetailTabBasic.tsx @@ -66,6 +66,14 @@ export default function TabBasic({ data }: ITabBasicProps) { key: 'max_mem', value: getValueFormat('bytes')(data.max_mem || 0, 1), }, + { + key: 'avg_disk', + value: getValueFormat('bytes')(data.avg_disk || 0, 1), + }, + { + key: 'max_disk', + value: getValueFormat('bytes')(data.max_disk || 0, 1), + }, ] const columns = valueColumns('statement.fields.') return ( diff --git a/ui/lib/apps/Statement/translations/en.yaml b/ui/lib/apps/Statement/translations/en.yaml index f91128ac54..b4a44d1417 100644 --- a/ui/lib/apps/Statement/translations/en.yaml +++ b/ui/lib/apps/Statement/translations/en.yaml @@ -74,6 +74,10 @@ statement: avg_mem_tooltip: Memory usage of single query max_mem: Max Memory max_mem_tooltip: Maximum memory usage of single query + avg_disk: Mean Disk + avg_disk_tooltip: Disk usage of single query + max_disk: Max Disk + max_disk_tooltip: Maximum disk usage of single query index_names: Index Name index_names_tooltip: The name of the used index first_seen: First Seen diff --git a/ui/lib/apps/Statement/translations/zh.yaml b/ui/lib/apps/Statement/translations/zh.yaml index 2602746f16..f15541f201 100644 --- a/ui/lib/apps/Statement/translations/zh.yaml +++ b/ui/lib/apps/Statement/translations/zh.yaml @@ -73,6 +73,10 @@ statement: avg_mem_tooltip: 单条 SQL 查询的消耗内存大小 max_mem: 最大内存 max_mem_tooltip: 最大单条 SQL 查询消耗内存大小 + avg_disk: 平均磁盘空间 + avg_disk_tooltip: 单条 SQL 查询占用的磁盘空间大小 + max_disk: 最大磁盘空间 + max_disk_tooltip: 最大单条 SQL 查询占用的磁盘空间大小 table_names: 表名 index_names: 索引名 index_names_tooltip: SQL 执行时使用的索引名称 diff --git a/ui/lib/apps/Statement/utils/tableColumns.tsx b/ui/lib/apps/Statement/utils/tableColumns.tsx index 94bdab4875..bc2ae0266e 100644 --- a/ui/lib/apps/Statement/utils/tableColumns.tsx +++ b/ui/lib/apps/Statement/utils/tableColumns.tsx @@ -73,6 +73,7 @@ export const derivedFields = { ), avg_txn_retry: genDerivedBarSources('avg_txn_retry', 'max_txn_retry'), avg_mem: genDerivedBarSources('avg_mem', 'max_mem'), + avg_disk: genDerivedBarSources('avg_disk', 'max_disk'), sum_errors: ['sum_errors', 'sum_warnings'], related_schemas: ['table_names'], } @@ -163,6 +164,7 @@ export function statementColumns( columnActionsMode: ColumnActionsMode.clickable, }, avgMaxColumn(tcf, 
'avg_mem', 'bytes', rows), + avgMaxColumn(tcf, 'avg_disk', 'bytes', rows), errorsWarningsColumn(tcf, rows), avgMaxColumn(tcf, 'parse_latency', 'ns', rows), avgMaxColumn(tcf, 'compile_latency', 'ns', rows), From d67bcbcb6a1111d1ed0d35b3c5a7c6cad3966051 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=9C=C9=B4=D0=B2=CA=8F=D1=82=E1=B4=87?= Date: Wed, 4 Nov 2020 19:25:58 +0800 Subject: [PATCH 12/29] ui: use qps instead of ops (#786) --- ui/lib/apps/Overview/index.tsx | 2 +- ui/yarn.lock | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ui/lib/apps/Overview/index.tsx b/ui/lib/apps/Overview/index.tsx index 6b663d41a7..4da266f0fa 100644 --- a/ui/lib/apps/Overview/index.tsx +++ b/ui/lib/apps/Overview/index.tsx @@ -22,7 +22,7 @@ function QPS() { name: 'Queries {result}', }, ]} - unit="ops" + unit="qps" type="bar" /> ) diff --git a/ui/yarn.lock b/ui/yarn.lock index 47c8a528b5..109ffea9a8 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -2019,9 +2019,9 @@ to-fast-properties "^2.0.0" "@baurine/grafana-value-formats@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@baurine/grafana-value-formats/-/grafana-value-formats-1.0.0.tgz#030e19a602799d364814d5f010a55ca2ea67b140" - integrity sha512-dJqzIgBOrEfxioROhMfIoUsfFGYH4mzPOEV3VLnX0rtnkNrkjQzemtuEQ+T0N0saPg3vozIBhehHCcKa8417pA== + version "1.0.2" + resolved "https://registry.yarnpkg.com/@baurine/grafana-value-formats/-/grafana-value-formats-1.0.2.tgz#f1129cf19ca0a887b77451cdefa31f94afc8500a" + integrity sha512-LdT/B7mHGSQ/k+o6B0oEjGpRRqMlLvnTezerp71wWW8Io89eePd1q/Lk0JWj/Vspv2RIdQzhE1Z7HZJtjc3IFg== "@cnakazawa/watch@^1.0.3": version "1.0.4" From c01db67b194bcb32aac5d6b65918c0cf0feed9db Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Thu, 5 Nov 2020 12:06:09 +0800 Subject: [PATCH 13/29] statement: support export (#778) --- go.mod | 3 + go.sum | 6 + pkg/apiserver/statement/models.go | 8 +- pkg/apiserver/statement/queries.go | 11 +- pkg/apiserver/statement/statement.go | 208 ++++++++++++++++-- .../SearchLogs/components/SearchProgress.tsx | 2 +- ui/lib/apps/Statement/pages/List/index.tsx | 71 +++++- ui/lib/apps/Statement/translations/en.yaml | 2 + ui/lib/apps/Statement/translations/zh.yaml | 2 + .../utils/useStatementTableController.ts | 28 ++- 10 files changed, 312 insertions(+), 29 deletions(-) diff --git a/go.mod b/go.mod index 947eb421dd..84184fdcbc 100644 --- a/go.mod +++ b/go.mod @@ -4,6 +4,7 @@ go 1.13 require ( github.com/VividCortex/mysqlerr v0.0.0-20200629151747-c28746d985dd + github.com/Xeoncross/go-aesctr-with-hmac v0.0.0-20200623134604-12b17a7ff502 github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 github.com/appleboy/gin-jwt/v2 v2.6.3 github.com/cenkalti/backoff/v4 v4.0.2 @@ -19,6 +20,7 @@ require ( github.com/jinzhu/gorm v1.9.12 github.com/joho/godotenv v1.3.0 github.com/joomcode/errorx v1.0.1 + github.com/oleiade/reflections v1.0.0 // indirect github.com/pingcap/check v0.0.0-20191216031241-8a5a85928f12 github.com/pingcap/errors v0.11.5-0.20190809092503-95897b64e011 github.com/pingcap/kvproto v0.0.0-20200411081810-b85805c9476c @@ -42,4 +44,5 @@ require ( golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f // indirect golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2 // indirect google.golang.org/grpc v1.25.1 + gopkg.in/oleiade/reflections.v1 v1.0.0 ) diff --git a/go.sum b/go.sum index 5cb1fd340d..36b1c7ab53 100644 --- a/go.sum +++ b/go.sum @@ -12,6 +12,8 @@ github.com/StackExchange/wmi 
v0.0.0-20190523213315-cbe66965904d h1:G0m3OIz70MZUW github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= github.com/VividCortex/mysqlerr v0.0.0-20200629151747-c28746d985dd h1:59Whn6shj5MTVjTf2OX6+7iMcmY6h5CK0kTWwRaplL4= github.com/VividCortex/mysqlerr v0.0.0-20200629151747-c28746d985dd/go.mod h1:f3HiCrHjHBdcm6E83vGaXh1KomZMA2P6aeo3hKx/wg0= +github.com/Xeoncross/go-aesctr-with-hmac v0.0.0-20200623134604-12b17a7ff502 h1:L8IbaI/W6h5Cwgh0n4zGeZpVK78r/jBf9ASurHo9+/o= +github.com/Xeoncross/go-aesctr-with-hmac v0.0.0-20200623134604-12b17a7ff502/go.mod h1:pmnBM9bxWSiHvC/gSWunUIyDvGn33EkP2CUjxFKtTTM= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -226,6 +228,8 @@ github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRW github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5 h1:BvoENQQU+fZ9uukda/RzCAL/191HHwJA5b13R6diVlY= github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8= github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q= +github.com/oleiade/reflections v1.0.0 h1:0ir4pc6v8/PJ0yw5AEtMddfXpWBXg9cnG7SgSoJuCgY= +github.com/oleiade/reflections v1.0.0/go.mod h1:RbATFBbKYkVdqmSFtx13Bb/tVhR0lgOBXunWTZKeL4w= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= github.com/pelletier/go-toml v1.3.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo= github.com/pingcap/check v0.0.0-20190102082844-67f458068fc8 h1:USx2/E1bX46VG32FIw034Au6seQ2fY9NEILmNh/UlQg= @@ -503,6 +507,8 @@ gopkg.in/go-playground/validator.v9 v9.29.1 h1:SvGtYmN60a5CVKTOzMSyfzWDeZRxRuGvR gopkg.in/go-playground/validator.v9 v9.29.1/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ= gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= +gopkg.in/oleiade/reflections.v1 v1.0.0 h1:nV9NFaFd5bXKjilVvPvA+/V/tNQk1pOEEc9gGWDkj+s= +gopkg.in/oleiade/reflections.v1 v1.0.0/go.mod h1:SpA8pv+LUnF0FbB2hyRxc8XSng78D6iLBZ11PDb8Z5g= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/pkg/apiserver/statement/models.go b/pkg/apiserver/statement/models.go index 3352700056..e4120dcab9 100644 --- a/pkg/apiserver/statement/models.go +++ b/pkg/apiserver/statement/models.go @@ -34,7 +34,8 @@ type TimeRange struct { } type Model struct { - AggPlanCount int `json:"plan_count" agg:"COUNT(DISTINCT plan_digest)"` + AggDigestText string `json:"digest_text" agg:"ANY_VALUE(digest_text)"` + AggDigest string `json:"digest" agg:"ANY_VALUE(digest)"` AggExecCount int `json:"exec_count" agg:"SUM(exec_count)"` AggSumErrors int `json:"sum_errors" agg:"SUM(sum_errors)"` AggSumWarnings int `json:"sum_warnings" agg:"SUM(sum_warnings)"` @@ -95,10 +96,9 @@ type Model struct { AggSchemaName string `json:"schema_name" 
agg:"ANY_VALUE(schema_name)"` AggTableNames string `json:"table_names" agg:"ANY_VALUE(table_names)"` AggIndexNames string `json:"index_names" agg:"ANY_VALUE(index_names)"` - AggDigestText string `json:"digest_text" agg:"ANY_VALUE(digest_text)"` - AggDigest string `json:"digest" agg:"ANY_VALUE(digest)"` - AggPlanDigest string `json:"plan_digest" agg:"ANY_VALUE(plan_digest)"` + AggPlanCount int `json:"plan_count" agg:"COUNT(DISTINCT plan_digest)"` AggPlan string `json:"plan" agg:"ANY_VALUE(plan)"` + AggPlanDigest string `json:"plan_digest" agg:"ANY_VALUE(plan_digest)"` // Computed fields RelatedSchemas string `json:"related_schemas"` } diff --git a/pkg/apiserver/statement/queries.go b/pkg/apiserver/statement/queries.go index d8c07b8eb6..fc07ef7339 100644 --- a/pkg/apiserver/statement/queries.go +++ b/pkg/apiserver/statement/queries.go @@ -130,15 +130,20 @@ func QueryStmtTypes(db *gorm.DB) (result []string, err error) { // schemas: ["tpcc", "test"] // stmtTypes: ["select", "update"] // fields: ["digest_text", "sum_latency"] -func QueryStatementsOverview( +func QueryStatements( db *gorm.DB, beginTime, endTime int, schemas, stmtTypes []string, text string, fields []string, ) (result []Model, err error) { - fields = funk.UniqString(append(fields, "schema_name", "digest", "sum_latency")) // "schema_name", "digest" for group, "sum_latency" for order - aggrFields := getAggrFields(fields...) + var aggrFields []string + if len(fields) == 1 && fields[0] == "*" { + aggrFields = getAllAggrFields() + } else { + fields = funk.UniqString(append(fields, "schema_name", "digest", "sum_latency")) // "schema_name", "digest" for group, "sum_latency" for order + aggrFields = getAggrFields(fields...) + } query := db. Select(strings.Join(aggrFields, ", ")). diff --git a/pkg/apiserver/statement/statement.go b/pkg/apiserver/statement/statement.go index 2290dff902..951828a160 100644 --- a/pkg/apiserver/statement/statement.go +++ b/pkg/apiserver/statement/statement.go @@ -14,11 +14,27 @@ package statement import ( + "encoding/base64" + "encoding/csv" + "errors" + "fmt" + "io" + "io/ioutil" "net/http" + "os" + "reflect" "strings" + "time" "github.com/gin-gonic/gin" + "github.com/gtank/cryptopasta" + "github.com/pingcap/log" "go.uber.org/fx" + "go.uber.org/zap" + + aesctr "github.com/Xeoncross/go-aesctr-with-hmac" + + "gopkg.in/oleiade/reflections.v1" "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/user" "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/utils" @@ -40,15 +56,23 @@ func NewService(p ServiceParams) *Service { func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/statements") - endpoint.Use(auth.MWAuthRequired()) - endpoint.Use(utils.MWConnectTiDB(s.params.TiDBClient)) - endpoint.GET("/config", s.configHandler) - endpoint.POST("/config", s.modifyConfigHandler) - endpoint.GET("/time_ranges", s.timeRangesHandler) - endpoint.GET("/stmt_types", s.stmtTypesHandler) - endpoint.GET("/overviews", s.overviewsHandler) - endpoint.GET("/plans", s.getPlansHandler) - endpoint.GET("/plan/detail", s.getPlanDetailHandler) + { + endpoint.GET("/download", s.downloadHandler) + + endpoint.Use(auth.MWAuthRequired()) + endpoint.Use(utils.MWConnectTiDB(s.params.TiDBClient)) + { + endpoint.GET("/config", s.configHandler) + endpoint.POST("/config", s.modifyConfigHandler) + endpoint.GET("/time_ranges", s.timeRangesHandler) + endpoint.GET("/stmt_types", s.stmtTypesHandler) + endpoint.GET("/list", s.listHandler) + endpoint.GET("/plans", s.plansHandler) + 
endpoint.GET("/plan/detail", s.planDetailHandler) + + endpoint.POST("/download/token", s.downloadTokenHandler) + } + } } // @Summary Get statement configurations @@ -126,13 +150,13 @@ type GetStatementsRequest struct { Fields string `json:"fields" form:"fields"` } -// @Summary Get a list of statement overviews +// @Summary Get a list of statements // @Param q query GetStatementsRequest true "Query" // @Success 200 {array} Model -// @Router /statements/overviews [get] +// @Router /statements/list [get] // @Security JwtAuth // @Failure 401 {object} utils.APIError "Unauthorized failure" -func (s *Service) overviewsHandler(c *gin.Context) { +func (s *Service) listHandler(c *gin.Context) { var req GetStatementsRequest if err := c.ShouldBindQuery(&req); err != nil { utils.MakeInvalidRequestErrorFromError(c, err) @@ -143,7 +167,7 @@ func (s *Service) overviewsHandler(c *gin.Context) { if strings.TrimSpace(req.Fields) != "" { fields = strings.Split(req.Fields, ",") } - overviews, err := QueryStatementsOverview( + overviews, err := QueryStatements( db, req.BeginTime, req.EndTime, req.Schemas, @@ -170,7 +194,7 @@ type GetPlansRequest struct { // @Router /statements/plans [get] // @Security JwtAuth // @Failure 401 {object} utils.APIError "Unauthorized failure" -func (s *Service) getPlansHandler(c *gin.Context) { +func (s *Service) plansHandler(c *gin.Context) { var req GetPlansRequest if err := c.ShouldBindQuery(&req); err != nil { utils.MakeInvalidRequestErrorFromError(c, err) @@ -196,7 +220,7 @@ type GetPlanDetailRequest struct { // @Router /statements/plan/detail [get] // @Security JwtAuth // @Failure 401 {object} utils.APIError "Unauthorized failure" -func (s *Service) getPlanDetailHandler(c *gin.Context) { +func (s *Service) planDetailHandler(c *gin.Context) { var req GetPlanDetailRequest if err := c.ShouldBindQuery(&req); err != nil { utils.MakeInvalidRequestErrorFromError(c, err) @@ -210,3 +234,157 @@ func (s *Service) getPlanDetailHandler(c *gin.Context) { } c.JSON(http.StatusOK, result) } + +// @Router /statements/download/token [post] +// @Summary Generate a download token for exported statements +// @Produce plain +// @Param request body GetStatementsRequest true "Request body" +// @Success 200 {string} string "xxx" +// @Security JwtAuth +// @Failure 401 {object} utils.APIError "Unauthorized failure" +func (s *Service) downloadTokenHandler(c *gin.Context) { + var req GetStatementsRequest + if err := c.ShouldBindJSON(&req); err != nil { + utils.MakeInvalidRequestErrorFromError(c, err) + return + } + db := utils.GetTiDBConnection(c) + fields := []string{} + if strings.TrimSpace(req.Fields) != "" { + fields = strings.Split(req.Fields, ",") + } + overviews, err := QueryStatements( + db, + req.BeginTime, req.EndTime, + req.Schemas, + req.StmtTypes, + req.Text, + fields) + if err != nil { + _ = c.Error(err) + return + } + if len(overviews) == 0 { + utils.MakeInvalidRequestErrorFromError(c, errors.New("no data to export")) + return + } + + // convert data + fieldsMap := make(map[string]string) + t := reflect.TypeOf(overviews[0]) + fieldsNum := t.NumField() + allFields := make([]string, fieldsNum) + for i := 0; i < fieldsNum; i++ { + field := t.Field(i) + allFields[i] = strings.ToLower(field.Tag.Get("json")) + fieldsMap[allFields[i]] = field.Name + } + if len(fields) == 1 && fields[0] == "*" { + fields = allFields + } + + csvData := [][]string{fields} + timeLayout := "01-02 15:04:05" + for _, overview := range overviews { + row := []string{} + for _, field := range fields { + filedName := 
fieldsMap[field] + s, _ := reflections.GetField(overview, filedName) + var val string + switch t := s.(type) { + case int: + if field == "first_seen" || field == "last_seen" { + val = time.Unix(int64(t), 0).Format(timeLayout) + } else { + val = fmt.Sprintf("%d", t) + } + default: + val = fmt.Sprintf("%s", t) + } + row = append(row, val) + } + csvData = append(csvData, row) + } + + // generate temp file that persist encrypted data + timeLayout = "01021504" + beginTime := time.Unix(int64(req.BeginTime), 0).Format(timeLayout) + endTime := time.Unix(int64(req.EndTime), 0).Format(timeLayout) + csvFile, err := ioutil.TempFile("", fmt.Sprintf("statements_%s_%s_*.csv", beginTime, endTime)) + if err != nil { + _ = c.Error(err) + return + } + defer csvFile.Close() + + // generate encryption key + secretKey := *cryptopasta.NewEncryptionKey() + + pr, pw := io.Pipe() + go func() { + csvwriter := csv.NewWriter(pw) + _ = csvwriter.WriteAll(csvData) + pw.Close() + }() + err = aesctr.Encrypt(pr, csvFile, secretKey[0:16], secretKey[16:]) + if err != nil { + _ = c.Error(err) + return + } + + // generate token by filepath and secretKey + secretKeyStr := base64.StdEncoding.EncodeToString(secretKey[:]) + token, err := utils.NewJWTString("statements/download", secretKeyStr+" "+csvFile.Name()) + if err != nil { + _ = c.Error(err) + return + } + c.String(http.StatusOK, token) +} + +// @Router /statements/download [get] +// @Summary Download statements +// @Produce text/csv +// @Param token query string true "download token" +// @Failure 400 {object} utils.APIError +// @Failure 401 {object} utils.APIError "Unauthorized failure" +func (s *Service) downloadHandler(c *gin.Context) { + token := c.Query("token") + tokenPlain, err := utils.ParseJWTString("statements/download", token) + if err != nil { + utils.MakeInvalidRequestErrorFromError(c, err) + return + } + arr := strings.Fields(tokenPlain) + if len(arr) != 2 { + utils.MakeInvalidRequestErrorFromError(c, errors.New("invalid token")) + return + } + secretKey, err := base64.StdEncoding.DecodeString(arr[0]) + if err != nil { + utils.MakeInvalidRequestErrorFromError(c, err) + return + } + + filePath := arr[1] + fileInfo, err := os.Stat(filePath) + if err != nil { + _ = c.Error(err) + return + } + f, err := os.Open(filePath) + if err != nil { + _ = c.Error(err) + return + } + + c.Writer.Header().Set("Content-type", "text/csv") + c.Writer.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", fileInfo.Name())) + err = aesctr.Decrypt(f, c.Writer, secretKey[0:16], secretKey[16:]) + if err != nil { + log.Error("decrypt csv failed", zap.Error(err)) + } + // delete it anyway + f.Close() + _ = os.Remove(filePath) +} diff --git a/ui/lib/apps/SearchLogs/components/SearchProgress.tsx b/ui/lib/apps/SearchLogs/components/SearchProgress.tsx index 40172f122b..4626eca1d4 100644 --- a/ui/lib/apps/SearchLogs/components/SearchProgress.tsx +++ b/ui/lib/apps/SearchLogs/components/SearchProgress.tsx @@ -154,7 +154,7 @@ export default function SearchProgress({ return } const url = `${client.getBasePath()}/logs/download?token=${token}` - window.open(url) + window.location.href = url } async function handleCancel() { diff --git a/ui/lib/apps/Statement/pages/List/index.tsx b/ui/lib/apps/Statement/pages/List/index.tsx index 7590375686..79ca5d9cb7 100644 --- a/ui/lib/apps/Statement/pages/List/index.tsx +++ b/ui/lib/apps/Statement/pages/List/index.tsx @@ -1,14 +1,28 @@ import React, { useState } from 'react' -import { Space, Tooltip, Drawer, Button, Checkbox, Result, Input 
} from 'antd' +import { + Space, + Tooltip, + Drawer, + Button, + Checkbox, + Result, + Input, + Dropdown, + Menu, + message, +} from 'antd' import { useLocalStorageState } from '@umijs/hooks' import { - SettingOutlined, ReloadOutlined, LoadingOutlined, + MenuOutlined, + SettingOutlined, + ExportOutlined, } from '@ant-design/icons' import { ScrollablePane } from 'office-ui-fabric-react/lib/ScrollablePane' import { useTranslation } from 'react-i18next' +import client from '@lib/client' import { Card, ColumnsSelector, Toolbar, MultiSelect } from '@lib/components' import { StatementsTable } from '../../components' @@ -47,8 +61,53 @@ export default function StatementsOverview() { allStmtTypes, loadingStatements, tableColumns, + + genDownloadToken, + downloading, } = controller + async function exportCSV() { + const hide = message.loading( + t('statement.pages.overview.toolbar.exporting') + '...', + 0 + ) + try { + const token = await genDownloadToken() + if (token) { + const url = `${client.getBasePath()}/statements/download?token=${token}` + // `window.open(url)` would cause browser popup interception if genDownloadToken takes long time + // window.open(url) + window.location.href = url + } + } finally { + hide() + } + } + + function menuItemClick({ key }) { + switch (key) { + case 'settings': + setShowSettings(true) + break + case 'export': + exportCSV() + break + } + } + + const dropdownMenu = ( + + }> + {t('statement.settings.title')} + + }> + {downloading + ? t('statement.pages.overview.toolbar.exporting') + : t('statement.pages.overview.toolbar.export')} + + + ) + return (
@@ -127,9 +186,6 @@ export default function StatementsOverview() { } /> )} - - setShowSettings(true)} /> - {loadingStatements ? ( @@ -137,6 +193,11 @@ export default function StatementsOverview() { )} + +
+          [dropdown trigger: MenuOutlined icon opening dropdownMenu]
diff --git a/ui/lib/apps/Statement/translations/en.yaml b/ui/lib/apps/Statement/translations/en.yaml index b4a44d1417..030c4d1e21 100644 --- a/ui/lib/apps/Statement/translations/en.yaml +++ b/ui/lib/apps/Statement/translations/en.yaml @@ -37,6 +37,8 @@ statement: recent: Recent usual_time_ranges: Common custom_time_ranges: Custom + export: Export + exporting: Exporting settings: title: Settings disabled_result: diff --git a/ui/lib/apps/Statement/translations/zh.yaml b/ui/lib/apps/Statement/translations/zh.yaml index f15541f201..e2b5045a13 100644 --- a/ui/lib/apps/Statement/translations/zh.yaml +++ b/ui/lib/apps/Statement/translations/zh.yaml @@ -37,6 +37,8 @@ statement: recent: 最近 usual_time_ranges: 常用时间范围 custom_time_ranges: 自定义时间范围 + export: 导出 + exporting: 正在导出 settings: title: 设置 disabled_result: diff --git a/ui/lib/apps/Statement/utils/useStatementTableController.ts b/ui/lib/apps/Statement/utils/useStatementTableController.ts index ddfbf41b0d..5abcf2499b 100644 --- a/ui/lib/apps/Statement/utils/useStatementTableController.ts +++ b/ui/lib/apps/Statement/utils/useStatementTableController.ts @@ -67,6 +67,9 @@ export interface IStatementTableController { tableColumns: IColumn[] visibleColumnKeys: IColumnKeys + + genDownloadToken: () => Promise + downloading: boolean } export default function useStatementTableController( @@ -196,7 +199,7 @@ export default function useStatementTableController( try { const res = await client .getInstance() - .statementsOverviewsGet( + .statementsListGet( validTimeRange.begin_time!, validTimeRange.end_time!, selectedFields, @@ -218,6 +221,26 @@ export default function useStatementTableController( queryStatementList() }, [queryOptions, allTimeRanges, validTimeRange, selectedFields]) + const [downloading, setDownloading] = useState(false) + async function genDownloadToken() { + let token = '' + try { + setDownloading(true) + const res = await client.getInstance().statementsDownloadTokenPost({ + begin_time: validTimeRange.begin_time, + end_time: validTimeRange.end_time, + fields: '*', + schemas: queryOptions.schemas, + stmt_types: queryOptions.stmtTypes, + text: queryOptions.searchText, + }) + token = res.data + } finally { + setDownloading(false) + } + return token + } + return { queryOptions, setQueryOptions, @@ -237,5 +260,8 @@ export default function useStatementTableController( tableColumns, visibleColumnKeys, + + genDownloadToken, + downloading, } } From 2632bb846d5bbadbdf852d6bf5709c8512d8bdcd Mon Sep 17 00:00:00 2001 From: Wenxuan Date: Thu, 12 Nov 2020 11:46:42 +0800 Subject: [PATCH 14/29] *: Fix slow query and start_ts not working in some cases (#793) Signed-off-by: Breezewish --- pkg/apiserver/slowquery/queries.go | 53 ++++++++++--------- pkg/apiserver/slowquery/service.go | 8 +-- ui/lib/apps/SlowQuery/pages/Detail/index.tsx | 2 +- .../utils/useSlowQueryTableController.ts | 4 +- 4 files changed, 35 insertions(+), 32 deletions(-) diff --git a/pkg/apiserver/slowquery/queries.go b/pkg/apiserver/slowquery/queries.go index 0ac04185bd..28141c9f43 100644 --- a/pkg/apiserver/slowquery/queries.go +++ b/pkg/apiserver/slowquery/queries.go @@ -24,27 +24,29 @@ import ( const ( SlowQueryTable = "INFORMATION_SCHEMA.CLUSTER_SLOW_QUERY" - SelectStmt = "*, (unix_timestamp(Time) + 0E0) as timestamp" + SelectStmt = "*, (UNIX_TIMESTAMP(Time) + 0E0) AS timestamp" ) type SlowQuery struct { Digest string `gorm:"column:Digest" json:"digest"` Query string `gorm:"column:Query" json:"query"` - Instance string `gorm:"column:INSTANCE" json:"instance"` - DB string `gorm:"column:DB" 
json:"db"` - ConnectionID uint `gorm:"column:Conn_ID" json:"connection_id"` + Instance string `gorm:"column:INSTANCE" json:"instance"` + DB string `gorm:"column:DB" json:"db"` + // TODO: Switch back to uint64 when modern browser as well as Swagger handles BigInt well. + ConnectionID string `gorm:"column:Conn_ID" json:"connection_id"` Success int `gorm:"column:Succ" json:"success"` - Timestamp float64 `gorm:"column:timestamp" proj:"(unix_timestamp(Time) + 0E0)" json:"timestamp"` // finish time + Timestamp float64 `gorm:"column:timestamp" proj:"(UNIX_TIMESTAMP(Time) + 0E0)" json:"timestamp"` // finish time QueryTime float64 `gorm:"column:Query_time" json:"query_time"` // latency ParseTime float64 `gorm:"column:Parse_time" json:"parse_time"` CompileTime float64 `gorm:"column:Compile_time" json:"compile_time"` ProcessTime float64 `gorm:"column:Process_time" json:"process_time"` - MemoryMax int `gorm:"column:Mem_max" json:"memory_max"` - DiskMax int `gorm:"column:Disk_max" json:"disk_max"` - TxnStartTS uint `gorm:"column:Txn_start_ts" json:"txn_start_ts"` + MemoryMax int `gorm:"column:Mem_max" json:"memory_max"` + DiskMax int `gorm:"column:Disk_max" json:"disk_max"` + // TODO: Switch back to uint64 when modern browser as well as Swagger handles BigInt well. + TxnStartTS string `gorm:"column:Txn_start_ts" json:"txn_start_ts"` // Detail PrevStmt string `gorm:"column:Prev_stmt" json:"prev_stmt"` @@ -91,13 +93,13 @@ type SlowQuery struct { } type GetListRequest struct { - LogStartTS int64 `json:"logStartTS" form:"logStartTS"` - LogEndTS int64 `json:"logEndTS" form:"logEndTS"` - DB []string `json:"db" form:"db"` - Limit int `json:"limit" form:"limit"` - Text string `json:"text" form:"text"` - OrderBy string `json:"orderBy" form:"orderBy"` - DESC bool `json:"desc" form:"desc"` + RangeBeginTs uint `json:"rangeBeginTs" form:"rangeBeginTs"` + RangeEndTs uint `json:"rangeEndTs" form:"rangeEndTs"` + DB []string `json:"db" form:"db"` + Limit uint `json:"limit" form:"limit"` + Text string `json:"text" form:"text"` + OrderBy string `json:"orderBy" form:"orderBy"` + IsDesc bool `json:"desc" form:"desc"` // for showing slow queries in the statement detail page Plans []string `json:"plans" form:"plans"` @@ -136,7 +138,8 @@ func getProjectionsByFields(jsonFields ...string) ([]string, error) { type GetDetailRequest struct { Digest string `json:"digest" form:"digest"` Timestamp float64 `json:"timestamp" form:"timestamp"` - ConnectID int64 `json:"connect_id" form:"connect_id"` + // TODO: Switch back to uint64 when modern browser as well as Swagger handles BigInt well. + ConnectID string `json:"connect_id" form:"connect_id"` } func QuerySlowLogList(db *gorm.DB, req *GetListRequest) ([]SlowQuery, error) { @@ -153,7 +156,7 @@ func QuerySlowLogList(db *gorm.DB, req *GetListRequest) ([]SlowQuery, error) { tx := db. Table(SlowQueryTable). Select(strings.Join(projections, ", ")). - Where("Time between from_unixtime(?) and from_unixtime(?)", req.LogStartTS, req.LogEndTS). + Where("Time BETWEEN FROM_UNIXTIME(?) AND FROM_UNIXTIME(?)", req.RangeBeginTs, req.RangeEndTs). Limit(req.Limit) if req.Text != "" { @@ -161,10 +164,10 @@ func QuerySlowLogList(db *gorm.DB, req *GetListRequest) ([]SlowQuery, error) { arr := strings.Fields(lowerStr) for _, v := range arr { tx = tx.Where( - `txn_start_ts REGEXP ? - OR LOWER(digest) REGEXP ? - OR LOWER(CONVERT(prev_stmt USING utf8)) REGEXP ? - OR LOWER(CONVERT(query USING utf8)) REGEXP ?`, + `Txn_start_ts REGEXP ? + OR LOWER(Digest) REGEXP ? 
+ OR LOWER(CONVERT(Prev_stmt USING utf8)) REGEXP ? + OR LOWER(CONVERT(Query USING utf8)) REGEXP ?`, v, v, v, v, ) } @@ -183,10 +186,10 @@ func QuerySlowLogList(db *gorm.DB, req *GetListRequest) ([]SlowQuery, error) { if strings.Contains(order[0], " AS ") { order[0] = req.OrderBy } - if req.DESC { - tx = tx.Order(fmt.Sprintf("%s desc", order[0])) + if req.IsDesc { + tx = tx.Order(fmt.Sprintf("%s DESC", order[0])) } else { - tx = tx.Order(fmt.Sprintf("%s asc", order[0])) + tx = tx.Order(fmt.Sprintf("%s ASC", order[0])) } if len(req.Plans) > 0 { @@ -211,7 +214,7 @@ func QuerySlowLogDetail(db *gorm.DB, req *GetDetailRequest) (*SlowQuery, error) Table(SlowQueryTable). Select(SelectStmt). Where("Digest = ?", req.Digest). - Where("Time = from_unixtime(?)", req.Timestamp). + Where("Time = FROM_UNIXTIME(?)", req.Timestamp). Where("Conn_id = ?", req.ConnectID). First(&result).Error if err != nil { diff --git a/pkg/apiserver/slowquery/service.go b/pkg/apiserver/slowquery/service.go index 9f04fcd304..235609a744 100644 --- a/pkg/apiserver/slowquery/service.go +++ b/pkg/apiserver/slowquery/service.go @@ -41,8 +41,8 @@ func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/slow_query") endpoint.Use(auth.MWAuthRequired()) endpoint.Use(utils.MWConnectTiDB(s.params.TiDBClient)) - endpoint.GET("/list", s.listHandler) - endpoint.GET("/detail", s.detailhandler) + endpoint.GET("/list", s.getList) + endpoint.GET("/detail", s.getDetails) } // @Summary List all slow queries @@ -51,7 +51,7 @@ func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { // @Router /slow_query/list [get] // @Security JwtAuth // @Failure 401 {object} utils.APIError "Unauthorized failure" -func (s *Service) listHandler(c *gin.Context) { +func (s *Service) getList(c *gin.Context) { var req GetListRequest if err := c.ShouldBindQuery(&req); err != nil { utils.MakeInvalidRequestErrorFromError(c, err) @@ -73,7 +73,7 @@ func (s *Service) listHandler(c *gin.Context) { // @Router /slow_query/detail [get] // @Security JwtAuth // @Failure 401 {object} utils.APIError "Unauthorized failure" -func (s *Service) detailhandler(c *gin.Context) { +func (s *Service) getDetails(c *gin.Context) { var req GetDetailRequest if err := c.ShouldBindQuery(&req); err != nil { utils.MakeInvalidRequestErrorFromError(c, err) diff --git a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx index 1f47b2e777..358d23cfd0 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx @@ -27,7 +27,7 @@ import TabCopr from './DetailTabCopr' import TabTxn from './DetailTabTxn' export interface IPageQuery { - connectId?: number + connectId?: string digest?: string timestamp?: number } diff --git a/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts b/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts index dcb4370bc7..7e575ed2ea 100644 --- a/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts +++ b/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts @@ -145,10 +145,10 @@ export default function useSlowQueryTableController( queryOptions.digest, selectedFields, queryOptions.limit, - queryTimeRange.endTime, - queryTimeRange.beginTime, orderOptions.orderBy, queryOptions.plans, + queryTimeRange.beginTime, + queryTimeRange.endTime, queryOptions.searchText, { errorStrategy: ErrorStrategy.Custom, From 9fe7947d0418924e839dc8abbe7a8e8c5ce5f747 Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> 
Date: Fri, 13 Nov 2020 09:41:30 +0800 Subject: [PATCH 15/29] ui: fix errors doesn't display (#794) --- .../SlowQuery/utils/useSlowQueryTableController.ts | 4 ++-- .../Statement/utils/useStatementTableController.ts | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts b/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts index 7e575ed2ea..f252ba2f35 100644 --- a/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts +++ b/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts @@ -117,7 +117,7 @@ export default function useSlowQueryTableController( }) setAllSchemas(res?.data || []) } catch (e) { - setErrors((prev) => [...prev, { ...e }]) + setErrors((prev) => prev.concat(e)) } } querySchemas() @@ -157,7 +157,7 @@ export default function useSlowQueryTableController( setSlowQueries(res.data || []) setErrors([]) } catch (e) { - setErrors((prev) => [...prev, { ...e }]) + setErrors((prev) => prev.concat(e)) } setLoadingSlowQueries(false) } diff --git a/ui/lib/apps/Statement/utils/useStatementTableController.ts b/ui/lib/apps/Statement/utils/useStatementTableController.ts index 5abcf2499b..bf713252a0 100644 --- a/ui/lib/apps/Statement/utils/useStatementTableController.ts +++ b/ui/lib/apps/Statement/utils/useStatementTableController.ts @@ -134,7 +134,7 @@ export default function useStatementTableController( }) setEnable(res?.data.enable!) } catch (e) { - setErrors((prev) => [...prev, { ...e }]) + setErrors((prev) => prev.concat(e)) } } @@ -145,7 +145,7 @@ export default function useStatementTableController( }) setAllSchemas(res?.data || []) } catch (e) { - setErrors((prev) => [...prev, { ...e }]) + setErrors((prev) => prev.concat(e)) } } @@ -156,7 +156,7 @@ export default function useStatementTableController( }) setAllTimeRanges(res?.data || []) } catch (e) { - setErrors((prev) => [...prev, { ...e }]) + setErrors((prev) => prev.concat(e)) } } @@ -167,7 +167,7 @@ export default function useStatementTableController( }) setAllStmtTypes(res?.data || []) } catch (e) { - setErrors((prev) => [...prev, { ...e }]) + setErrors((prev) => prev.concat(e)) } } @@ -213,7 +213,7 @@ export default function useStatementTableController( setStatements(res?.data || []) setErrors([]) } catch (e) { - setErrors((prev) => [...prev, { ...e }]) + setErrors((prev) => prev.concat(e)) } setLoadingStatements(false) } From 09dd36acd3ad1a3e6825210c6e97dbf8fd93b9fa Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Mon, 23 Nov 2020 12:25:49 +0800 Subject: [PATCH 16/29] ui: fix the error message doesn't show correct (#799) --- ui/lib/client/index.tsx | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/ui/lib/client/index.tsx b/ui/lib/client/index.tsx index 907a053efd..e821724cf0 100644 --- a/ui/lib/client/index.tsx +++ b/ui/lib/client/index.tsx @@ -39,6 +39,7 @@ export enum ErrorStrategy { Default = 'default', Custom = 'custom', } +const ERR_CODE_OTHER = 'error.api.other' function initAxios() { i18n.addTranslations(require.context('./translations/', false, /\.yaml$/)) @@ -50,15 +51,18 @@ function initAxios() { const method = (config.method as string).toLowerCase() let errCode: string + let content: string if (err.message === 'Network Error') { errCode = 'error.network' } else { - errCode = response?.data?.code || 'error.api.other' - if (errCode === 'error.api.other') { - errCode = response?.data?.message || err.message - } + errCode = response?.data?.code + } + if (errCode 
!== ERR_CODE_OTHER && i18next.exists(errCode)) { + content = i18next.t(errCode) + } else { + content = + response?.data?.message || err.message || i18next.t(ERR_CODE_OTHER) } - const content = i18next.t(errCode) err.message = content if (errCode === 'error.api.unauthorized') { From a673ded130a71b0062683047df32448fa7f763e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=9C=C9=B4=D0=B2=CA=8F=D1=82=E1=B4=87?= Date: Tue, 24 Nov 2020 10:21:34 +0800 Subject: [PATCH 17/29] slow_queries: support export (#792) --- pkg/apiserver/slowquery/queries.go | 106 +++++++++----- pkg/apiserver/slowquery/service.go | 88 ++++++++++- pkg/apiserver/statement/models.go | 52 ++++--- .../statement/{statement.go => service.go} | 131 +++-------------- pkg/apiserver/utils/export.go | 137 ++++++++++++++++++ ui/lib/apps/SlowQuery/pages/List/index.tsx | 51 ++++++- ui/lib/apps/SlowQuery/translations/en.yaml | 10 ++ ui/lib/apps/SlowQuery/translations/zh.yaml | 10 ++ .../utils/useSlowQueryTableController.ts | 36 ++++- ui/lib/apps/Statement/pages/List/index.tsx | 17 +-- .../utils/useStatementTableController.ts | 14 +- ui/lib/client/translations/en.yaml | 5 + ui/lib/client/translations/zh.yaml | 5 + 13 files changed, 468 insertions(+), 194 deletions(-) rename pkg/apiserver/statement/{statement.go => service.go} (72%) create mode 100644 pkg/apiserver/utils/export.go diff --git a/pkg/apiserver/slowquery/queries.go b/pkg/apiserver/slowquery/queries.go index 28141c9f43..f200e80431 100644 --- a/pkg/apiserver/slowquery/queries.go +++ b/pkg/apiserver/slowquery/queries.go @@ -93,13 +93,13 @@ type SlowQuery struct { } type GetListRequest struct { - RangeBeginTs uint `json:"rangeBeginTs" form:"rangeBeginTs"` - RangeEndTs uint `json:"rangeEndTs" form:"rangeEndTs"` - DB []string `json:"db" form:"db"` - Limit uint `json:"limit" form:"limit"` - Text string `json:"text" form:"text"` - OrderBy string `json:"orderBy" form:"orderBy"` - IsDesc bool `json:"desc" form:"desc"` + BeginTime int `json:"begin_time" form:"begin_time"` + EndTime int `json:"end_time" form:"end_time"` + DB []string `json:"db" form:"db"` + Limit uint `json:"limit" form:"limit"` + Text string `json:"text" form:"text"` + OrderBy string `json:"orderBy" form:"orderBy"` + IsDesc bool `json:"desc" form:"desc"` // for showing slow queries in the statement detail page Plans []string `json:"plans" form:"plans"` @@ -108,33 +108,58 @@ type GetListRequest struct { Fields string `json:"fields" form:"fields"` // example: "Query,Digest" } -func getProjectionsByFields(jsonFields ...string) ([]string, error) { - fields := make(map[string]*reflect.StructField) - t := reflect.TypeOf(SlowQuery{}) - fieldsNum := t.NumField() - for i := 0; i < fieldsNum; i++ { - field := t.Field(i) - fields[strings.ToLower(field.Tag.Get("json"))] = &field +var cachedProjectionsMap map[string]string + +func getProjectionsMap() map[string]string { + if cachedProjectionsMap == nil { + t := reflect.TypeOf(SlowQuery{}) + fieldsNum := t.NumField() + ret := map[string]string{} + for i := 0; i < fieldsNum; i++ { + field := t.Field(i) + // ignore to check error because the field is defined by ourself + // we can confirm that it has "gorm" tag and fixed structure + s, _ := field.Tag.Lookup("gorm") + jsonField := strings.ToLower(field.Tag.Get("json")) + sourceField := strings.Split(s, ":")[1] + if proj, ok := field.Tag.Lookup("proj"); ok { + ret[jsonField] = fmt.Sprintf("%s AS %s", proj, sourceField) + } else { + ret[jsonField] = sourceField + } + } + cachedProjectionsMap = ret } + return cachedProjectionsMap +} + +func 
getProjectionsByFields(jsonFields ...string) ([]string, error) { + projMap := getProjectionsMap() ret := make([]string, 0, len(jsonFields)) for _, fieldName := range jsonFields { - field, ok := fields[strings.ToLower(fieldName)] + field, ok := projMap[strings.ToLower(fieldName)] if !ok { return nil, fmt.Errorf("unknown field %s", fieldName) } - // ignore to check error because the field is defined by ourself - // we can confirm that it has "gorm" tag and fixed structure - s, _ := field.Tag.Lookup("gorm") - sourceField := strings.Split(s, ":")[1] - if proj, ok := field.Tag.Lookup("proj"); ok { - ret = append(ret, fmt.Sprintf("%s AS %s", proj, sourceField)) - } else { - ret = append(ret, sourceField) - } + ret = append(ret, field) } return ret, nil } +var cachedAllProjections []string + +func getAllProjections() []string { + if cachedAllProjections == nil { + projMap := getProjectionsMap() + ret := make([]string, 0, len(projMap)) + for _, proj := range projMap { + ret = append(ret, proj) + } + cachedAllProjections = ret + } + return cachedAllProjections +} + type GetDetailRequest struct { Digest string `json:"digest" form:"digest"` Timestamp float64 `json:"timestamp" form:"timestamp"` @@ -143,21 +168,29 @@ type GetDetailRequest struct { } func QuerySlowLogList(db *gorm.DB, req *GetListRequest) ([]SlowQuery, error) { - sqlFields := []string{"digest", "connection_id", "timestamp"} - if strings.TrimSpace(req.Fields) != "" { - sqlFields = append(sqlFields, strings.Split(req.Fields, ",")...) - sqlFields = funk.UniqString(sqlFields) - } - projections, err := getProjectionsByFields(sqlFields...) - if err != nil { - return nil, err + var projections []string + var err error + reqFields := strings.Split(req.Fields, ",") + if len(reqFields) == 1 && reqFields[0] == "*" { + projections = getAllProjections() + } else { + projections, err = getProjectionsByFields( + funk.UniqString( + append([]string{"digest", "connection_id", "timestamp"}, reqFields...), + )...) + if err != nil { + return nil, err + } } tx := db. Table(SlowQueryTable). Select(strings.Join(projections, ", ")). - Where("Time BETWEEN FROM_UNIXTIME(?) AND FROM_UNIXTIME(?)", req.RangeBeginTs, req.RangeEndTs). - Limit(req.Limit) + Where("Time BETWEEN FROM_UNIXTIME(?) 
AND FROM_UNIXTIME(?)", req.BeginTime, req.EndTime) + + if req.Limit > 0 { + tx = tx.Limit(req.Limit) + } if req.Text != "" { lowerStr := strings.ToLower(req.Text) @@ -177,6 +210,11 @@ func QuerySlowLogList(db *gorm.DB, req *GetListRequest) ([]SlowQuery, error) { tx = tx.Where("DB IN (?)", req.DB) } + // more robust + if req.OrderBy == "" { + req.OrderBy = "timestamp" + } + order, err := getProjectionsByFields(req.OrderBy) if err != nil { return nil, err diff --git a/pkg/apiserver/slowquery/service.go b/pkg/apiserver/slowquery/service.go index 235609a744..6df8d5d6c2 100644 --- a/pkg/apiserver/slowquery/service.go +++ b/pkg/apiserver/slowquery/service.go @@ -14,7 +14,12 @@ package slowquery import ( + "fmt" "net/http" + "strings" + "time" + + "github.com/joomcode/errorx" "github.com/gin-gonic/gin" "go.uber.org/fx" @@ -24,6 +29,11 @@ import ( "github.com/pingcap-incubator/tidb-dashboard/pkg/tidb" ) +var ( + ErrNS = errorx.NewNamespace("error.api.slow_query") + ErrNoData = ErrNS.NewType("export_no_data") +) + type ServiceParams struct { fx.In TiDBClient *tidb.Client @@ -39,10 +49,17 @@ func NewService(p ServiceParams) *Service { func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/slow_query") - endpoint.Use(auth.MWAuthRequired()) - endpoint.Use(utils.MWConnectTiDB(s.params.TiDBClient)) - endpoint.GET("/list", s.getList) - endpoint.GET("/detail", s.getDetails) + { + endpoint.GET("/download", s.downloadHandler) + endpoint.Use(auth.MWAuthRequired()) + endpoint.Use(utils.MWConnectTiDB(s.params.TiDBClient)) + { + endpoint.GET("/list", s.getList) + endpoint.GET("/detail", s.getDetails) + + endpoint.POST("/download/token", s.downloadTokenHandler) + } + } } // @Summary List all slow queries @@ -88,3 +105,66 @@ func (s *Service) getDetails(c *gin.Context) { } c.JSON(http.StatusOK, *result) } + +// @Router /slow_query/download/token [post] +// @Summary Generate a download token for exported slow query statements +// @Produce plain +// @Param request body GetListRequest true "Request body" +// @Success 200 {string} string "xxx" +// @Security JwtAuth +// @Failure 401 {object} utils.APIError "Unauthorized failure" +func (s *Service) downloadTokenHandler(c *gin.Context) { + var req GetListRequest + if err := c.ShouldBindJSON(&req); err != nil { + utils.MakeInvalidRequestErrorFromError(c, err) + return + } + db := utils.GetTiDBConnection(c) + fields := []string{} + if strings.TrimSpace(req.Fields) != "" { + fields = strings.Split(req.Fields, ",") + } + list, err := QuerySlowLogList(db, &req) + if err != nil { + _ = c.Error(err) + return + } + if len(list) == 0 { + _ = c.Error(ErrNoData.NewWithNoMessage()) + return + } + + // interface{} tricky + rawData := make([]interface{}, len(list)) + for i, v := range list { + rawData[i] = v + } + + // convert data + csvData := utils.GenerateCSVFromRaw(rawData, fields, []string{}) + + // generate temp file that persist encrypted data + timeLayout := "0102150405" + beginTime := time.Unix(int64(req.BeginTime), 0).Format(timeLayout) + endTime := time.Unix(int64(req.EndTime), 0).Format(timeLayout) + token, err := utils.ExportCSV(csvData, + fmt.Sprintf("slowquery_%s_%s_*.csv", beginTime, endTime), + "slowquery/download") + + if err != nil { + _ = c.Error(err) + return + } + c.String(http.StatusOK, token) +} + +// @Router /slow_query/download [get] +// @Summary Download slow query statements +// @Produce text/csv +// @Param token query string true "download token" +// @Failure 400 {object} utils.APIError +// @Failure 401 
{object} utils.APIError "Unauthorized failure" +func (s *Service) downloadHandler(c *gin.Context) { + token := c.Query("token") + utils.DownloadByToken(token, "slowquery/download", c) +} diff --git a/pkg/apiserver/statement/models.go b/pkg/apiserver/statement/models.go index e4120dcab9..d93e6c79e8 100644 --- a/pkg/apiserver/statement/models.go +++ b/pkg/apiserver/statement/models.go @@ -103,22 +103,31 @@ type Model struct { RelatedSchemas string `json:"related_schemas"` } -func getAggrFields(sqlFields ...string) []string { - fields := make(map[string]*reflect.StructField) - t := reflect.TypeOf(Model{}) - fieldsNum := t.NumField() - for i := 0; i < fieldsNum; i++ { - field := t.Field(i) - fields[strings.ToLower(field.Tag.Get("json"))] = &field +var cachedAggrMap map[string]string // jsonFieldName => aggr + +func getAggrMap() map[string]string { + if cachedAggrMap == nil { + t := reflect.TypeOf(Model{}) + fieldsNum := t.NumField() + ret := map[string]string{} + for i := 0; i < fieldsNum; i++ { + field := t.Field(i) + jsonField := strings.ToLower(field.Tag.Get("json")) + if agg, ok := field.Tag.Lookup("agg"); ok { + ret[jsonField] = fmt.Sprintf("%s AS %s", agg, gorm.ToColumnName(field.Name)) + } + } + cachedAggrMap = ret } + return cachedAggrMap +} + +func getAggrFields(sqlFields ...string) []string { + aggrMap := getAggrMap() ret := make([]string, 0, len(sqlFields)) for _, fieldName := range sqlFields { - if field, ok := fields[strings.ToLower(fieldName)]; ok { - if agg, ok := field.Tag.Lookup("agg"); ok { - ret = append(ret, fmt.Sprintf("%s AS %s", agg, gorm.ToColumnName(field.Name))) - } else { - panic(fmt.Sprintf("field %s cannot be aggregated", fieldName)) - } + if aggr, ok := aggrMap[strings.ToLower(fieldName)]; ok { + ret = append(ret, aggr) } else { panic(fmt.Sprintf("unknown aggregation field %s", fieldName)) } @@ -126,17 +135,18 @@ func getAggrFields(sqlFields ...string) []string { return ret } +var cachedAllAggrFields []string + func getAllAggrFields() []string { - t := reflect.TypeOf(Model{}) - fieldsNum := t.NumField() - ret := make([]string, 0, fieldsNum) - for i := 0; i < fieldsNum; i++ { - field := t.Field(i) - if agg, ok := field.Tag.Lookup("agg"); ok { - ret = append(ret, fmt.Sprintf("%s AS %s", agg, gorm.ToColumnName(field.Name))) + if cachedAllAggrFields == nil { + aggrMap := getAggrMap() + ret := make([]string, 0, len(aggrMap)) + for _, aggr := range aggrMap { + ret = append(ret, aggr) } + cachedAllAggrFields = ret } - return ret + return cachedAllAggrFields } // tableNames example: "d1.a1,d2.a2,d1.a1,d3.a3" diff --git a/pkg/apiserver/statement/statement.go b/pkg/apiserver/statement/service.go similarity index 72% rename from pkg/apiserver/statement/statement.go rename to pkg/apiserver/statement/service.go index 951828a160..7b25b3b16e 100644 --- a/pkg/apiserver/statement/statement.go +++ b/pkg/apiserver/statement/service.go @@ -14,33 +14,27 @@ package statement import ( - "encoding/base64" - "encoding/csv" - "errors" "fmt" - "io" - "io/ioutil" "net/http" - "os" - "reflect" "strings" "time" - "github.com/gin-gonic/gin" - "github.com/gtank/cryptopasta" - "github.com/pingcap/log" - "go.uber.org/fx" - "go.uber.org/zap" + "github.com/joomcode/errorx" - aesctr "github.com/Xeoncross/go-aesctr-with-hmac" + "github.com/gin-gonic/gin" - "gopkg.in/oleiade/reflections.v1" + "go.uber.org/fx" "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/user" "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/utils" 
"github.com/pingcap-incubator/tidb-dashboard/pkg/tidb" ) +var ( + ErrNS = errorx.NewNamespace("error.api.statement") + ErrNoData = ErrNS.NewType("export_no_data") +) + type ServiceParams struct { fx.In TiDBClient *tidb.Client @@ -265,76 +259,27 @@ func (s *Service) downloadTokenHandler(c *gin.Context) { return } if len(overviews) == 0 { - utils.MakeInvalidRequestErrorFromError(c, errors.New("no data to export")) + _ = c.Error(ErrNoData.NewWithNoMessage()) return } - // convert data - fieldsMap := make(map[string]string) - t := reflect.TypeOf(overviews[0]) - fieldsNum := t.NumField() - allFields := make([]string, fieldsNum) - for i := 0; i < fieldsNum; i++ { - field := t.Field(i) - allFields[i] = strings.ToLower(field.Tag.Get("json")) - fieldsMap[allFields[i]] = field.Name - } - if len(fields) == 1 && fields[0] == "*" { - fields = allFields + // interface{} tricky + rawData := make([]interface{}, len(overviews)) + for i, v := range overviews { + rawData[i] = v } - csvData := [][]string{fields} - timeLayout := "01-02 15:04:05" - for _, overview := range overviews { - row := []string{} - for _, field := range fields { - filedName := fieldsMap[field] - s, _ := reflections.GetField(overview, filedName) - var val string - switch t := s.(type) { - case int: - if field == "first_seen" || field == "last_seen" { - val = time.Unix(int64(t), 0).Format(timeLayout) - } else { - val = fmt.Sprintf("%d", t) - } - default: - val = fmt.Sprintf("%s", t) - } - row = append(row, val) - } - csvData = append(csvData, row) - } + // convert data + csvData := utils.GenerateCSVFromRaw(rawData, fields, []string{"first_seen", "last_seen"}) // generate temp file that persist encrypted data - timeLayout = "01021504" + timeLayout := "01021504" beginTime := time.Unix(int64(req.BeginTime), 0).Format(timeLayout) endTime := time.Unix(int64(req.EndTime), 0).Format(timeLayout) - csvFile, err := ioutil.TempFile("", fmt.Sprintf("statements_%s_%s_*.csv", beginTime, endTime)) - if err != nil { - _ = c.Error(err) - return - } - defer csvFile.Close() - - // generate encryption key - secretKey := *cryptopasta.NewEncryptionKey() + token, err := utils.ExportCSV(csvData, + fmt.Sprintf("statements_%s_%s_*.csv", beginTime, endTime), + "statements/download") - pr, pw := io.Pipe() - go func() { - csvwriter := csv.NewWriter(pw) - _ = csvwriter.WriteAll(csvData) - pw.Close() - }() - err = aesctr.Encrypt(pr, csvFile, secretKey[0:16], secretKey[16:]) - if err != nil { - _ = c.Error(err) - return - } - - // generate token by filepath and secretKey - secretKeyStr := base64.StdEncoding.EncodeToString(secretKey[:]) - token, err := utils.NewJWTString("statements/download", secretKeyStr+" "+csvFile.Name()) if err != nil { _ = c.Error(err) return @@ -350,41 +295,5 @@ func (s *Service) downloadTokenHandler(c *gin.Context) { // @Failure 401 {object} utils.APIError "Unauthorized failure" func (s *Service) downloadHandler(c *gin.Context) { token := c.Query("token") - tokenPlain, err := utils.ParseJWTString("statements/download", token) - if err != nil { - utils.MakeInvalidRequestErrorFromError(c, err) - return - } - arr := strings.Fields(tokenPlain) - if len(arr) != 2 { - utils.MakeInvalidRequestErrorFromError(c, errors.New("invalid token")) - return - } - secretKey, err := base64.StdEncoding.DecodeString(arr[0]) - if err != nil { - utils.MakeInvalidRequestErrorFromError(c, err) - return - } - - filePath := arr[1] - fileInfo, err := os.Stat(filePath) - if err != nil { - _ = c.Error(err) - return - } - f, err := os.Open(filePath) - if err != nil { - _ = 
c.Error(err) - return - } - - c.Writer.Header().Set("Content-type", "text/csv") - c.Writer.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", fileInfo.Name())) - err = aesctr.Decrypt(f, c.Writer, secretKey[0:16], secretKey[16:]) - if err != nil { - log.Error("decrypt csv failed", zap.Error(err)) - } - // delete it anyway - f.Close() - _ = os.Remove(filePath) + utils.DownloadByToken(token, "statements/download", c) } diff --git a/pkg/apiserver/utils/export.go b/pkg/apiserver/utils/export.go new file mode 100644 index 0000000000..e255f8cba2 --- /dev/null +++ b/pkg/apiserver/utils/export.go @@ -0,0 +1,137 @@ +package utils + +import ( + "encoding/base64" + "encoding/csv" + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "strings" + + "github.com/Xeoncross/go-aesctr-with-hmac" + "github.com/gin-gonic/gin" + "github.com/gtank/cryptopasta" + "github.com/pingcap/log" + "go.uber.org/zap" + "gopkg.in/oleiade/reflections.v1" + + "reflect" + "time" +) + +func GenerateCSVFromRaw(rawData []interface{}, fields []string, timeFields []string) (data [][]string) { + timeFieldsMap := make(map[string]struct{}) + for _, f := range timeFields { + timeFieldsMap[f] = struct{}{} + } + + fieldsMap := make(map[string]string) + t := reflect.TypeOf(rawData[0]) + fieldsNum := t.NumField() + allFields := make([]string, fieldsNum) + for i := 0; i < fieldsNum; i++ { + field := t.Field(i) + allFields[i] = strings.ToLower(field.Tag.Get("json")) + fieldsMap[allFields[i]] = field.Name + } + if len(fields) == 1 && fields[0] == "*" { + fields = allFields + } + + data = [][]string{fields} + timeLayout := "01-02 15:04:05" + for _, overview := range rawData { + row := []string{} + for _, field := range fields { + fieldName := fieldsMap[field] + s, _ := reflections.GetField(overview, fieldName) + var val string + switch t := s.(type) { + case int: + if _, ok := timeFieldsMap[field]; ok { + val = time.Unix(int64(t), 0).Format(timeLayout) + } else { + val = fmt.Sprintf("%d", t) + } + case uint: + val = fmt.Sprintf("%d", t) + case float64: + val = fmt.Sprintf("%f", t) + default: + val = fmt.Sprintf("%s", t) + } + row = append(row, val) + } + data = append(data, row) + } + return +} + +func ExportCSV(data [][]string, filename, tokenNamespace string) (token string, err error) { + csvFile, err := ioutil.TempFile("", filename) + if err != nil { + return + } + defer csvFile.Close() + + // generate encryption key + secretKey := *cryptopasta.NewEncryptionKey() + + pr, pw := io.Pipe() + go func() { + csvwriter := csv.NewWriter(pw) + _ = csvwriter.WriteAll(data) + pw.Close() + }() + err = aesctr.Encrypt(pr, csvFile, secretKey[0:16], secretKey[16:]) + if err != nil { + return + } + + // generate token by filepath and secretKey + secretKeyStr := base64.StdEncoding.EncodeToString(secretKey[:]) + token, err = NewJWTString(tokenNamespace, secretKeyStr+" "+csvFile.Name()) + return +} + +func DownloadByToken(token, tokenNamespace string, c *gin.Context) { + tokenPlain, err := ParseJWTString(tokenNamespace, token) + if err != nil { + MakeInvalidRequestErrorFromError(c, err) + return + } + arr := strings.Fields(tokenPlain) + if len(arr) != 2 { + MakeInvalidRequestErrorFromError(c, errors.New("invalid token")) + return + } + secretKey, err := base64.StdEncoding.DecodeString(arr[0]) + if err != nil { + MakeInvalidRequestErrorFromError(c, err) + return + } + + filePath := arr[1] + fileInfo, err := os.Stat(filePath) + if err != nil { + _ = c.Error(err) + return + } + f, err := os.Open(filePath) + if err != nil { + _ = 
c.Error(err) + return + } + + c.Writer.Header().Set("Content-type", "text/csv") + c.Writer.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, fileInfo.Name())) + err = aesctr.Decrypt(f, c.Writer, secretKey[0:16], secretKey[16:]) + if err != nil { + log.Error("decrypt csv failed", zap.Error(err)) + } + // delete it anyway + f.Close() + _ = os.Remove(filePath) +} diff --git a/ui/lib/apps/SlowQuery/pages/List/index.tsx b/ui/lib/apps/SlowQuery/pages/List/index.tsx index 379d27c373..0393c4ad06 100644 --- a/ui/lib/apps/SlowQuery/pages/List/index.tsx +++ b/ui/lib/apps/SlowQuery/pages/List/index.tsx @@ -1,7 +1,21 @@ import React from 'react' import { useTranslation } from 'react-i18next' -import { Select, Space, Tooltip, Input, Checkbox } from 'antd' -import { ReloadOutlined, LoadingOutlined } from '@ant-design/icons' +import { + Select, + Space, + Tooltip, + Input, + Checkbox, + message, + Menu, + Dropdown, +} from 'antd' +import { + ReloadOutlined, + LoadingOutlined, + MenuOutlined, + ExportOutlined, +} from '@ant-design/icons' import { ScrollablePane } from 'office-ui-fabric-react/lib/ScrollablePane' import { useLocalStorageState } from '@umijs/hooks' @@ -45,8 +59,36 @@ function List() { allSchemas, loadingSlowQueries, tableColumns, + downloadCSV, + downloading, } = controller + function exportCSV() { + const hide = message.loading( + t('statement.pages.overview.toolbar.exporting') + '...', + 0 + ) + downloadCSV().finally(hide) + } + + function menuItemClick({ key }) { + switch (key) { + case 'export': + exportCSV() + break + } + } + + const dropdownMenu = ( + + }> + {downloading + ? t('statement.pages.overview.toolbar.exporting') + : t('statement.pages.overview.toolbar.export')} + + + ) + return (
@@ -124,6 +166,11 @@ function List() { )} + +
+ +
+
diff --git a/ui/lib/apps/SlowQuery/translations/en.yaml b/ui/lib/apps/SlowQuery/translations/en.yaml index 9ec99334f3..f59f30746e 100644 --- a/ui/lib/apps/SlowQuery/translations/en.yaml +++ b/ui/lib/apps/SlowQuery/translations/en.yaml @@ -87,3 +87,13 @@ slow_query: time: Time copr: Coprocessor txn: Transaction + toolbar: + schemas: + placeholder: All Databases + selected: '{{ n }} Databases' + columnTitle: Database Name + select_columns: + show_full_sql: Show Full Query Text + refresh: Refresh + export: Export + exporting: Exporting diff --git a/ui/lib/apps/SlowQuery/translations/zh.yaml b/ui/lib/apps/SlowQuery/translations/zh.yaml index f59e140159..f1521f82dd 100644 --- a/ui/lib/apps/SlowQuery/translations/zh.yaml +++ b/ui/lib/apps/SlowQuery/translations/zh.yaml @@ -91,3 +91,13 @@ slow_query: time: 执行时间 copr: Coprocessor 读取 txn: 事务 + toolbar: + schemas: + placeholder: 所有数据库 + selected: '{{ n }} 数据库' + columnTitle: 数据库名 + select_columns: + show_full_sql: 显示完整 SQL 文本 + refresh: 刷新 + export: 导出 + exporting: 正在导出 diff --git a/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts b/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts index f252ba2f35..a619025c41 100644 --- a/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts +++ b/ui/lib/apps/SlowQuery/utils/useSlowQueryTableController.ts @@ -59,6 +59,9 @@ export interface ISlowQueryTableController { tableColumns: IColumn[] visibleColumnKeys: IColumnKeys + + downloadCSV: () => Promise + downloading: boolean } export default function useSlowQueryTableController( @@ -120,6 +123,7 @@ export default function useSlowQueryTableController( setErrors((prev) => prev.concat(e)) } } + querySchemas() }, []) @@ -140,15 +144,15 @@ export default function useSlowQueryTableController( const res = await client .getInstance() .slowQueryListGet( + queryTimeRange.beginTime, queryOptions.schemas, orderOptions.desc, queryOptions.digest, + queryTimeRange.endTime, selectedFields, queryOptions.limit, orderOptions.orderBy, queryOptions.plans, - queryTimeRange.beginTime, - queryTimeRange.endTime, queryOptions.searchText, { errorStrategy: ErrorStrategy.Custom, @@ -165,6 +169,31 @@ export default function useSlowQueryTableController( getSlowQueryList() }, [queryOptions, orderOptions, queryTimeRange, refreshTimes, selectedFields]) + const [downloading, setDownloading] = useState(false) + + async function downloadCSV() { + try { + setDownloading(true) + const res = await client.getInstance().slowQueryDownloadTokenPost({ + fields: '*', + db: queryOptions.schemas, + digest: queryOptions.digest, + text: queryOptions.searchText, + plans: queryOptions.plans, + orderBy: orderOptions.orderBy, + desc: orderOptions.desc, + end_time: queryTimeRange.endTime, + begin_time: queryTimeRange.beginTime, + }) + const token = res.data + if (token) { + window.location.href = `${client.getBasePath()}/slow_query/download?token=${token}` + } + } finally { + setDownloading(false) + } + } + return { queryOptions, setQueryOptions, @@ -181,5 +210,8 @@ export default function useSlowQueryTableController( tableColumns, visibleColumnKeys, + + downloading, + downloadCSV, } } diff --git a/ui/lib/apps/Statement/pages/List/index.tsx b/ui/lib/apps/Statement/pages/List/index.tsx index 79ca5d9cb7..07af13a7df 100644 --- a/ui/lib/apps/Statement/pages/List/index.tsx +++ b/ui/lib/apps/Statement/pages/List/index.tsx @@ -22,7 +22,6 @@ import { import { ScrollablePane } from 'office-ui-fabric-react/lib/ScrollablePane' import { useTranslation } from 'react-i18next' -import client from '@lib/client' 
import { Card, ColumnsSelector, Toolbar, MultiSelect } from '@lib/components' import { StatementsTable } from '../../components' @@ -62,26 +61,16 @@ export default function StatementsOverview() { loadingStatements, tableColumns, - genDownloadToken, + downloadCSV, downloading, } = controller - async function exportCSV() { + function exportCSV() { const hide = message.loading( t('statement.pages.overview.toolbar.exporting') + '...', 0 ) - try { - const token = await genDownloadToken() - if (token) { - const url = `${client.getBasePath()}/statements/download?token=${token}` - // `window.open(url)` would cause browser popup interception if genDownloadToken takes long time - // window.open(url) - window.location.href = url - } - } finally { - hide() - } + downloadCSV().finally(hide) } function menuItemClick({ key }) { diff --git a/ui/lib/apps/Statement/utils/useStatementTableController.ts b/ui/lib/apps/Statement/utils/useStatementTableController.ts index bf713252a0..cd66a366f4 100644 --- a/ui/lib/apps/Statement/utils/useStatementTableController.ts +++ b/ui/lib/apps/Statement/utils/useStatementTableController.ts @@ -68,7 +68,7 @@ export interface IStatementTableController { tableColumns: IColumn[] visibleColumnKeys: IColumnKeys - genDownloadToken: () => Promise + downloadCSV: () => Promise downloading: boolean } @@ -222,8 +222,8 @@ export default function useStatementTableController( }, [queryOptions, allTimeRanges, validTimeRange, selectedFields]) const [downloading, setDownloading] = useState(false) - async function genDownloadToken() { - let token = '' + + async function downloadCSV() { try { setDownloading(true) const res = await client.getInstance().statementsDownloadTokenPost({ @@ -234,11 +234,13 @@ export default function useStatementTableController( stmt_types: queryOptions.stmtTypes, text: queryOptions.searchText, }) - token = res.data + const token = res.data + if (token) { + window.location.href = `${client.getBasePath()}/statements/download?token=${token}` + } } finally { setDownloading(false) } - return token } return { @@ -261,7 +263,7 @@ export default function useStatementTableController( tableColumns, visibleColumnKeys, - genDownloadToken, + downloadCSV, downloading, } } diff --git a/ui/lib/client/translations/en.yaml b/ui/lib/client/translations/en.yaml index cbd393ae3d..a40a474edb 100644 --- a/ui/lib/client/translations/en.yaml +++ b/ui/lib/client/translations/en.yaml @@ -3,9 +3,14 @@ error: network: Network connection error api: unauthorized: Session is expired. Please sign in again. 
+ invalid_request: Bad Request user: signin: invalid_code: Authorization Code is invalid or expired + slow_query: + export_no_data: No slow queires can be exported + statement: + export_no_data: No statements can be exported other: Other error tidb: no_alive_tidb: No live TiDB instance in the cluster diff --git a/ui/lib/client/translations/zh.yaml b/ui/lib/client/translations/zh.yaml index f54f31e44d..db031cd3ec 100644 --- a/ui/lib/client/translations/zh.yaml +++ b/ui/lib/client/translations/zh.yaml @@ -3,9 +3,14 @@ error: network: 网络连接失败 api: unauthorized: 会话已过期,请重新登录 + invalid_request: 请求出错 user: signin: invalid_code: 授权码无效或已过期 + slow_query: + export_no_data: 没有可导出的慢查询日志 + statement: + export_no_data: 没有可导出的语句 other: 其他错误 tidb: no_alive_tidb: 集群未启动 TiDB 实例 From 8804bf8d25c3c77caa828ca9827502dec7643aa9 Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Tue, 24 Nov 2020 13:17:31 +0800 Subject: [PATCH 18/29] ui: add MySqlFormatter to customize the sql formatter (#805) --- ui/lib/apps/SlowQuery/pages/Detail/index.tsx | 2 +- .../Statement/pages/Detail/PlanDetail.tsx | 2 +- ui/lib/apps/Statement/pages/Detail/index.tsx | 2 +- ui/lib/components/HighlightSQL/index.tsx | 2 +- ui/lib/utils/formatSql.ts | 5 - ui/lib/utils/sqlFormatter/TiDBSQLFormatter.ts | 371 ++++++++++++++++++ ui/lib/utils/sqlFormatter/index.ts | 7 + 7 files changed, 382 insertions(+), 9 deletions(-) delete mode 100644 ui/lib/utils/formatSql.ts create mode 100644 ui/lib/utils/sqlFormatter/TiDBSQLFormatter.ts create mode 100644 ui/lib/utils/sqlFormatter/index.ts diff --git a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx index 358d23cfd0..8052a5957f 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx @@ -8,7 +8,7 @@ import { useLocalStorageState } from '@umijs/hooks' import client from '@lib/client' import { useClientRequest } from '@lib/utils/useClientRequest' import { buildQueryFn, parseQueryFn } from '@lib/utils/query' -import formatSql from '@lib/utils/formatSql' +import formatSql from '@lib/utils/sqlFormatter' import { AnimatedSkeleton, CardTabs, diff --git a/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx b/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx index b686c63345..bb80fce1b6 100644 --- a/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx +++ b/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx @@ -16,7 +16,7 @@ import { } from '@lib/components' import { useClientRequest } from '@lib/utils/useClientRequest' import client from '@lib/client' -import formatSql from '@lib/utils/formatSql' +import formatSql from '@lib/utils/sqlFormatter' import { IPageQuery } from '.' 
import TabBasic from './PlanDetailTabBasic' diff --git a/ui/lib/apps/Statement/pages/Detail/index.tsx b/ui/lib/apps/Statement/pages/Detail/index.tsx index 8f97175764..ca91263f02 100644 --- a/ui/lib/apps/Statement/pages/Detail/index.tsx +++ b/ui/lib/apps/Statement/pages/Detail/index.tsx @@ -20,7 +20,7 @@ import { TextWithInfo, } from '@lib/components' import CopyLink from '@lib/components/CopyLink' -import formatSql from '@lib/utils/formatSql' +import formatSql from '@lib/utils/sqlFormatter' import { buildQueryFn, parseQueryFn } from '@lib/utils/query' import { useClientRequest } from '@lib/utils/useClientRequest' diff --git a/ui/lib/components/HighlightSQL/index.tsx b/ui/lib/components/HighlightSQL/index.tsx index 687fbcf725..7e6b2b5e2a 100644 --- a/ui/lib/components/HighlightSQL/index.tsx +++ b/ui/lib/components/HighlightSQL/index.tsx @@ -5,7 +5,7 @@ import sql from 'react-syntax-highlighter/dist/esm/languages/hljs/sql' import lightTheme from 'react-syntax-highlighter/dist/esm/styles/hljs/atom-one-light' import darkTheme from 'react-syntax-highlighter/dist/esm/styles/hljs/atom-one-dark' import Pre from '../Pre' -import formatSql from '@lib/utils/formatSql' +import formatSql from '@lib/utils/sqlFormatter' import moize from 'moize' SyntaxHighlighter.registerLanguage('sql', sql) diff --git a/ui/lib/utils/formatSql.ts b/ui/lib/utils/formatSql.ts deleted file mode 100644 index 62bdb9a8e2..0000000000 --- a/ui/lib/utils/formatSql.ts +++ /dev/null @@ -1,5 +0,0 @@ -import sqlFormatter from 'sql-formatter-plus-plus' - -export default function formatSql(sql?: string): string { - return sqlFormatter.format(sql || '', { uppercase: true }) -} diff --git a/ui/lib/utils/sqlFormatter/TiDBSQLFormatter.ts b/ui/lib/utils/sqlFormatter/TiDBSQLFormatter.ts new file mode 100644 index 0000000000..e91656fe87 --- /dev/null +++ b/ui/lib/utils/sqlFormatter/TiDBSQLFormatter.ts @@ -0,0 +1,371 @@ +// This file is copied from 'sql-formatter-plus-plus/src/languages/StandardSqlFormatter.js'. 
+// And changed the following lines: +// `namedPlaceholderTypes: ['@', ':'],` => `namedPlaceholderTypes: [':'],` +// Add the following line which copied from Db2Formatter.js: +// `specialWordChars: ['@'],` + +import Formatter from 'sql-formatter-plus-plus/src/core/Formatter' +import Tokenizer from 'sql-formatter-plus-plus/src/core/Tokenizer' + +const reservedWords = [ + 'ACCESSIBLE', + 'ACTION', + 'AGAINST', + 'AGGREGATE', + 'ALGORITHM', + 'ALL', + 'ALTER', + 'ANALYSE', + 'ANALYZE', + 'AS', + 'ASC', + 'AUTOCOMMIT', + 'AUTO_INCREMENT', + 'BACKUP', + 'BEGIN', + 'BETWEEN', + 'BINLOG', + 'BOTH', + 'CASCADE', + 'CASE', + 'CHANGE', + 'CHANGED', + 'CHARACTER SET', + 'CHARSET', + 'CHECK', + 'CHECKSUM', + 'COLLATE', + 'COLLATION', + 'COLUMN', + 'COLUMNS', + 'COMMENT', + 'COMMIT', + 'COMMITTED', + 'COMPRESSED', + 'CONCURRENT', + 'CONSTRAINT', + 'CONTAINS', + 'CONVERT', + 'CREATE', + 'CROSS', + 'CURRENT_TIMESTAMP', + 'DATABASE', + 'DATABASES', + 'DAY', + 'DAY_HOUR', + 'DAY_MINUTE', + 'DAY_SECOND', + 'DEFAULT', + 'DEFINER', + 'DELAYED', + 'DELETE', + 'DESC', + 'DESCRIBE', + 'DETERMINISTIC', + 'DISTINCT', + 'DISTINCTROW', + 'DIV', + 'DO', + 'DROP', + 'DUMPFILE', + 'DUPLICATE', + 'DYNAMIC', + 'ELSE', + 'ENCLOSED', + 'END', + 'ENGINE', + 'ENGINES', + 'ENGINE_TYPE', + 'ESCAPE', + 'ESCAPED', + 'EVENTS', + 'EXEC', + 'EXECUTE', + 'EXISTS', + 'EXPLAIN', + 'EXTENDED', + 'FAST', + 'FETCH', + 'FIELDS', + 'FILE', + 'FIRST', + 'FIXED', + 'FLUSH', + 'FOR', + 'FORCE', + 'FOREIGN', + 'FULL', + 'FULLTEXT', + 'FUNCTION', + 'GLOBAL', + 'GRANT', + 'GRANTS', + 'GROUP_CONCAT', + 'HEAP', + 'HIGH_PRIORITY', + 'HOSTS', + 'HOUR', + 'HOUR_MINUTE', + 'HOUR_SECOND', + 'IDENTIFIED', + 'IF', + 'IFNULL', + 'IGNORE', + 'IN', + 'INDEX', + 'INDEXES', + 'INFILE', + 'INSERT', + 'INSERT_ID', + 'INSERT_METHOD', + 'INTERVAL', + 'INTO', + 'INVOKER', + 'IS', + 'ISOLATION', + 'KEY', + 'KEYS', + 'KILL', + 'LAST_INSERT_ID', + 'LEADING', + 'LEVEL', + 'LIKE', + 'LINEAR', + 'LINES', + 'LOAD', + 'LOCAL', + 'LOCK', + 'LOCKS', + 'LOGS', + 'LOW_PRIORITY', + 'MARIA', + 'MASTER', + 'MASTER_CONNECT_RETRY', + 'MASTER_HOST', + 'MASTER_LOG_FILE', + 'MATCH', + 'MAX_CONNECTIONS_PER_HOUR', + 'MAX_QUERIES_PER_HOUR', + 'MAX_ROWS', + 'MAX_UPDATES_PER_HOUR', + 'MAX_USER_CONNECTIONS', + 'MEDIUM', + 'MERGE', + 'MINUTE', + 'MINUTE_SECOND', + 'MIN_ROWS', + 'MODE', + 'MODIFY', + 'MONTH', + 'MRG_MYISAM', + 'MYISAM', + 'NAMES', + 'NATURAL', + 'NOT', + 'NOW()', + 'NULL', + 'OFFSET', + 'ON DELETE', + 'ON UPDATE', + 'ON', + 'ONLY', + 'OPEN', + 'OPTIMIZE', + 'OPTION', + 'OPTIONALLY', + 'OUTFILE', + 'PACK_KEYS', + 'PAGE', + 'PARTIAL', + 'PARTITION', + 'PARTITIONS', + 'PASSWORD', + 'PRIMARY', + 'PRIVILEGES', + 'PROCEDURE', + 'PROCESS', + 'PROCESSLIST', + 'PURGE', + 'QUICK', + 'RAID0', + 'RAID_CHUNKS', + 'RAID_CHUNKSIZE', + 'RAID_TYPE', + 'RANGE', + 'READ', + 'READ_ONLY', + 'READ_WRITE', + 'REFERENCES', + 'REGEXP', + 'RELOAD', + 'RENAME', + 'REPAIR', + 'REPEATABLE', + 'REPLACE', + 'REPLICATION', + 'RESET', + 'RESTORE', + 'RESTRICT', + 'RETURN', + 'RETURNS', + 'REVOKE', + 'RLIKE', + 'ROLLBACK', + 'ROW', + 'ROWS', + 'ROW_FORMAT', + 'SECOND', + 'SECURITY', + 'SEPARATOR', + 'SERIALIZABLE', + 'SESSION', + 'SHARE', + 'SHOW', + 'SHUTDOWN', + 'SLAVE', + 'SONAME', + 'SOUNDS', + 'SQL', + 'SQL_AUTO_IS_NULL', + 'SQL_BIG_RESULT', + 'SQL_BIG_SELECTS', + 'SQL_BIG_TABLES', + 'SQL_BUFFER_RESULT', + 'SQL_CACHE', + 'SQL_CALC_FOUND_ROWS', + 'SQL_LOG_BIN', + 'SQL_LOG_OFF', + 'SQL_LOG_UPDATE', + 'SQL_LOW_PRIORITY_UPDATES', + 'SQL_MAX_JOIN_SIZE', + 'SQL_NO_CACHE', + 'SQL_QUOTE_SHOW_CREATE', + 
'SQL_SAFE_UPDATES', + 'SQL_SELECT_LIMIT', + 'SQL_SLAVE_SKIP_COUNTER', + 'SQL_SMALL_RESULT', + 'SQL_WARNINGS', + 'START', + 'STARTING', + 'STATUS', + 'STOP', + 'STORAGE', + 'STRAIGHT_JOIN', + 'STRING', + 'STRIPED', + 'SUPER', + 'TABLE', + 'TABLES', + 'TEMPORARY', + 'TERMINATED', + 'THEN', + 'TO', + 'TRAILING', + 'TRANSACTIONAL', + 'TRUE', + 'TRUNCATE', + 'TYPE', + 'TYPES', + 'UNCOMMITTED', + 'UNIQUE', + 'UNLOCK', + 'UNSIGNED', + 'USAGE', + 'USE', + 'USING', + 'VARIABLES', + 'VIEW', + 'WHEN', + 'WITH', + 'WORK', + 'WRITE', + 'YEAR_MONTH', +] + +const reservedTopLevelWords = [ + 'ADD', + 'AFTER', + 'ALTER COLUMN', + 'ALTER TABLE', + 'DELETE FROM', + 'EXCEPT', + 'FETCH FIRST', + 'FROM', + 'GROUP BY', + 'GO', + 'HAVING', + 'INSERT INTO', + 'INSERT', + 'LIMIT', + 'MODIFY', + 'ORDER BY', + 'SELECT', + 'SET CURRENT SCHEMA', + 'SET SCHEMA', + 'SET', + 'UPDATE', + 'VALUES', + 'WHERE', +] + +const reservedTopLevelWordsNoIndent = [ + 'INTERSECT', + 'INTERSECT ALL', + 'MINUS', + 'UNION', + 'UNION ALL', +] + +const reservedNewlineWords = [ + 'AND', + 'CROSS APPLY', + 'CROSS JOIN', + 'ELSE', + 'INNER JOIN', + 'JOIN', + 'LEFT JOIN', + 'LEFT OUTER JOIN', + 'OR', + 'OUTER APPLY', + 'OUTER JOIN', + 'RIGHT JOIN', + 'RIGHT OUTER JOIN', + 'WHEN', + 'XOR', +] + +let tokenizer + +export default class TiDBSQLFormatter { + /** + * @param {Object} cfg Different set of configurations + */ + constructor(public cfg) { + this.cfg = cfg + } + + /** + * Format the whitespace in a Standard SQL string to make it easier to read + * + * @param {String} query The Standard SQL string + * @return {String} formatted string + */ + format(query) { + if (!tokenizer) { + tokenizer = new Tokenizer({ + reservedWords, + reservedTopLevelWords, + reservedNewlineWords, + reservedTopLevelWordsNoIndent, + stringTypes: [`""`, "N''", "''", '``', '[]'], + openParens: ['(', 'CASE'], + closeParens: [')', 'END'], + indexedPlaceholderTypes: ['?'], + namedPlaceholderTypes: [':'], + lineCommentTypes: ['#', '--'], + specialWordChars: ['@'], + }) + } + return new Formatter(this.cfg, tokenizer).format(query) + } +} diff --git a/ui/lib/utils/sqlFormatter/index.ts b/ui/lib/utils/sqlFormatter/index.ts new file mode 100644 index 0000000000..9405055f5f --- /dev/null +++ b/ui/lib/utils/sqlFormatter/index.ts @@ -0,0 +1,7 @@ +import TiDBSQLFormatter from './TiDBSQLFormatter' + +const mySqlFormatter = new TiDBSQLFormatter({ uppercase: true }) + +export default function formatSql(sql?: string): string { + return mySqlFormatter.format(sql || '') +} From c844020db0332f7f32db36ef8e14cfe333af5cd6 Mon Sep 17 00:00:00 2001 From: crazycs Date: Tue, 24 Nov 2020 17:57:10 +0800 Subject: [PATCH 19/29] *: fix query statement detail error cause by round (#806) Signed-off-by: crazycs520 --- pkg/apiserver/statement/models.go | 56 +++++++++++++++---------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/pkg/apiserver/statement/models.go b/pkg/apiserver/statement/models.go index d93e6c79e8..bee354cd31 100644 --- a/pkg/apiserver/statement/models.go +++ b/pkg/apiserver/statement/models.go @@ -42,52 +42,52 @@ type Model struct { AggSumLatency int `json:"sum_latency" agg:"SUM(sum_latency)"` AggMaxLatency int `json:"max_latency" agg:"MAX(max_latency)"` AggMinLatency int `json:"min_latency" agg:"MIN(min_latency)"` - AggAvgLatency int `json:"avg_latency" agg:"ROUND(SUM(exec_count * avg_latency) / SUM(exec_count))"` - AggAvgParseLatency int `json:"avg_parse_latency" agg:"ROUND(SUM(exec_count * avg_parse_latency) / SUM(exec_count))"` + AggAvgLatency int 
`json:"avg_latency" agg:"CAST(SUM(exec_count * avg_latency) / SUM(exec_count) AS SIGNED)"` + AggAvgParseLatency int `json:"avg_parse_latency" agg:"CAST(SUM(exec_count * avg_parse_latency) / SUM(exec_count) AS SIGNED)"` AggMaxParseLatency int `json:"max_parse_latency" agg:"MAX(max_parse_latency)"` - AggAvgCompileLatency int `json:"avg_compile_latency" agg:"ROUND(SUM(exec_count * avg_compile_latency) / SUM(exec_count))"` + AggAvgCompileLatency int `json:"avg_compile_latency" agg:"CAST(SUM(exec_count * avg_compile_latency) / SUM(exec_count) AS SIGNED)"` AggMaxCompileLatency int `json:"max_compile_latency" agg:"MAX(max_compile_latency)"` AggSumCopTaskNum int `json:"sum_cop_task_num" agg:"SUM(sum_cop_task_num)"` - AggAvgCopProcessTime int `json:"avg_cop_process_time" agg:"ROUND(SUM(exec_count * avg_process_time) / SUM(sum_cop_task_num))"` // avg process time per copr task - AggMaxCopProcessTime int `json:"max_cop_process_time" agg:"MAX(max_cop_process_time)"` // max process time per copr task - AggAvgCopWaitTime int `json:"avg_cop_wait_time" agg:"ROUND(SUM(exec_count * avg_wait_time) / SUM(sum_cop_task_num))"` // avg wait time per copr task - AggMaxCopWaitTime int `json:"max_cop_wait_time" agg:"MAX(max_cop_wait_time)"` // max wait time per copr task - AggAvgProcessTime int `json:"avg_process_time" agg:"ROUND(SUM(exec_count * avg_process_time) / SUM(exec_count))"` // avg total process time per sql - AggMaxProcessTime int `json:"max_process_time" agg:"MAX(max_process_time)"` // max process time per sql - AggAvgWaitTime int `json:"avg_wait_time" agg:"ROUND(SUM(exec_count * avg_wait_time) / SUM(exec_count))"` // avg total wait time per sql - AggMaxWaitTime int `json:"max_wait_time" agg:"MAX(max_wait_time)"` // max wait time per sql - AggAvgBackoffTime int `json:"avg_backoff_time" agg:"ROUND(SUM(exec_count * avg_backoff_time) / SUM(exec_count))"` // avg total back off time per sql - AggMaxBackoffTime int `json:"max_backoff_time" agg:"MAX(max_backoff_time)"` // max back off time per sql - AggAvgTotalKeys int `json:"avg_total_keys" agg:"ROUND(SUM(exec_count * avg_total_keys) / SUM(exec_count))"` + AggAvgCopProcessTime int `json:"avg_cop_process_time" agg:"CAST(SUM(exec_count * avg_process_time) / SUM(sum_cop_task_num) AS SIGNED)"` // avg process time per copr task + AggMaxCopProcessTime int `json:"max_cop_process_time" agg:"MAX(max_cop_process_time)"` // max process time per copr task + AggAvgCopWaitTime int `json:"avg_cop_wait_time" agg:"CAST(SUM(exec_count * avg_wait_time) / SUM(sum_cop_task_num) AS SIGNED)"` // avg wait time per copr task + AggMaxCopWaitTime int `json:"max_cop_wait_time" agg:"MAX(max_cop_wait_time)"` // max wait time per copr task + AggAvgProcessTime int `json:"avg_process_time" agg:"CAST(SUM(exec_count * avg_process_time) / SUM(exec_count) AS SIGNED)"` // avg total process time per sql + AggMaxProcessTime int `json:"max_process_time" agg:"MAX(max_process_time)"` // max process time per sql + AggAvgWaitTime int `json:"avg_wait_time" agg:"CAST(SUM(exec_count * avg_wait_time) / SUM(exec_count) AS SIGNED)"` // avg total wait time per sql + AggMaxWaitTime int `json:"max_wait_time" agg:"MAX(max_wait_time)"` // max wait time per sql + AggAvgBackoffTime int `json:"avg_backoff_time" agg:"CAST(SUM(exec_count * avg_backoff_time) / SUM(exec_count) AS SIGNED)"` // avg total back off time per sql + AggMaxBackoffTime int `json:"max_backoff_time" agg:"MAX(max_backoff_time)"` // max back off time per sql + AggAvgTotalKeys int `json:"avg_total_keys" agg:"CAST(SUM(exec_count * avg_total_keys) / 
SUM(exec_count) AS SIGNED)"` AggMaxTotalKeys int `json:"max_total_keys" agg:"MAX(max_total_keys)"` - AggAvgProcessedKeys int `json:"avg_processed_keys" agg:"ROUND(SUM(exec_count * avg_processed_keys) / SUM(exec_count))"` + AggAvgProcessedKeys int `json:"avg_processed_keys" agg:"CAST(SUM(exec_count * avg_processed_keys) / SUM(exec_count) AS SIGNED)"` AggMaxProcessedKeys int `json:"max_processed_keys" agg:"MAX(max_processed_keys)"` - AggAvgPrewriteTime int `json:"avg_prewrite_time" agg:"ROUND(SUM(exec_count * avg_prewrite_time) / SUM(exec_count))"` + AggAvgPrewriteTime int `json:"avg_prewrite_time" agg:"CAST(SUM(exec_count * avg_prewrite_time) / SUM(exec_count) AS SIGNED)"` AggMaxPrewriteTime int `json:"max_prewrite_time" agg:"MAX(max_prewrite_time)"` - AggAvgCommitTime int `json:"avg_commit_time" agg:"ROUND(SUM(exec_count * avg_commit_time) / SUM(exec_count))"` + AggAvgCommitTime int `json:"avg_commit_time" agg:"CAST(SUM(exec_count * avg_commit_time) / SUM(exec_count) AS SIGNED)"` AggMaxCommitTime int `json:"max_commit_time" agg:"MAX(max_commit_time)"` - AggAvgGetCommitTsTime int `json:"avg_get_commit_ts_time" agg:"ROUND(SUM(exec_count * avg_get_commit_ts_time) / SUM(exec_count))"` + AggAvgGetCommitTsTime int `json:"avg_get_commit_ts_time" agg:"CAST(SUM(exec_count * avg_get_commit_ts_time) / SUM(exec_count) AS SIGNED)"` AggMaxGetCommitTsTime int `json:"max_get_commit_ts_time" agg:"MAX(max_get_commit_ts_time)"` - AggAvgCommitBackoffTime int `json:"avg_commit_backoff_time" agg:"ROUND(SUM(exec_count * avg_commit_backoff_time) / SUM(exec_count))"` + AggAvgCommitBackoffTime int `json:"avg_commit_backoff_time" agg:"CAST(SUM(exec_count * avg_commit_backoff_time) / SUM(exec_count) AS SIGNED)"` AggMaxCommitBackoffTime int `json:"max_commit_backoff_time" agg:"MAX(max_commit_backoff_time)"` - AggAvgResolveLockTime int `json:"avg_resolve_lock_time" agg:"ROUND(SUM(exec_count * avg_resolve_lock_time) / SUM(exec_count))"` + AggAvgResolveLockTime int `json:"avg_resolve_lock_time" agg:"CAST(SUM(exec_count * avg_resolve_lock_time) / SUM(exec_count) AS SIGNED)"` AggMaxResolveLockTime int `json:"max_resolve_lock_time" agg:"MAX(max_resolve_lock_time)"` - AggAvgLocalLatchWaitTime int `json:"avg_local_latch_wait_time" agg:"ROUND(SUM(exec_count * avg_local_latch_wait_time) / SUM(exec_count))"` + AggAvgLocalLatchWaitTime int `json:"avg_local_latch_wait_time" agg:"CAST(SUM(exec_count * avg_local_latch_wait_time) / SUM(exec_count) AS SIGNED)"` AggMaxLocalLatchWaitTime int `json:"max_local_latch_wait_time" agg:"MAX(max_local_latch_wait_time)"` - AggAvgWriteKeys int `json:"avg_write_keys" agg:"ROUND(SUM(exec_count * avg_write_keys) / SUM(exec_count))"` + AggAvgWriteKeys int `json:"avg_write_keys" agg:"CAST(SUM(exec_count * avg_write_keys) / SUM(exec_count) AS SIGNED)"` AggMaxWriteKeys int `json:"max_write_keys" agg:"MAX(max_write_keys)"` - AggAvgWriteSize int `json:"avg_write_size" agg:"ROUND(SUM(exec_count * avg_write_size) / SUM(exec_count))"` + AggAvgWriteSize int `json:"avg_write_size" agg:"CAST(SUM(exec_count * avg_write_size) / SUM(exec_count) AS SIGNED)"` AggMaxWriteSize int `json:"max_write_size" agg:"MAX(max_write_size)"` - AggAvgPrewriteRegions int `json:"avg_prewrite_regions" agg:"ROUND(SUM(exec_count * avg_prewrite_regions) / SUM(exec_count))"` + AggAvgPrewriteRegions int `json:"avg_prewrite_regions" agg:"CAST(SUM(exec_count * avg_prewrite_regions) / SUM(exec_count) AS SIGNED)"` AggMaxPrewriteRegions int `json:"max_prewrite_regions" agg:"MAX(max_prewrite_regions)"` - AggAvgTxnRetry int `json:"avg_txn_retry" 
agg:"ROUND(SUM(exec_count * avg_txn_retry) / SUM(exec_count))"` + AggAvgTxnRetry int `json:"avg_txn_retry" agg:"CAST(SUM(exec_count * avg_txn_retry) / SUM(exec_count) AS SIGNED)"` AggMaxTxnRetry int `json:"max_txn_retry" agg:"MAX(max_txn_retry)"` AggSumBackoffTimes int `json:"sum_backoff_times" agg:"SUM(sum_backoff_times)"` - AggAvgMem int `json:"avg_mem" agg:"ROUND(SUM(exec_count * avg_mem) / SUM(exec_count))"` + AggAvgMem int `json:"avg_mem" agg:"CAST(SUM(exec_count * avg_mem) / SUM(exec_count) AS SIGNED)"` AggMaxMem int `json:"max_mem" agg:"MAX(max_mem)"` - AggAvgDisk int `json:"avg_disk" agg:"ROUND(SUM(exec_count * avg_disk) / SUM(exec_count))"` + AggAvgDisk int `json:"avg_disk" agg:"CAST(SUM(exec_count * avg_disk) / SUM(exec_count) AS SIGNED)"` AggMaxDisk int `json:"max_disk" agg:"MAX(max_disk)"` - AggAvgAffectedRows int `json:"avg_affected_rows" agg:"ROUND(SUM(exec_count * avg_affected_rows) / SUM(exec_count))"` + AggAvgAffectedRows int `json:"avg_affected_rows" agg:"CAST(SUM(exec_count * avg_affected_rows) / SUM(exec_count) AS SIGNED)"` AggFirstSeen int `json:"first_seen" agg:"UNIX_TIMESTAMP(MIN(first_seen))"` AggLastSeen int `json:"last_seen" agg:"UNIX_TIMESTAMP(MAX(last_seen))"` AggSampleUser string `json:"sample_user" agg:"ANY_VALUE(sample_user)"` From b2a389e45ea48d824dce3e3c1bd3790b59596be1 Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Tue, 24 Nov 2020 19:49:57 +0800 Subject: [PATCH 20/29] ui: copy original content instead of formatted content for CopyLink (#802) * ui: copy original content instead of formatted content for CopyLink component * Revert "ui: copy original content instead of formatted content for CopyLink component" This reverts commit fa2a709737d7f7a0d5076afbefb084471b727556. * ui: support copy original sql and formatted sql * refine * remove tooltip --- ui/lib/apps/SlowQuery/pages/Detail/index.tsx | 18 +++++++++++++-- .../Statement/pages/Detail/PlanDetail.tsx | 18 +++++++++++++-- ui/lib/apps/Statement/pages/Detail/index.tsx | 9 +++++++- ui/lib/components/CopyLink/index.tsx | 23 +++++++++++++++---- 4 files changed, 58 insertions(+), 10 deletions(-) diff --git a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx index 8052a5957f..b0d513a54e 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx @@ -91,7 +91,14 @@ function DetailPage() { expanded={detailExpand.query} onClick={toggleQuery} /> - + + } > @@ -117,7 +124,14 @@ function DetailPage() { expanded={detailExpand.prev_query} onClick={togglePrevQuery} /> - + + } > diff --git a/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx b/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx index bb80fce1b6..8a7c75e90d 100644 --- a/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx +++ b/ui/lib/apps/Statement/pages/Detail/PlanDetail.tsx @@ -96,7 +96,14 @@ function PlanDetail({ query }: IPlanDetailProps) { expanded={detailExpand.query} onClick={toggleQuery} /> - + + } > @@ -120,7 +127,14 @@ function PlanDetail({ query }: IPlanDetailProps) { expanded={detailExpand.prev_query} onClick={togglePrevQuery} /> - + + } > diff --git a/ui/lib/apps/Statement/pages/Detail/index.tsx b/ui/lib/apps/Statement/pages/Detail/index.tsx index ca91263f02..a288b467c6 100644 --- a/ui/lib/apps/Statement/pages/Detail/index.tsx +++ b/ui/lib/apps/Statement/pages/Detail/index.tsx @@ -99,7 +99,14 @@ function DetailPage() { expanded={sqlExpanded} onClick={toggleSqlExpanded} /> - + + } > diff --git 
a/ui/lib/components/CopyLink/index.tsx b/ui/lib/components/CopyLink/index.tsx index 7a395b51f9..1bd0e764cc 100644 --- a/ui/lib/components/CopyLink/index.tsx +++ b/ui/lib/components/CopyLink/index.tsx @@ -1,23 +1,35 @@ import React, { useState } from 'react' import { CopyToClipboard } from 'react-copy-to-clipboard' -import { addTranslationResource } from '@lib/utils/i18n' import { useTranslation } from 'react-i18next' import { useTimeoutFn } from 'react-use' import { CheckOutlined, CopyOutlined } from '@ant-design/icons' +import { addTranslationResource } from '@lib/utils/i18n' import styles from './index.module.less' +type DisplayVariant = 'default' | 'original_sql' | 'formatted_sql' +const transKeys: { [K in DisplayVariant]: string } = { + default: 'copy', + original_sql: 'copyOriginal', + formatted_sql: 'copyFormatted', +} + export interface ICopyLinkProps { data?: string + displayVariant?: DisplayVariant } const translations = { en: { - text: 'Copy', + copy: 'Copy', + copyOriginal: 'Copy Original', + copyFormatted: 'Copy Formatted', success: 'Copied', }, zh: { - text: '复制', + copy: '复制', + copyOriginal: '复制原始 SQL', + copyFormatted: '复制格式化 SQL', success: '已复制', }, } @@ -30,7 +42,7 @@ for (const key in translations) { }) } -function CopyLink({ data }: ICopyLinkProps) { +function CopyLink({ data, displayVariant = 'default' }: ICopyLinkProps) { const { t } = useTranslation() const [showCopied, setShowCopied] = useState(false) @@ -48,7 +60,8 @@ function CopyLink({ data }: ICopyLinkProps) { {!showCopied && ( - {t('component.copyLink.text')} + {t(`component.copyLink.${transKeys[displayVariant]}`)}{' '} + )} From 4d3b95a80d7d92ef2544a26d68fe7371a4650aed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=9C=C9=B4=D0=B2=CA=8F=D1=82=E1=B4=87?= Date: Tue, 24 Nov 2020 19:50:57 +0800 Subject: [PATCH 21/29] add min height of topology canvas (#804) * add min height and i18n * resolve comments * remove useless code --- .../ClusterInfo/components/StoreLocation.tsx | 7 ++++++- .../components/StoreLocationTree/index.tsx | 16 ++++++++++++---- ui/lib/apps/ClusterInfo/translations/en.yaml | 1 + ui/lib/apps/ClusterInfo/translations/zh.yaml | 1 + 4 files changed, 20 insertions(+), 5 deletions(-) diff --git a/ui/lib/apps/ClusterInfo/components/StoreLocation.tsx b/ui/lib/apps/ClusterInfo/components/StoreLocation.tsx index 65dec6ff16..f6710b8cc0 100644 --- a/ui/lib/apps/ClusterInfo/components/StoreLocation.tsx +++ b/ui/lib/apps/ClusterInfo/components/StoreLocation.tsx @@ -53,7 +53,12 @@ export default function StoreLocation() {
- + document.documentElement.clientHeight - 80 - 48 * 2 // 48 = margin of cardInner + } + />
) diff --git a/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx b/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx index 4fcdbefac5..c4d80205a2 100644 --- a/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx +++ b/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx @@ -7,9 +7,11 @@ import { QuestionCircleOutlined, } from '@ant-design/icons' import { Space, Tooltip } from 'antd' +import { useTranslation } from 'react-i18next' export interface IStoreLocationProps { dataSource: any + getMinHeight?: () => number } const margin = { left: 60, right: 40, top: 60, bottom: 100 } @@ -30,8 +32,12 @@ function calcHeight(root) { return x1 - x0 } -export default function StoreLocationTree({ dataSource }: IStoreLocationProps) { +export default function StoreLocationTree({ + dataSource, + getMinHeight, +}: IStoreLocationProps) { const divRef = useRef(null) + const { t } = useTranslation() useEffect(() => { let divWidth = divRef.current?.clientWidth || 0 @@ -129,11 +135,13 @@ export default function StoreLocationTree({ dataSource }: IStoreLocationProps) { root.y0 = root.y } + const contentHeight = boundHeight + margin.top + margin.bottom + const transition = svg .transition() .duration(duration) .attr('width', divWidth) - .attr('height', boundHeight + margin.top + margin.bottom) + .attr('height', Math.max(getMinHeight?.() || 0, contentHeight)) // update the nodes const node = gNode.selectAll('g').data(nodes, (d: any) => d.id) @@ -236,7 +244,7 @@ export default function StoreLocationTree({ dataSource }: IStoreLocationProps) { return () => { window.removeEventListener('resize', resizeHandler) } - }, [dataSource]) + }, [dataSource, getMinHeight]) return (
@@ -250,7 +258,7 @@ export default function StoreLocationTree({ dataSource }: IStoreLocationProps) { - + diff --git a/ui/lib/apps/ClusterInfo/translations/en.yaml b/ui/lib/apps/ClusterInfo/translations/en.yaml index 74009096f7..fbd363c8ad 100644 --- a/ui/lib/apps/ClusterInfo/translations/en.yaml +++ b/ui/lib/apps/ClusterInfo/translations/en.yaml @@ -28,5 +28,6 @@ cluster_info: instanceUnavailable: Host information is unknow due to instance unreachable store_topology: title: Store Topology + tooltip: You can also zoom in or out by pressing CTRL and scrolling mouse error: load: 'Load component {{comp}} error: {{cause}}' diff --git a/ui/lib/apps/ClusterInfo/translations/zh.yaml b/ui/lib/apps/ClusterInfo/translations/zh.yaml index 84febfa668..e3e916b646 100644 --- a/ui/lib/apps/ClusterInfo/translations/zh.yaml +++ b/ui/lib/apps/ClusterInfo/translations/zh.yaml @@ -28,5 +28,6 @@ cluster_info: instanceUnavailable: 获取该主机信息失败:无法访问实例 store_topology: title: 存储拓扑 + tooltip: 按住 Ctrl 键并滑动鼠标滚轮可以缩放 error: load: '加载组件 {{comp}} 失败: {{cause}}' From 29820e09b75e7f2e3ee53afdda216e93aac7d25a Mon Sep 17 00:00:00 2001 From: Wenxuan Date: Wed, 25 Nov 2020 15:34:22 +0800 Subject: [PATCH 22/29] metrics: Support customize Prometheus address (#808) --- CONTRIBUTING.md | 104 +--- go.mod | 1 + go.sum | 1 + pkg/apiserver/apiserver.go | 24 +- pkg/apiserver/clusterinfo/service.go | 2 +- pkg/apiserver/configuration/router.go | 2 +- pkg/apiserver/diagnose/diagnose.go | 2 +- pkg/apiserver/info/info.go | 2 +- pkg/apiserver/logsearch/service.go | 2 +- pkg/apiserver/metrics/metrics.go | 131 ----- pkg/apiserver/metrics/prom_resolve.go | 188 ++++++++ pkg/apiserver/metrics/router.go | 164 +++++++ pkg/apiserver/metrics/service.go | 67 +++ pkg/apiserver/profiling/router.go | 2 +- pkg/apiserver/queryeditor/service.go | 2 +- pkg/apiserver/slowquery/service.go | 2 +- pkg/apiserver/statement/service.go | 2 +- pkg/apiserver/user/auth.go | 2 +- pkg/keyvisual/service.go | 2 +- ui/dashboardApp/layout/main/Sider/index.tsx | 2 +- ui/dashboardApp/layout/signin/index.tsx | 5 +- .../SearchLogs/components/SearchHeader.tsx | 9 +- .../SearchLogs/components/SearchResult.tsx | 2 +- ui/lib/apps/UserProfile/index.tsx | 307 ++++++++---- ui/lib/apps/UserProfile/translations/en.yaml | 12 + ui/lib/apps/UserProfile/translations/zh.yaml | 12 + ui/lib/components/AnimatedSkeleton/index.tsx | 4 +- ui/lib/components/Blink/index.module.less | 17 + ui/lib/components/Blink/index.tsx | 29 ++ .../components/InstanceSelect/DropOverlay.tsx | 18 +- .../InstanceSelect/TableWithFilter.tsx | 14 +- ui/lib/components/InstanceSelect/index.tsx | 5 +- ui/lib/components/MetricChart/index.tsx | 53 ++- ui/lib/components/MultiSelect/DropOverlay.tsx | 6 +- ui/lib/components/index.ts | 2 + ui/lib/utils/useQueryParams.ts | 2 + ui/lib/utils/wdyr.ts | 5 +- ui/package.json | 1 + ui/tests/e2e/_config.ts | 4 + ui/tests/e2e/_preset.js | 4 + ui/tests/e2e/_setup.js | 1 + ui/tests/e2e/login.test.ts | 50 -- ui/tests/e2e/search_log.test.ts | 105 +--- ui/tests/e2e/sign_in.test.ts | 28 ++ ui/tests/e2e/test_config.ts | 12 - ui/tests/e2e/utils/sign_in.ts | 10 + ui/tests/jest-puppeteer.config.js | 5 + ui/tests/jest.config.js | 4 + ui/tests/jestconfig.json | 6 - ui/tests/package.json | 4 +- ui/tests/yarn.lock | 450 +++++++++++++++++- ui/yarn.lock | 7 + 52 files changed, 1402 insertions(+), 495 deletions(-) delete mode 100644 pkg/apiserver/metrics/metrics.go create mode 100644 pkg/apiserver/metrics/prom_resolve.go create mode 100644 pkg/apiserver/metrics/router.go create mode 100644 
pkg/apiserver/metrics/service.go create mode 100644 ui/lib/components/Blink/index.module.less create mode 100644 ui/lib/components/Blink/index.tsx create mode 100644 ui/tests/e2e/_config.ts create mode 100644 ui/tests/e2e/_preset.js create mode 100644 ui/tests/e2e/_setup.js delete mode 100644 ui/tests/e2e/login.test.ts create mode 100644 ui/tests/e2e/sign_in.test.ts delete mode 100644 ui/tests/e2e/test_config.ts create mode 100644 ui/tests/e2e/utils/sign_in.ts create mode 100644 ui/tests/jest-puppeteer.config.js create mode 100644 ui/tests/jest.config.js delete mode 100644 ui/tests/jestconfig.json diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 97d028dc77..34f20d3806 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -14,8 +14,6 @@ Although TiDB Dashboard can also be integrated into [PD], this form is not conve ### Step 1. Start a TiDB cluster -#### Solution A. Use TiUP (Recommended) - [TiUP] is the offical component manager for [TiDB]. It can help you set up a local TiDB cluster in a few minutes. Download and install TiUP: @@ -40,73 +38,7 @@ Start a local TiDB cluster: tiup playground nightly ``` -> Note: you might notice that there is already a TiDB Dashboard integrated into the PD started by TiUP. For development purpose, we will not use the that TiDB Dashboard. Please keep following the rest of the steps in this document. - -#### Solution B. Download and Run Binary Manually - -
- -Alternatively, you can deploy a cluster with binary files manually. - -1. Download binaries - - Linux: - - ```bash - mkdir tidb_cluster - cd tidb_cluster - wget https://download.pingcap.org/tidb-nightly-linux-amd64.tar.gz - tar -xzf tidb-nightly-linux-amd64.tar.gz - cd tidb-nightly-linux-amd64 - ``` - - MacOS: - - ```bash - mkdir tidb_cluster - cd tidb_cluster - wget https://download.pingcap.org/tidb-nightly-darwin-amd64.tar.gz - wget https://download.pingcap.org/tikv-nightly-darwin-amd64.tar.gz - wget https://download.pingcap.org/pd-nightly-darwin-amd64.tar.gz - mkdir tidb-nightly-darwin-amd64 - tar -xzf tidb-nightly-darwin-amd64.tar.gz -C tidb-nightly-darwin-amd64 --strip-components=1 - tar -xzf tikv-nightly-darwin-amd64.tar.gz -C tidb-nightly-darwin-amd64 --strip-components=1 - tar -xzf pd-nightly-darwin-amd64.tar.gz -C tidb-nightly-darwin-amd64 --strip-components=1 - cd tidb-nightly-darwin-amd64 - ``` - -2. Start a PD server - - ```bash - ./bin/pd-server --name=pd --data-dir=pd --client-urls=http://127.0.0.1:2379 --log-file=pd.log - # Now pd-server is listen on port 2379 - ``` - -3. Start a TiKV server - - Open a new terminal: - - ```bash - ./bin/tikv-server --addr="127.0.0.1:20160" --pd-endpoints="127.0.0.1:2379" --data-dir=tikv --log-file=./tikv.log - # Now tikv-server is listen on port 20160 - ``` - -4. Start a TiDB server - - Open a new terminal: - - ```bash - ./bin/tidb-server --store=tikv --path="127.0.0.1:2379" --log-file=tidb.log - # Now tidb-server is listen on port 4000 - ``` - -5. Use mysql-client to check everything works fine: - - ```bash - mysql -h 127.0.0.1 -P 4000 -uroot - ``` - -
+You might notice that there is already a TiDB Dashboard integrated into the PD started by TiUP. For development purposes, it will not be used intentionally. ### Step 2. Prepare Prerequisites @@ -124,7 +56,7 @@ The followings are required for developing TiDB Dashboard: 1. Clone the repository: ```bash - git clone https://github.com/pingcap-incubator/tidb-dashboard.git + git clone https://github.com/pingcap/tidb-dashboard.git cd tidb-dashboard ``` @@ -144,17 +76,11 @@ The followings are required for developing TiDB Dashboard: yarn start ``` -1. That's it! You can access TiDB Dashboard now: - - TiDB Dashboard UI: http://127.0.0.1:3001 - - Swagger UI for TiDB Dashboard APIs: http://localhost:12333/dashboard/api/swagger +1. That's it! You can access TiDB Dashboard now: http://127.0.0.1:3001 ### Step 4. Run E2E Tests (optional) -Now we have only a few e2e tests in the `ui/tests` folder, you can contribute more for it. - -After finishing the above steps, we can run the tests by following commands: +When back-end server and front-end server are both started, E2E tests can be run by: ```bash cd ui/tests yarn yarn test ``` -### Step 5. Run Storybook Playground (optional) +> Now we have only a few e2e tests. Contributions are welcome! + +## Additional Guides + +### Swagger UI + +We use [Swagger] to generate the API server and corresponding clients. Swagger provides a web UI in which you can +see all TiDB Dashboard API endpoints and specifications, or even send API requests. + +Swagger UI is available at http://localhost:12333/dashboard/api/swagger after the above Step 3 is finished. + +### Storybook + +We expose some UI components in a playground provided by [React Storybook]. In the playground you can see what +components look like and how to use them. -After finishing the above steps, we can run the storybook playground by following commands: +Storybook can be started using the following commands: ```bash cd ui yarn storybook ``` -You can add more stories for your components to the playground. +> We have not yet made all components available in the Storybook. Contributions are welcome!
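A new story only needs a default export naming the story group plus one named export per example. The sketch below is illustrative only: the `Hello` component, the story title, and the suggested file location (for instance `ui/lib/components/Hello/index.stories.tsx`) are made-up placeholders rather than parts of the existing code base.

```tsx
import React from 'react'

// Hypothetical component, defined inline only so the story is self-contained.
function Hello({ name }: { name: string }) {
  return <p>Hello, {name}!</p>
}

// Component Story Format: the default export describes the group shown in the
// Storybook sidebar; each named export becomes one rendered story.
export default {
  title: 'Examples/Hello',
}

export const basic = () => <Hello name="TiDB Dashboard" />
```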
## Contribution flow @@ -256,3 +196,5 @@ The body of the commit message should describe why the change was made and at a [tidb]: https://github.com/pingcap/tidb [tikv]: https://github.com/tikv/tikv [tiup]: https://tiup.io +[Swagger]: https://swagger.io +[React Storybook]: https://storybook.js.org \ No newline at end of file diff --git a/go.mod b/go.mod index 84184fdcbc..1fad317dda 100644 --- a/go.mod +++ b/go.mod @@ -43,6 +43,7 @@ require ( go.uber.org/zap v1.13.0 golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f // indirect golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2 // indirect + golang.org/x/sync v0.0.0-20190423024810-112230192c58 google.golang.org/grpc v1.25.1 gopkg.in/oleiade/reflections.v1 v1.0.0 ) diff --git a/go.sum b/go.sum index 36b1c7ab53..647834bb0d 100644 --- a/go.sum +++ b/go.sum @@ -424,6 +424,7 @@ golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAG golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= diff --git a/pkg/apiserver/apiserver.go b/pkg/apiserver/apiserver.go index 3b23d5e37f..b519f5f33c 100644 --- a/pkg/apiserver/apiserver.go +++ b/pkg/apiserver/apiserver.go @@ -126,18 +126,18 @@ func (s *Service) Start(ctx context.Context) error { ), fx.Populate(&s.apiHandlerEngine), fx.Invoke( - user.Register, - info.Register, - clusterinfo.Register, - profiling.Register, - logsearch.Register, - slowquery.Register, - statement.Register, - diagnose.Register, - keyvisual.Register, - metrics.Register, - queryeditor.Register, - configuration.Register, + user.RegisterRouter, + info.RegisterRouter, + clusterinfo.RegisterRouter, + profiling.RegisterRouter, + logsearch.RegisterRouter, + slowquery.RegisterRouter, + statement.RegisterRouter, + diagnose.RegisterRouter, + keyvisual.RegisterRouter, + metrics.RegisterRouter, + queryeditor.RegisterRouter, + configuration.RegisterRouter, // Must be at the end s.status.Register, ), diff --git a/pkg/apiserver/clusterinfo/service.go b/pkg/apiserver/clusterinfo/service.go index 6b9eb70591..312a479eff 100644 --- a/pkg/apiserver/clusterinfo/service.go +++ b/pkg/apiserver/clusterinfo/service.go @@ -60,7 +60,7 @@ func NewService(lc fx.Lifecycle, p ServiceParams) *Service { return s } -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { +func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/topology") endpoint.Use(auth.MWAuthRequired()) endpoint.GET("/tidb", s.getTiDBTopology) diff --git a/pkg/apiserver/configuration/router.go b/pkg/apiserver/configuration/router.go index 00bf34670c..380586ec7e 100644 --- a/pkg/apiserver/configuration/router.go +++ b/pkg/apiserver/configuration/router.go @@ -22,7 +22,7 @@ import ( "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/utils" ) -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { +func 
RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/configuration") endpoint.Use(auth.MWAuthRequired()) endpoint.Use(utils.MWConnectTiDB(s.params.TiDBClient)) diff --git a/pkg/apiserver/diagnose/diagnose.go b/pkg/apiserver/diagnose/diagnose.go index 57cab1fd14..241b05be79 100644 --- a/pkg/apiserver/diagnose/diagnose.go +++ b/pkg/apiserver/diagnose/diagnose.go @@ -65,7 +65,7 @@ func NewService(config *config.Config, tidbClient *tidb.Client, db *dbstore.DB, } } -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { +func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/diagnose") endpoint.GET("/reports", auth.MWAuthRequired(), diff --git a/pkg/apiserver/info/info.go b/pkg/apiserver/info/info.go index 45b405210b..c298ca256c 100644 --- a/pkg/apiserver/info/info.go +++ b/pkg/apiserver/info/info.go @@ -44,7 +44,7 @@ func NewService(p ServiceParams) *Service { return &Service{params: p} } -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { +func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/info") endpoint.GET("/info", s.infoHandler) endpoint.Use(auth.MWAuthRequired()) diff --git a/pkg/apiserver/logsearch/service.go b/pkg/apiserver/logsearch/service.go index ea48b169ee..d73c113531 100644 --- a/pkg/apiserver/logsearch/service.go +++ b/pkg/apiserver/logsearch/service.go @@ -72,7 +72,7 @@ func NewService(lc fx.Lifecycle, config *config.Config, db *dbstore.DB) *Service return service } -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { +func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/logs") { endpoint.GET("/download", s.DownloadLogs) diff --git a/pkg/apiserver/metrics/metrics.go b/pkg/apiserver/metrics/metrics.go deleted file mode 100644 index d41c9da3f1..0000000000 --- a/pkg/apiserver/metrics/metrics.go +++ /dev/null @@ -1,131 +0,0 @@ -package metrics - -import ( - "context" - "fmt" - "io/ioutil" - "net/http" - "net/url" - "strconv" - "time" - - "github.com/gin-gonic/gin" - "github.com/joomcode/errorx" - "go.etcd.io/etcd/clientv3" - "go.uber.org/fx" - - "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/user" - "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/utils" - "github.com/pingcap-incubator/tidb-dashboard/pkg/httpc" - "github.com/pingcap-incubator/tidb-dashboard/pkg/utils/topology" -) - -var ( - ErrNS = errorx.NewNamespace("error.api.metrics") - ErrPrometheusNotFound = ErrNS.NewType("prometheus_not_found") - ErrPrometheusQueryFailed = ErrNS.NewType("prometheus_query_failed") -) - -const ( - defaultPromQueryTimeout = time.Second * 30 -) - -type ServiceParams struct { - fx.In - HTTPClient *httpc.Client - EtcdClient *clientv3.Client -} - -type Service struct { - params ServiceParams - lifecycleCtx context.Context -} - -func NewService(lc fx.Lifecycle, p ServiceParams) *Service { - s := &Service{params: p} - - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - s.lifecycleCtx = ctx - return nil - }, - }) - - return s -} - -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { - endpoint := r.Group("/metrics") - endpoint.Use(auth.MWAuthRequired()) - endpoint.GET("/query", s.queryHandler) -} - -type QueryRequest struct { - StartTimeSec int `json:"start_time_sec" form:"start_time_sec"` - EndTimeSec int `json:"end_time_sec" form:"end_time_sec"` - StepSec int `json:"step_sec" 
form:"step_sec"` - Query string `json:"query" form:"query"` -} - -type QueryResponse struct { - Status string `json:"status"` - Data map[string]interface{} `json:"data"` -} - -// @Summary Query metrics -// @Description Query metrics in the given range -// @Param q query QueryRequest true "Query" -// @Success 200 {object} QueryResponse -// @Failure 401 {object} utils.APIError "Unauthorized failure" -// @Security JwtAuth -// @Router /metrics/query [get] -func (s *Service) queryHandler(c *gin.Context) { - var req QueryRequest - if err := c.ShouldBindQuery(&req); err != nil { - utils.MakeInvalidRequestErrorFromError(c, err) - return - } - - pi, err := topology.FetchPrometheusTopology(s.lifecycleCtx, s.params.EtcdClient) - if err != nil { - _ = c.Error(err) - return - } - if pi == nil { - _ = c.Error(ErrPrometheusNotFound.NewWithNoMessage()) - return - } - - params := url.Values{} - params.Add("query", req.Query) - params.Add("start", strconv.Itoa(req.StartTimeSec)) - params.Add("end", strconv.Itoa(req.EndTimeSec)) - params.Add("step", strconv.Itoa(req.StepSec)) - - uri := fmt.Sprintf("http://%s:%d/api/v1/query_range?%s", pi.IP, pi.Port, params.Encode()) - promReq, err := http.NewRequestWithContext(s.lifecycleCtx, http.MethodGet, uri, nil) - if err != nil { - _ = c.Error(ErrPrometheusQueryFailed.Wrap(err, "failed to build Prometheus request")) - return - } - - promResp, err := s.params.HTTPClient.WithTimeout(defaultPromQueryTimeout).Do(promReq) - if err != nil { - _ = c.Error(ErrPrometheusQueryFailed.Wrap(err, "failed to send requests to Prometheus")) - return - } - - defer promResp.Body.Close() - if promResp.StatusCode != http.StatusOK { - _ = c.Error(ErrPrometheusQueryFailed.New("failed to query Prometheus")) - return - } - - body, err := ioutil.ReadAll(promResp.Body) - if err != nil { - _ = c.Error(ErrPrometheusQueryFailed.Wrap(err, "failed to read Prometheus query result")) - return - } - - c.Data(promResp.StatusCode, promResp.Header.Get("content-type"), body) -} diff --git a/pkg/apiserver/metrics/prom_resolve.go b/pkg/apiserver/metrics/prom_resolve.go new file mode 100644 index 0000000000..5c65b1762b --- /dev/null +++ b/pkg/apiserver/metrics/prom_resolve.go @@ -0,0 +1,188 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package metrics + +import ( + "bytes" + "encoding/json" + "fmt" + "net/url" + "strings" + "time" + + "github.com/pingcap-incubator/tidb-dashboard/pkg/utils/topology" +) + +const ( + promCacheTTL = time.Second * 5 +) + +type promAddressCacheEntity struct { + address string + cacheAt time.Time +} + +type pdServerConfig struct { + MetricStorage string `json:"metric-storage"` +} + +type pdConfig struct { + PdServer pdServerConfig `json:"pd-server"` +} + +// Check and normalize a Prometheus address supplied by user. 
+func normalizeCustomizedPromAddress(addr string) (string, error) { + if !strings.HasPrefix(addr, "http://") && !strings.HasPrefix(addr, "https://") { + addr = "http://" + addr + } + u, err := url.Parse(addr) + if err != nil { + return "", fmt.Errorf("invalid Prometheus address format: %v", err) + } + if len(u.Host) == 0 || len(u.Scheme) == 0 { + return "", fmt.Errorf("invalid Prometheus address format") + } + // Normalize the address, remove unnecessary parts. + addr = fmt.Sprintf("%s://%s", u.Scheme, u.Host) + return addr, nil +} + +// Resolve the customized Prometheus address in PD config. If it is not configured, empty address will be returned. +// The returned address must be valid. If an invalid Prometheus address is configured, errors will be returned. +func (s *Service) resolveCustomizedPromAddress(acceptInvalidAddr bool) (string, error) { + // Lookup "metric-storage" cluster config in PD. + data, err := s.params.PDClient.SendGetRequest("/config") + if err != nil { + return "", err + } + var config pdConfig + if err := json.Unmarshal(data, &config); err != nil { + return "", err + } + addr := config.PdServer.MetricStorage + if len(addr) > 0 { + if acceptInvalidAddr { + return addr, nil + } + // Verify whether address is valid. If not valid, throw error. + addr, err = normalizeCustomizedPromAddress(addr) + if err != nil { + return "", err + } + return addr, nil + } + return "", nil +} + +// Resolve the Prometheus address recorded by deployment tools in the `/topology` etcd namespace. +// If the address is not recorded (for example, when Prometheus is not deployed), empty address will be returned. +func (s *Service) resolveDeployedPromAddress() (string, error) { + pi, err := topology.FetchPrometheusTopology(s.lifecycleCtx, s.params.EtcdClient) + if err != nil { + return "", err + } + if pi == nil { + return "", nil + } + return fmt.Sprintf("http://%s:%d", pi.IP, pi.Port), nil +} + +// Resolve the final Prometheus address. When user has customized an address, this address is returned. Otherwise, +// address recorded by deployment tools will be returned. +// If neither custom address nor deployed address is available, empty address will be returned. +func (s *Service) resolveFinalPromAddress() (string, error) { + addr, err := s.resolveCustomizedPromAddress(false) + if err != nil { + return "", err + } + if addr != "" { + return addr, nil + } + addr, err = s.resolveDeployedPromAddress() + if err != nil { + return "", err + } + if addr != "" { + return addr, nil + } + return "", nil +} + +// Get the final Prometheus address from cache. If cache item is not valid, the address will be resolved from PD +// or etcd and then the cache will be updated. +func (s *Service) getPromAddressFromCache() (string, error) { + fn := func() (string, error) { + // Check whether cache is valid, and use the cache if possible. + if v := s.promAddressCache.Load(); v != nil { + entity := v.(*promAddressCacheEntity) + if entity.cacheAt.Add(promCacheTTL).After(time.Now()) { + return entity.address, nil + } + } + + // Cache is not valid, read from PD and etcd. + addr, err := s.resolveFinalPromAddress() + + if err != nil { + return "", err + } + + s.promAddressCache.Store(&promAddressCacheEntity{ + address: addr, + cacheAt: time.Now(), + }) + + return addr, nil + } + + resolveResult, err, _ := s.promRequestGroup.Do("any_key", func() (interface{}, error) { + return fn() + }) + if err != nil { + return "", err + } + return resolveResult.(string), nil +} + +// Set the customized Prometheus address. 
Address can be empty or a valid address like `http://host:port`. +// If address is set to empty, address from deployment tools will be used later. +func (s *Service) setCustomPromAddress(addr string) (string, error) { + var err error + if len(addr) > 0 { + addr, err = normalizeCustomizedPromAddress(addr) + if err != nil { + return "", err + } + } + + body := make(map[string]interface{}) + body["metric-storage"] = addr + bodyJSON, err := json.Marshal(&body) + if err != nil { + return "", err + } + + _, err = s.params.PDClient.SendPostRequest("/config", bytes.NewBuffer(bodyJSON)) + if err != nil { + return "", err + } + + // Invalidate cache immediately. + s.promAddressCache.Value.Store(&promAddressCacheEntity{ + address: addr, + cacheAt: time.Time{}, + }) + + return addr, nil +} diff --git a/pkg/apiserver/metrics/router.go b/pkg/apiserver/metrics/router.go new file mode 100644 index 0000000000..59a33611f3 --- /dev/null +++ b/pkg/apiserver/metrics/router.go @@ -0,0 +1,164 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package metrics + +import ( + "fmt" + "io/ioutil" + "net/http" + "net/url" + "strconv" + + "github.com/gin-gonic/gin" + + "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/user" + "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/utils" +) + +type QueryRequest struct { + StartTimeSec int `json:"start_time_sec" form:"start_time_sec"` + EndTimeSec int `json:"end_time_sec" form:"end_time_sec"` + StepSec int `json:"step_sec" form:"step_sec"` + Query string `json:"query" form:"query"` +} + +type QueryResponse struct { + Status string `json:"status"` + Data map[string]interface{} `json:"data"` +} + +func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { + endpoint := r.Group("/metrics") + endpoint.Use(auth.MWAuthRequired()) + endpoint.GET("/query", s.queryMetrics) + endpoint.GET("/prom_address", s.getPromAddressConfig) + endpoint.PUT("/prom_address", s.putCustomPromAddress) +} + +// @Summary Query metrics +// @Description Query metrics in the given range +// @Param q query QueryRequest true "Query" +// @Success 200 {object} QueryResponse +// @Failure 401 {object} utils.APIError "Unauthorized failure" +// @Security JwtAuth +// @Router /metrics/query [get] +func (s *Service) queryMetrics(c *gin.Context) { + var req QueryRequest + if err := c.ShouldBindQuery(&req); err != nil { + utils.MakeInvalidRequestErrorFromError(c, err) + return + } + + addr, err := s.getPromAddressFromCache() + if err != nil { + _ = c.Error(ErrLoadPrometheusAddressFailed.Wrap(err, "Load prometheus address failed")) + return + } + if addr == "" { + _ = c.Error(ErrPrometheusNotFound.New("Prometheus is not deployed in the cluster")) + return + } + + params := url.Values{} + params.Add("query", req.Query) + params.Add("start", strconv.Itoa(req.StartTimeSec)) + params.Add("end", strconv.Itoa(req.EndTimeSec)) + params.Add("step", strconv.Itoa(req.StepSec)) + + uri := fmt.Sprintf("%s/api/v1/query_range?%s", addr, params.Encode()) + promReq, err := http.NewRequestWithContext(s.lifecycleCtx, 
http.MethodGet, uri, nil) + if err != nil { + _ = c.Error(ErrPrometheusQueryFailed.Wrap(err, "failed to build Prometheus request")) + return + } + + promResp, err := s.params.HTTPClient.WithTimeout(defaultPromQueryTimeout).Do(promReq) + if err != nil { + _ = c.Error(ErrPrometheusQueryFailed.Wrap(err, "failed to send requests to Prometheus")) + return + } + + defer promResp.Body.Close() + if promResp.StatusCode != http.StatusOK { + _ = c.Error(ErrPrometheusQueryFailed.New("failed to query Prometheus")) + return + } + + body, err := ioutil.ReadAll(promResp.Body) + if err != nil { + _ = c.Error(ErrPrometheusQueryFailed.Wrap(err, "failed to read Prometheus query result")) + return + } + + c.Data(promResp.StatusCode, promResp.Header.Get("content-type"), body) +} + +type GetPromAddressConfigResponse struct { + CustomizedAddr string `json:"customized_addr"` + DeployedAddr string `json:"deployed_addr"` +} + +// @ID metricsGetPromAddress +// @Summary Get the Prometheus address cluster config +// @Success 200 {object} GetPromAddressConfigResponse +// @Failure 401 {object} utils.APIError "Unauthorized failure" +// @Security JwtAuth +// @Router /metrics/prom_address [get] +func (s *Service) getPromAddressConfig(c *gin.Context) { + cAddr, err := s.resolveCustomizedPromAddress(true) + if err != nil { + _ = c.Error(err) + return + } + dAddr, err := s.resolveDeployedPromAddress() + if err != nil { + _ = c.Error(err) + return + } + c.JSON(http.StatusOK, GetPromAddressConfigResponse{ + CustomizedAddr: cAddr, + DeployedAddr: dAddr, + }) +} + +type PutCustomPromAddressRequest struct { + Addr string `json:"address"` +} + +type PutCustomPromAddressResponse struct { + NormalizedAddr string `json:"normalized_address"` +} + +// @ID metricsSetCustomPromAddress +// @Summary Set or clear the customized Prometheus address +// @Param request body PutCustomPromAddressRequest true "Request body" +// @Success 200 {object} PutCustomPromAddressResponse +// @Failure 401 {object} utils.APIError "Unauthorized failure" +// @Security JwtAuth +// @Router /metrics/prom_address [put] +func (s *Service) putCustomPromAddress(c *gin.Context) { + var req PutCustomPromAddressRequest + if err := c.ShouldBindJSON(&req); err != nil { + utils.MakeInvalidRequestErrorFromError(c, err) + return + } + addr, err := s.setCustomPromAddress(req.Addr) + if err != nil { + _ = c.Error(err) + return + } + c.JSON(http.StatusOK, PutCustomPromAddressResponse{ + NormalizedAddr: addr, + }) +} diff --git a/pkg/apiserver/metrics/service.go b/pkg/apiserver/metrics/service.go new file mode 100644 index 0000000000..91bedfd578 --- /dev/null +++ b/pkg/apiserver/metrics/service.go @@ -0,0 +1,67 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package metrics + +import ( + "context" + "time" + + "github.com/joomcode/errorx" + "go.etcd.io/etcd/clientv3" + "go.uber.org/atomic" + "go.uber.org/fx" + "golang.org/x/sync/singleflight" + + "github.com/pingcap-incubator/tidb-dashboard/pkg/httpc" + "github.com/pingcap-incubator/tidb-dashboard/pkg/pd" +) + +var ( + ErrNS = errorx.NewNamespace("error.api.metrics") + ErrLoadPrometheusAddressFailed = ErrNS.NewType("load_prom_address_failed") + ErrPrometheusNotFound = ErrNS.NewType("prom_not_found") + ErrPrometheusQueryFailed = ErrNS.NewType("prom_query_failed") +) + +const ( + defaultPromQueryTimeout = time.Second * 30 +) + +type ServiceParams struct { + fx.In + HTTPClient *httpc.Client + EtcdClient *clientv3.Client + PDClient *pd.Client +} + +type Service struct { + params ServiceParams + lifecycleCtx context.Context + + promRequestGroup singleflight.Group + promAddressCache atomic.Value +} + +func NewService(lc fx.Lifecycle, p ServiceParams) *Service { + s := &Service{params: p} + + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + s.lifecycleCtx = ctx + return nil + }, + }) + + return s +} diff --git a/pkg/apiserver/profiling/router.go b/pkg/apiserver/profiling/router.go index 97bb1db488..a09eca5b1c 100644 --- a/pkg/apiserver/profiling/router.go +++ b/pkg/apiserver/profiling/router.go @@ -30,7 +30,7 @@ import ( ) // Register register the handlers to the service. -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { +func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/profiling") endpoint.GET("/group/list", auth.MWAuthRequired(), s.getGroupList) endpoint.POST("/group/start", auth.MWAuthRequired(), s.handleStartGroup) diff --git a/pkg/apiserver/queryeditor/service.go b/pkg/apiserver/queryeditor/service.go index 6c3e3c6db0..8665fb9a09 100644 --- a/pkg/apiserver/queryeditor/service.go +++ b/pkg/apiserver/queryeditor/service.go @@ -53,7 +53,7 @@ func NewService(lc fx.Lifecycle, p ServiceParams) *Service { return service } -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { +func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/query_editor") endpoint.Use(auth.MWAuthRequired()) endpoint.Use(utils.MWConnectTiDB(s.params.TiDBClient)) diff --git a/pkg/apiserver/slowquery/service.go b/pkg/apiserver/slowquery/service.go index 6df8d5d6c2..624d57de9d 100644 --- a/pkg/apiserver/slowquery/service.go +++ b/pkg/apiserver/slowquery/service.go @@ -47,7 +47,7 @@ func NewService(p ServiceParams) *Service { return &Service{params: p} } -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { +func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/slow_query") { endpoint.GET("/download", s.downloadHandler) diff --git a/pkg/apiserver/statement/service.go b/pkg/apiserver/statement/service.go index 7b25b3b16e..fce356bc85 100644 --- a/pkg/apiserver/statement/service.go +++ b/pkg/apiserver/statement/service.go @@ -48,7 +48,7 @@ func NewService(p ServiceParams) *Service { return &Service{params: p} } -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { +func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/statements") { endpoint.GET("/download", s.downloadHandler) diff --git a/pkg/apiserver/user/auth.go b/pkg/apiserver/user/auth.go index 64ca155d3f..39bf30757b 100644 --- a/pkg/apiserver/user/auth.go +++ b/pkg/apiserver/user/auth.go 
@@ -245,7 +245,7 @@ func (s *AuthService) authSharingCodeForm(f *authenticateForm) (*utils.SessionUs return session, nil } -func Register(r *gin.RouterGroup, s *AuthService) { +func RegisterRouter(r *gin.RouterGroup, s *AuthService) { endpoint := r.Group("/user") endpoint.POST("/login", s.loginHandler) endpoint.POST("/share", s.MWAuthRequired(), s.shareSessionHandler) diff --git a/pkg/keyvisual/service.go b/pkg/keyvisual/service.go index ca8005bda0..aeb4e4fb53 100644 --- a/pkg/keyvisual/service.go +++ b/pkg/keyvisual/service.go @@ -113,7 +113,7 @@ func NewService( return s } -func Register(r *gin.RouterGroup, auth *user.AuthService, s *Service) { +func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint := r.Group("/keyvisual") endpoint.Use(auth.MWAuthRequired()) diff --git a/ui/dashboardApp/layout/main/Sider/index.tsx b/ui/dashboardApp/layout/main/Sider/index.tsx index 68fe971363..33b1f2bcb2 100644 --- a/ui/dashboardApp/layout/main/Sider/index.tsx +++ b/ui/dashboardApp/layout/main/Sider/index.tsx @@ -89,9 +89,9 @@ function Sider({ const menuItems = [ useAppMenuItem(registry, 'overview'), useAppMenuItem(registry, 'cluster_info'), - useAppMenuItem(registry, 'keyviz'), useAppMenuItem(registry, 'statement'), useAppMenuItem(registry, 'slow_query'), + useAppMenuItem(registry, 'keyviz'), useAppMenuItem(registry, 'diagnose'), useAppMenuItem(registry, 'search_logs'), debugSubMenu, diff --git a/ui/dashboardApp/layout/signin/index.tsx b/ui/dashboardApp/layout/signin/index.tsx index 9e291de79d..c4bfe6208c 100644 --- a/ui/dashboardApp/layout/signin/index.tsx +++ b/ui/dashboardApp/layout/signin/index.tsx @@ -215,7 +215,7 @@ function TiDBSignInForm({ successRoute, onClickAlternative }) { } disabled /> + + + + )} + + + ) +} + function App() { const { t, i18n } = useTranslation() @@ -198,87 +335,95 @@ function App() { return ( - - - - - - - -
- - - -
-
- - - {error && } - {info && ( - - - - - - } - > - {info.version?.internal_version} - - - - - - } - > - {info.version?.build_git_hash} - - - } - > - {info.version?.build_time} - - - } - > - {info.version?.standalone} - - - - - - } + + + + + + + + + + + +
+ + + +
+
+ + + {error && } + {info && ( + + + + + + } + > + {info.version?.internal_version} + + + + + + } + > + {info.version?.build_git_hash} + + + } + > + {info.version?.build_time} + + + } + > + {info.version?.standalone} + + + + + + } + > + {info.version?.pd_version} + + + )} + + +
) } diff --git a/ui/lib/apps/UserProfile/translations/en.yaml b/ui/lib/apps/UserProfile/translations/en.yaml index 4f764c0592..efb6bd94ab 100644 --- a/ui/lib/apps/UserProfile/translations/en.yaml +++ b/ui/lib/apps/UserProfile/translations/en.yaml @@ -1,4 +1,16 @@ user_profile: + service_endpoints: + title: Service Endpoints + prometheus: + title: Prometheus Data Source + form: + deployed: Use deployed address + not_deployed: Prometheus is not deployed + custom: Use customized address + update: Update + cancel: Cancel + custom_form: + address: Customize Prometheus Address i18n: title: Language & Localization language: Language diff --git a/ui/lib/apps/UserProfile/translations/zh.yaml b/ui/lib/apps/UserProfile/translations/zh.yaml index 2a7687dcba..951f967c97 100644 --- a/ui/lib/apps/UserProfile/translations/zh.yaml +++ b/ui/lib/apps/UserProfile/translations/zh.yaml @@ -1,4 +1,16 @@ user_profile: + service_endpoints: + title: 服务端点 + prometheus: + title: Prometheus 数据源 + form: + deployed: 使用已部署的组件地址 + not_deployed: 未部署 Prometheus 组件 + custom: 使用自定义地址 + update: 更新 + cancel: 取消 + custom_form: + address: 自定义 Prometheus 数据源地址 i18n: title: 语言和本地化 language: 语言 diff --git a/ui/lib/components/AnimatedSkeleton/index.tsx b/ui/lib/components/AnimatedSkeleton/index.tsx index bb812dccf9..6b1b990c94 100644 --- a/ui/lib/components/AnimatedSkeleton/index.tsx +++ b/ui/lib/components/AnimatedSkeleton/index.tsx @@ -9,11 +9,13 @@ import styles from './index.module.less' export interface IAnimatedSkeletonProps extends SkeletonProps { showSkeleton?: boolean children?: React.ReactNode + style?: React.CSSProperties } function AnimatedSkeleton({ showSkeleton, children, + style, ...restProps }: IAnimatedSkeletonProps) { const [skeletonAppears, setSkeletonAppears] = useState(0) @@ -25,7 +27,7 @@ function AnimatedSkeleton({ }, [showSkeleton]) return ( -
+
{showSkeleton && (
{ + activeId: string +} + +export default function Blink({ + activeId, + children, + className, + ...restProps +}: IBlinkProps) { + const { blink } = useQueryParams() + + return ( +
+ {children} +
+ ) +} diff --git a/ui/lib/components/InstanceSelect/DropOverlay.tsx b/ui/lib/components/InstanceSelect/DropOverlay.tsx index b5071df6c5..68805b30f7 100644 --- a/ui/lib/components/InstanceSelect/DropOverlay.tsx +++ b/ui/lib/components/InstanceSelect/DropOverlay.tsx @@ -12,13 +12,12 @@ const groupProps = { onRenderHeader: (props) => , } -const containerStyle = { fontSize: '0.8rem' } - export interface IDropOverlayProps { selection: ISelection columns: IColumn[] items: IInstanceTableItem[] filterTableRef?: React.Ref + containerProps?: React.HTMLAttributes } function DropOverlay({ @@ -26,6 +25,7 @@ function DropOverlay({ columns, items, filterTableRef, + containerProps, }: IDropOverlayProps) { const { t } = useTranslation() const [keyword, setKeyword] = useState('') @@ -34,6 +34,18 @@ function DropOverlay({ return filterInstanceTable(items, keyword) }, [items, keyword]) + const { style: containerStyle, ...restContainerProps } = containerProps ?? {} + const finalContainerProps = useMemo(() => { + const style: React.CSSProperties = { + fontSize: '0.8rem', + ...containerStyle, + } + return { + style, + ...restContainerProps, + } as React.HTMLAttributes & Record + }, [containerStyle, restContainerProps]) + return ( ) diff --git a/ui/lib/components/InstanceSelect/TableWithFilter.tsx b/ui/lib/components/InstanceSelect/TableWithFilter.tsx index 66e3596b46..d6b4157048 100644 --- a/ui/lib/components/InstanceSelect/TableWithFilter.tsx +++ b/ui/lib/components/InstanceSelect/TableWithFilter.tsx @@ -21,8 +21,7 @@ export interface ITableWithFilterProps extends IDetailsListProps { onFilterChange?: (value: string) => void tableMaxHeight?: number tableWidth?: number - containerClassName?: string - containerStyle?: React.CSSProperties + containerProps?: React.HTMLAttributes } export interface ITableWithFilterRefProps { @@ -37,8 +36,7 @@ function TableWithFilter( onFilterChange, tableMaxHeight, tableWidth, - containerClassName, - containerStyle, + containerProps, ...restProps }: ITableWithFilterProps, ref: React.Ref @@ -73,11 +71,17 @@ function TableWithFilter( [containerState.height, tableMaxHeight, tableWidth] ) + const { + className: containerClassName, + style: containerStyle, + ...containerRestProps + } = containerProps ?? {} + return (
void enableTiFlash?: boolean defaultSelectAll?: boolean + dropContainerProps?: React.HTMLAttributes } export interface IInstanceSelectRefProps { @@ -85,6 +86,7 @@ function InstanceSelect( const { enableTiFlash, defaultSelectAll, + dropContainerProps, value, // only to exclude from restProps onChange, // only to exclude from restProps ...restProps @@ -243,9 +245,10 @@ function InstanceSelect( items={tableItems} selection={selection.current} filterTableRef={filterTableRef} + containerProps={dropContainerProps} /> ), - [columns, tableItems] + [columns, tableItems, dropContainerProps] ) const handleOpened = useCallback(() => { diff --git a/ui/lib/components/MetricChart/index.tsx b/ui/lib/components/MetricChart/index.tsx index 98f1df4949..4ce0e97bb0 100644 --- a/ui/lib/components/MetricChart/index.tsx +++ b/ui/lib/components/MetricChart/index.tsx @@ -19,9 +19,48 @@ import client from '@lib/client' import { AnimatedSkeleton, Card } from '@lib/components' import { useBatchClientRequest } from '@lib/utils/useClientRequest' import ErrorBar from '../ErrorBar' +import { addTranslationResource } from '@lib/utils/i18n' +import { Link } from 'react-router-dom' +import { useTranslation } from 'react-i18next' export type GraphType = 'bar' | 'line' +const translations = { + en: { + error: { + api: { + metrics: { + prom_not_found: + 'Prometheus is not deployed in the cluster. Metrics are unavailable.', + }, + }, + }, + components: { + metricChart: { + changePromButton: 'Change Prometheus Source', + }, + }, + }, + zh: { + error: { + api: { + metrics: { + prom_not_found: '集群中未部署 Prometheus 组件,监控不可用。', + }, + }, + }, + components: { + metricChart: { + changePromButton: '修改 Prometheus 源', + }, + }, + }, +} + +for (const key in translations) { + addTranslationResource(key, translations[key]) +} + export interface ISeries { query: string name: string @@ -70,6 +109,7 @@ export default function MetricChart({ type, }: IMetricChartProps) { const timeParams = useRef(getTimeParams()) + const { t } = useTranslation() const { isLoading, data, error, sendRequest } = useBatchClientRequest( series.map((s) => (reqConfig) => @@ -214,7 +254,7 @@ export default function MetricChart({ let inner if (showSkeleton) { - inner =
+ inner = null } else if ( _.every( _.zip(data, error), @@ -223,7 +263,12 @@ export default function MetricChart({ ) { inner = (
- + + + + {t('components.metricChart.changePromButton')} + +
) } else { @@ -249,7 +294,9 @@ export default function MetricChart({ return ( - {inner} + + {inner} + ) } diff --git a/ui/lib/components/MultiSelect/DropOverlay.tsx b/ui/lib/components/MultiSelect/DropOverlay.tsx index 3e1fb57374..49952292ba 100644 --- a/ui/lib/components/MultiSelect/DropOverlay.tsx +++ b/ui/lib/components/MultiSelect/DropOverlay.tsx @@ -6,7 +6,9 @@ import TableWithFilter, { } from '../InstanceSelect/TableWithFilter' import { IItem } from '.' -const containerStyle = { fontSize: '0.8rem' } +const containerProps: React.HTMLAttributes = { + style: { fontSize: '0.8rem' }, +} export interface IDropOverlayProps { selection: ISelection @@ -50,7 +52,7 @@ function DropOverlay({ tableWidth={250} columns={columns} items={filteredItems} - containerStyle={containerStyle} + containerProps={containerProps} ref={filterTableRef} /> ) diff --git a/ui/lib/components/index.ts b/ui/lib/components/index.ts index 51eb2676e8..d5b58766fd 100644 --- a/ui/lib/components/index.ts +++ b/ui/lib/components/index.ts @@ -52,6 +52,8 @@ export * from './ErrorBar' export { default as ErrorBar } from './ErrorBar' export * from './AppearAnimate' export { default as AppearAnimate } from './AppearAnimate' +export * from './Blink' +export { default as Blink } from './Blink' export { default as LanguageDropdown } from './LanguageDropdown' export { default as ParamsPageWrapper } from './ParamsPageWrapper' diff --git a/ui/lib/utils/useQueryParams.ts b/ui/lib/utils/useQueryParams.ts index 94d9be31ed..4718326ee7 100644 --- a/ui/lib/utils/useQueryParams.ts +++ b/ui/lib/utils/useQueryParams.ts @@ -2,6 +2,8 @@ import { useMemo } from 'react' import { useLocation } from 'react-router' export default function useQueryParams() { + // Note: seems that history.location can be outdated sometimes. 
+ const { search } = useLocation() const params = useMemo(() => { diff --git a/ui/lib/utils/wdyr.ts b/ui/lib/utils/wdyr.ts index 3d79bfde63..50003057ed 100644 --- a/ui/lib/utils/wdyr.ts +++ b/ui/lib/utils/wdyr.ts @@ -3,8 +3,5 @@ import React from 'react' if (process.env.NODE_ENV === 'development') { console.log('Development mode, enable render trackers') const whyDidYouRender = require('@welldone-software/why-did-you-render') - whyDidYouRender(React, { - trackAllPureComponents: true, - logOwnerReasons: true, - }) + whyDidYouRender(React) } diff --git a/ui/package.json b/ui/package.json index 503655afe7..68c2ec3e5c 100644 --- a/ui/package.json +++ b/ui/package.json @@ -21,6 +21,7 @@ "dayjs": "^1.8.31", "echarts": "^4.8.0", "echarts-for-react": "^2.0.16", + "history": "^5.0.0", "i18next": "^19.6.3", "i18next-browser-languagedetector": "^5.0.0", "lodash": "^4.17.19", diff --git a/ui/tests/e2e/_config.ts b/ui/tests/e2e/_config.ts new file mode 100644 index 0000000000..384c05c1aa --- /dev/null +++ b/ui/tests/e2e/_config.ts @@ -0,0 +1,4 @@ +export const SERVER_URL = + (process.env.SERVER_URL || 'http://localhost:3001/dashboard') + '#' +export const LOGIN_URL = SERVER_URL + '/signin' +export const OVERVIEW_URL = SERVER_URL + '/overview' diff --git a/ui/tests/e2e/_preset.js b/ui/tests/e2e/_preset.js new file mode 100644 index 0000000000..b65d085471 --- /dev/null +++ b/ui/tests/e2e/_preset.js @@ -0,0 +1,4 @@ +const ts_preset = require('ts-jest/jest-preset') +const puppeteer_preset = require('jest-puppeteer/jest-preset') + +module.exports = Object.assign(ts_preset, puppeteer_preset) diff --git a/ui/tests/e2e/_setup.js b/ui/tests/e2e/_setup.js new file mode 100644 index 0000000000..719a473b6e --- /dev/null +++ b/ui/tests/e2e/_setup.js @@ -0,0 +1 @@ +jest.setTimeout(10000) diff --git a/ui/tests/e2e/login.test.ts b/ui/tests/e2e/login.test.ts deleted file mode 100644 index 5d92870853..0000000000 --- a/ui/tests/e2e/login.test.ts +++ /dev/null @@ -1,50 +0,0 @@ -import puppeteer from 'puppeteer' -import ppExpect from 'expect-puppeteer' -import { LOGIN_URL, OVERVIEW_URL, PUPPETEER_CONFIG } from './test_config' - -describe('Login', () => { - let browser - beforeAll(async () => { - browser = await puppeteer.launch(PUPPETEER_CONFIG) - }) - - afterAll(() => { - browser.close() - }) - - it( - 'should login fail by incorrect password', - async () => { - const page = await browser.newPage() - await page.goto(LOGIN_URL) - - await ppExpect(page).toFill('input#tidb_signin_password', 'any') - await ppExpect(page).toClick('button#signin_btn') - - const failReason = await page.waitForSelector( - 'form#tidb_signin div[data-e2e="password"] div:last-child' - ) - const content = await failReason.evaluate((n) => n.innerText) - console.log('fail reason:', content) - expect(content).toContain('TiDB authentication failed') - }, - 10 * 1000 - ) - - it( - 'should login success by correct password', - async () => { - const page = await browser.newPage() - await page.goto(LOGIN_URL) - - const title = await page.title() - expect(title).toBe('TiDB Dashboard') - - const loginBtn = await page.waitForSelector('button#signin_btn') - await Promise.all([page.waitForNavigation(), loginBtn.click()]) - const url = await page.url() - expect(url).toBe(OVERVIEW_URL) - }, - 10 * 1000 - ) -}) diff --git a/ui/tests/e2e/search_log.test.ts b/ui/tests/e2e/search_log.test.ts index 8a81f03ca2..e8ab6635c8 100644 --- a/ui/tests/e2e/search_log.test.ts +++ b/ui/tests/e2e/search_log.test.ts @@ -1,92 +1,39 @@ -import puppeteer from 'puppeteer' -import 
ppExpect from 'expect-puppeteer' -import { LOGIN_URL, PUPPETEER_CONFIG } from './test_config' +import 'expect-puppeteer' +import { do_sign_in } from './utils/sign_in' describe('Search Logs', () => { - let browser - beforeAll(async () => { - browser = await puppeteer.launch(PUPPETEER_CONFIG) - }) - - afterAll(() => { - browser.close() - }) - it( 'should search correct logs', async () => { - const page = await browser.newPage() - - // login - await page.goto(LOGIN_URL) - await ppExpect(page).toClick('button#signin_btn') - - // jump to search logs page - await page.waitForSelector('a#search_logs') - const searchLogsLink = await page.$('a#search_logs') - await searchLogsLink.click() + await do_sign_in() - // this fails randomly and high possibility, says can't find "a#search_logs" element - // await ppExpect(page).toClick('a#search_logs') + await Promise.all([page.waitForNavigation(), page.click('a#search_logs')]) - // find search form - const searchForm = await page.waitForSelector('form#search_form') + // Fill keyword + await expect(page).toFill('[data-e2e="log_search_keywords"]', 'Welcome') - // choose time range - await ppExpect(searchForm).toClick( - 'button[data-e2e="timerange-selector"]' + // Deselect PD instance + await page.click('[data-e2e="log_search_instances"]') + await expect(page).toClick( + '[data-e2e="log_search_instances_drop"] .ms-GroupHeader-title', + { + text: 'PD', + } ) - const secondsOf1Hour = 60 * 60 - await ppExpect(page).toClick( - `div[data-e2e="common-timeranges"] div[data-e2e="timerange-${secondsOf1Hour}"]` - ) - // to hide dropdown - await ppExpect(searchForm).toClick( - 'button[data-e2e="timerange-selector"]' - ) - - // set log level to INFO - await ppExpect(searchForm).toClick('#logLevel') - await ppExpect(page).toClick('div[data-e2e="level_2"]') - - // select TiDB component - // https://stackoverflow.com/questions/59882543/how-to-wait-for-a-button-to-be-enabled-and-click-with-puppeteer - await page.waitForSelector('div#instances input:not([disabled])') - await ppExpect(searchForm).toClick('div#instances') - // components selector dropdown is a DOM node with absolute position - // and its parent is body, failed to add id or data-e2e to it - // cancel select PD and TiKV, and only remain TiDB - await ppExpect(page).toClick('div[data-e2e="table-with-filter"] span', { - text: 'PD', - }) - await ppExpect(page).toClick('div[data-e2e="table-with-filter"] span', { - text: 'TiKV', - }) - // to hide dropdown - await ppExpect(searchForm).toClick('div#instances') - - // input keyword - await ppExpect(page).toFill('input#keywords', 'welcome') - - // start search - await ppExpect(searchForm).toClick('button#search_btn') - - // check search result - let logsTable = await page.waitForSelector( - 'div[data-e2e="search-result"] div[role="presentation"]:first-child' - ) - const url = await page.url() - console.log('current url:', url) - let content = await logsTable.evaluate((node) => node.innerText) - console.log(content) - - logsTable = await page.waitForSelector( - 'div[data-e2e="search-result"] div[role="presentation"]:last-child' + await page.click('[data-e2e="log_search_instances"]') + + // Start search + await page.click('[data-e2e="log_search_submit"]') + + await page.waitForSelector('[data-e2e="log_search_result"]') + await page.waitForFunction( + `document + .querySelector('[data-e2e="log_search_result"]') + .innerText + .includes("Welcome to TiDB")`, + { timeout: 5000 } ) - content = await logsTable.evaluate((node) => node.innerText) - 
expect(content).toContain('Welcome to TiDB') - expect(content.includes('Welcome to TiKV')).toBe(false) }, - 25 * 1000 + 30 * 1000 ) }) diff --git a/ui/tests/e2e/sign_in.test.ts b/ui/tests/e2e/sign_in.test.ts new file mode 100644 index 0000000000..5e20fae316 --- /dev/null +++ b/ui/tests/e2e/sign_in.test.ts @@ -0,0 +1,28 @@ +import 'expect-puppeteer' +import { do_sign_in } from './utils/sign_in' +import { LOGIN_URL, OVERVIEW_URL } from './_config' + +describe('Sign In', () => { + it('should fail to sign in using incorrect password', async () => { + await page.goto(LOGIN_URL) + + await expect(page).toFill( + '[data-e2e="signin_password_input"]', + 'incorrect_password' + ) + await expect(page).toClick('[data-e2e="signin_submit"]') + await page.waitForFunction( + `document + .querySelector('[data-e2e="signin_password_form_item"]') + .innerText + .includes("TiDB authentication failed")`, + { timeout: 5000 } + ) + }) + + it('should sign in using correct password', async () => { + await do_sign_in() + const url = await page.url() + expect(url).toBe(OVERVIEW_URL) + }) +}) diff --git a/ui/tests/e2e/test_config.ts b/ui/tests/e2e/test_config.ts deleted file mode 100644 index 9b6259a084..0000000000 --- a/ui/tests/e2e/test_config.ts +++ /dev/null @@ -1,12 +0,0 @@ -export let SERVER_URL = `${ - process.env.SERVER_URL || 'http://localhost:3001/dashboard' -}#` -export const LOGIN_URL = SERVER_URL + '/signin' -export const OVERVIEW_URL = SERVER_URL + '/overview' - -export const PUPPETEER_CONFIG = process.env.CI - ? undefined - : { - headless: false, - slowMo: 80, - } diff --git a/ui/tests/e2e/utils/sign_in.ts b/ui/tests/e2e/utils/sign_in.ts new file mode 100644 index 0000000000..08cfe07a9b --- /dev/null +++ b/ui/tests/e2e/utils/sign_in.ts @@ -0,0 +1,10 @@ +import { LOGIN_URL } from '../_config' + +export async function do_sign_in() { + await page.goto(LOGIN_URL) + + await Promise.all([ + page.waitForNavigation(), + page.click('[data-e2e="signin_submit"]'), + ]) +} diff --git a/ui/tests/jest-puppeteer.config.js b/ui/tests/jest-puppeteer.config.js new file mode 100644 index 0000000000..c52e6f49c5 --- /dev/null +++ b/ui/tests/jest-puppeteer.config.js @@ -0,0 +1,5 @@ +module.exports = { + launch: { + headless: process.env.HEADLESS !== 'false', + }, +} diff --git a/ui/tests/jest.config.js b/ui/tests/jest.config.js new file mode 100644 index 0000000000..1fb0901893 --- /dev/null +++ b/ui/tests/jest.config.js @@ -0,0 +1,4 @@ +module.exports = { + preset: './e2e/_preset.js', + setupFilesAfterEnv: ['expect-puppeteer', './e2e/_setup.js'], +} diff --git a/ui/tests/jestconfig.json b/ui/tests/jestconfig.json deleted file mode 100644 index cf5b2377b4..0000000000 --- a/ui/tests/jestconfig.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "transform": { - "^.+\\.(t|j)sx?$": "ts-jest" - }, - "setupFilesAfterEnv": ["expect-puppeteer"] -} diff --git a/ui/tests/package.json b/ui/tests/package.json index 1c98eeb3c2..f37afc0cef 100644 --- a/ui/tests/package.json +++ b/ui/tests/package.json @@ -3,14 +3,16 @@ "version": "1.0.0", "license": "MIT", "scripts": { - "test": "jest --config jestconfig.json" + "test": "jest --runInBand" }, "devDependencies": { "@types/expect-puppeteer": "^4.4.0", "@types/jest": "^25.1.4", + "@types/jest-environment-puppeteer": "^4.4.0", "@types/puppeteer": "^2.0.1", "expect-puppeteer": "^4.4.0", "jest": "^25.1.0", + "jest-puppeteer": "^4.4.0", "puppeteer": "^2.1.1", "ts-jest": "^25.2.1", "typescript": "^3.7.4" diff --git a/ui/tests/yarn.lock b/ui/tests/yarn.lock index 6fd9c984ad..07c428d3d2 100644 --- 
a/ui/tests/yarn.lock +++ b/ui/tests/yarn.lock @@ -209,6 +209,38 @@ exec-sh "^0.3.2" minimist "^1.2.0" +"@hapi/address@2.x.x": + version "2.1.4" + resolved "https://registry.yarnpkg.com/@hapi/address/-/address-2.1.4.tgz#5d67ed43f3fd41a69d4b9ff7b56e7c0d1d0a81e5" + integrity sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ== + +"@hapi/bourne@1.x.x": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@hapi/bourne/-/bourne-1.3.2.tgz#0a7095adea067243ce3283e1b56b8a8f453b242a" + integrity sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA== + +"@hapi/hoek@8.x.x", "@hapi/hoek@^8.3.0": + version "8.5.1" + resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-8.5.1.tgz#fde96064ca446dec8c55a8c2f130957b070c6e06" + integrity sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow== + +"@hapi/joi@^15.0.3": + version "15.1.1" + resolved "https://registry.yarnpkg.com/@hapi/joi/-/joi-15.1.1.tgz#c675b8a71296f02833f8d6d243b34c57b8ce19d7" + integrity sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ== + dependencies: + "@hapi/address" "2.x.x" + "@hapi/bourne" "1.x.x" + "@hapi/hoek" "8.x.x" + "@hapi/topo" "3.x.x" + +"@hapi/topo@3.x.x": + version "3.1.6" + resolved "https://registry.yarnpkg.com/@hapi/topo/-/topo-3.1.6.tgz#68d935fa3eae7fdd5ab0d7f953f3205d8b2bfc29" + integrity sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ== + dependencies: + "@hapi/hoek" "^8.3.0" + "@istanbuljs/load-nyc-config@^1.0.0": version "1.0.0" resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.0.0.tgz#10602de5570baea82f8afbfa2630b24e7a8cfe5b" @@ -277,6 +309,16 @@ "@jest/types" "^25.1.0" jest-mock "^25.1.0" +"@jest/environment@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-26.6.2.tgz#ba364cc72e221e79cc8f0a99555bf5d7577cf92c" + integrity sha512-nFy+fHl28zUrRsCeMB61VDThV1pVTtlEokBRgqPrcT1JNq4yRNIyTHfyht6PqtUvY9IsuLGTrbG8kPXjSZIZwA== + dependencies: + "@jest/fake-timers" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + jest-mock "^26.6.2" + "@jest/fake-timers@^25.1.0": version "25.1.0" resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-25.1.0.tgz#a1e0eff51ffdbb13ee81f35b52e0c1c11a350ce8" @@ -288,6 +330,18 @@ jest-util "^25.1.0" lolex "^5.0.0" +"@jest/fake-timers@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-26.6.2.tgz#459c329bcf70cee4af4d7e3f3e67848123535aad" + integrity sha512-14Uleatt7jdzefLPYM3KLcnUl1ZNikaKq34enpb5XG9i81JpppDb5muZvonvKyrl7ftEHkKS5L5/eB/kxJ+bvA== + dependencies: + "@jest/types" "^26.6.2" + "@sinonjs/fake-timers" "^6.0.1" + "@types/node" "*" + jest-message-util "^26.6.2" + jest-mock "^26.6.2" + jest-util "^26.6.2" + "@jest/reporters@^25.1.0": version "25.1.0" resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-25.1.0.tgz#9178ecf136c48f125674ac328f82ddea46e482b0" @@ -373,6 +427,17 @@ source-map "^0.6.1" write-file-atomic "^3.0.0" +"@jest/types@>=24 && <=26", "@jest/types@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e" + integrity sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + 
"@types/node" "*" + "@types/yargs" "^15.0.0" + chalk "^4.0.0" + "@jest/types@^25.1.0": version "25.1.0" resolved "https://registry.yarnpkg.com/@jest/types/-/types-25.1.0.tgz#b26831916f0d7c381e11dbb5e103a72aed1b4395" @@ -390,6 +455,13 @@ dependencies: type-detect "4.0.8" +"@sinonjs/fake-timers@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz#293674fccb3262ac782c7aadfdeca86b10c75c40" + integrity sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA== + dependencies: + "@sinonjs/commons" "^1.7.0" + "@types/babel__core@^7.1.0": version "7.1.6" resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.6.tgz#16ff42a5ae203c9af1c6e190ed1f30f83207b610" @@ -456,6 +528,22 @@ "@types/istanbul-lib-coverage" "*" "@types/istanbul-lib-report" "*" +"@types/istanbul-reports@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.0.tgz#508b13aa344fa4976234e75dddcc34925737d821" + integrity sha512-nwKNbvnwJ2/mndE9ItP/zc2TCzw6uuodnF4EHYWD+gCQDVBuRQL5UzbZD0/ezy1iKsFU2ZQiDqg4M9dN4+wZgA== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest-environment-puppeteer@^4.4.0": + version "4.4.0" + resolved "https://registry.yarnpkg.com/@types/jest-environment-puppeteer/-/jest-environment-puppeteer-4.4.0.tgz#8d343035934610accdbfd4582e765823b948aa94" + integrity sha512-BjJWUmaui6CZE449y/xGVPPvOcNwlHZXxWekv38kZqu1Pda+Jn90pKaxWtxM5NAC2HaUEabsCWlTeHiJvno/hg== + dependencies: + "@jest/types" ">=24 && <=26" + "@types/puppeteer" "*" + jest-environment-node ">=24 && <=26" + "@types/jest@*", "@types/jest@^25.1.4": version "25.1.4" resolved "https://registry.yarnpkg.com/@types/jest/-/jest-25.1.4.tgz#9e9f1e59dda86d3fd56afce71d1ea1b331f6f760" @@ -486,6 +574,11 @@ resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-1.0.1.tgz#0a851d3bd96498fa25c33ab7278ed3bd65f06c3e" integrity sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw== +"@types/stack-utils@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.0.tgz#7036640b4e21cc2f259ae826ce843d277dad8cff" + integrity sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw== + "@types/yargs-parser@*": version "15.0.0" resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-15.0.0.tgz#cb3f9f741869e20cce330ffbeb9271590483882d" @@ -832,7 +925,7 @@ caseless@~0.12.0: resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= -chalk@^2.0.0: +chalk@^2.0.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -849,6 +942,14 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" +chalk@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a" + integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + ci-info@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" @@ -873,6 +974,17 @@ 
cliui@^6.0.0: strip-ansi "^6.0.0" wrap-ansi "^6.2.0" +clone-deep@^0.2.4: + version "0.2.4" + resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-0.2.4.tgz#4e73dd09e9fb971cc38670c5dced9c1896481cc6" + integrity sha1-TnPdCen7lxzDhnDF3O2cGJZIHMY= + dependencies: + for-own "^0.1.3" + is-plain-object "^2.0.1" + kind-of "^3.0.2" + lazy-cache "^1.0.3" + shallow-clone "^0.1.2" + co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" @@ -922,6 +1034,16 @@ combined-stream@^1.0.6, combined-stream@~1.0.6: dependencies: delayed-stream "~1.0.0" +commander@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/commander/-/commander-3.0.2.tgz#6837c3fb677ad9933d1cfba42dd14d5117d6b39e" + integrity sha512-Gar0ASD4BDyKC4hl4DwHqDrmvjoxWKZigVnAbn5H1owvm4CxCPdb0HQDehwNYMJpla5+M2tPmPARzhtYuwpHow== + +commander@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae" + integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg== + component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" @@ -954,6 +1076,11 @@ copy-descriptor@^0.1.0: resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= +core-js@^2.6.5: + version "2.6.11" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.11.tgz#38831469f9922bded8ee21c9dc46985e0399308c" + integrity sha512-5wjnpaT/3dV+XB4borEsnAYQchn00XSgTAWKDkEqv+K8KevjbzmofK6hfJ9TZIlpj2N0xQpazy7PiRQiWHqzWg== + core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" @@ -996,6 +1123,14 @@ cssstyle@^2.0.0: dependencies: cssom "~0.3.6" +cwd@^0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/cwd/-/cwd-0.10.0.tgz#172400694057c22a13b0cf16162c7e4b7a7fe567" + integrity sha1-FyQAaUBXwioTsM8WFix+S3p/5Wc= + dependencies: + find-pkg "^0.1.2" + fs-exists-sync "^0.1.0" + dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" @@ -1143,6 +1278,11 @@ escape-string-regexp@^1.0.5: resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + escodegen@^1.11.1: version "1.14.1" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.14.1.tgz#ba01d0c8278b5e95a9a45350142026659027a457" @@ -1222,6 +1362,13 @@ expand-brackets@^2.1.4: snapdragon "^0.8.1" to-regex "^3.0.1" +expand-tilde@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-1.2.2.tgz#0b81eba897e5a3d31d1c3d102f8f01441e559449" + integrity sha1-C4HrqJflo9MdHD0QL48BRB5VlEk= + dependencies: + os-homedir "^1.0.1" + expect-puppeteer@^4.4.0: version "4.4.0" resolved 
"https://registry.yarnpkg.com/expect-puppeteer/-/expect-puppeteer-4.4.0.tgz#1c948af08acdd6c8cbdb7f90e617f44d86888886" @@ -1339,6 +1486,30 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" +find-file-up@^0.1.2: + version "0.1.3" + resolved "https://registry.yarnpkg.com/find-file-up/-/find-file-up-0.1.3.tgz#cf68091bcf9f300a40da411b37da5cce5a2fbea0" + integrity sha1-z2gJG8+fMApA2kEbN9pczlovvqA= + dependencies: + fs-exists-sync "^0.1.0" + resolve-dir "^0.1.0" + +find-pkg@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/find-pkg/-/find-pkg-0.1.2.tgz#1bdc22c06e36365532e2a248046854b9788da557" + integrity sha1-G9wiwG42NlUy4qJIBGhUuXiNpVc= + dependencies: + find-file-up "^0.1.2" + +find-process@^1.4.3: + version "1.4.4" + resolved "https://registry.yarnpkg.com/find-process/-/find-process-1.4.4.tgz#52820561162fda0d1feef9aed5d56b3787f0fd6e" + integrity sha512-rRSuT1LE4b+BFK588D2V8/VG9liW0Ark1XJgroxZXI0LtwmQJOb490DvDYvbm+Hek9ETFzTutGfJ90gumITPhQ== + dependencies: + chalk "^4.0.0" + commander "^5.1.0" + debug "^4.1.1" + find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" @@ -1347,11 +1518,23 @@ find-up@^4.0.0, find-up@^4.1.0: locate-path "^5.0.0" path-exists "^4.0.0" -for-in@^1.0.2: +for-in@^0.1.3: + version "0.1.8" + resolved "https://registry.yarnpkg.com/for-in/-/for-in-0.1.8.tgz#d8773908e31256109952b1fdb9b3fa867d2775e1" + integrity sha1-2Hc5COMSVhCZUrH9ubP6hn0ndeE= + +for-in@^1.0.1, for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= +for-own@^0.1.3: + version "0.1.5" + resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.5.tgz#5265c681a4f294dabbf17c9509b6763aa84510ce" + integrity sha1-UmXGgaTylNq78XyVCbZ2OqhFEM4= + dependencies: + for-in "^1.0.1" + forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" @@ -1373,6 +1556,11 @@ fragment-cache@^0.2.1: dependencies: map-cache "^0.2.2" +fs-exists-sync@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz#982d6893af918e72d08dec9e8673ff2b5a8d6add" + integrity sha1-mC1ok6+RjnLQjeyehnP/K1qNat0= + fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -1436,6 +1624,24 @@ glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4: once "^1.3.0" path-is-absolute "^1.0.0" +global-modules@^0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-0.2.3.tgz#ea5a3bed42c6d6ce995a4f8a1269b5dae223828d" + integrity sha1-6lo77ULG1s6ZWk+KEmm12uIjgo0= + dependencies: + global-prefix "^0.1.4" + is-windows "^0.2.0" + +global-prefix@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-0.1.5.tgz#8d3bc6b8da3ca8112a160d8d496ff0462bfef78f" + integrity sha1-jTvGuNo8qBEqFg2NSW/wRiv+948= + dependencies: + homedir-polyfill "^1.0.0" + ini "^1.3.4" + is-windows "^0.2.0" + which "^1.2.12" + globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" @@ -1446,6 +1652,11 @@ graceful-fs@^4.2.3: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" 
integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== +graceful-fs@^4.2.4: + version "4.2.4" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" + integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== + growly@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" @@ -1517,6 +1728,13 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" +homedir-polyfill@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" + integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== + dependencies: + parse-passwd "^1.0.0" + html-encoding-sniffer@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz#e70d84b94da53aa375e11fe3a351be6642ca46f8" @@ -1584,6 +1802,11 @@ inherits@2, inherits@^2.0.3, inherits@~2.0.3: resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== +ini@^1.3.4: + version "1.3.5" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" + integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== + ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" @@ -1603,7 +1826,7 @@ is-accessor-descriptor@^1.0.0: dependencies: kind-of "^6.0.0" -is-buffer@^1.1.5: +is-buffer@^1.0.2, is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== @@ -1691,7 +1914,7 @@ is-number@^7.0.0: resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== -is-plain-object@^2.0.3, is-plain-object@^2.0.4: +is-plain-object@^2.0.1, is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== @@ -1727,6 +1950,11 @@ is-typedarray@^1.0.0, is-typedarray@~1.0.0: resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= +is-windows@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-0.2.0.tgz#de1aa6d63ea29dd248737b69f1ff8b8002d2108c" + integrity sha1-3hqm1j6indJIc3tp8f+LgALSEIw= + is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" @@ -1859,6 +2087,19 @@ jest-config@^25.1.0: pretty-format "^25.1.0" realpath-native "^1.1.0" +jest-dev-server@^4.4.0: + version "4.4.0" + resolved 
"https://registry.yarnpkg.com/jest-dev-server/-/jest-dev-server-4.4.0.tgz#557113faae2877452162696aa94c1e44491ab011" + integrity sha512-STEHJ3iPSC8HbrQ3TME0ozGX2KT28lbT4XopPxUm2WimsX3fcB3YOptRh12YphQisMhfqNSNTZUmWyT3HEXS2A== + dependencies: + chalk "^3.0.0" + cwd "^0.10.0" + find-process "^1.4.3" + prompts "^2.3.0" + spawnd "^4.4.0" + tree-kill "^1.2.2" + wait-on "^3.3.0" + jest-diff@^25.1.0: version "25.1.0" resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-25.1.0.tgz#58b827e63edea1bc80c1de952b80cec9ac50e1ad" @@ -1899,6 +2140,18 @@ jest-environment-jsdom@^25.1.0: jest-util "^25.1.0" jsdom "^15.1.1" +"jest-environment-node@>=24 && <=26": + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-26.6.2.tgz#824e4c7fb4944646356f11ac75b229b0035f2b0c" + integrity sha512-zhtMio3Exty18dy8ee8eJ9kjnRyZC1N4C1Nt/VShN1apyXc8rWGtJ9lI7vqiWcyyXS4BVSEn9lxAM2D+07/Tag== + dependencies: + "@jest/environment" "^26.6.2" + "@jest/fake-timers" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + jest-mock "^26.6.2" + jest-util "^26.6.2" + jest-environment-node@^25.1.0: version "25.1.0" resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-25.1.0.tgz#797bd89b378cf0bd794dc8e3dca6ef21126776db" @@ -1910,6 +2163,16 @@ jest-environment-node@^25.1.0: jest-mock "^25.1.0" jest-util "^25.1.0" +jest-environment-puppeteer@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/jest-environment-puppeteer/-/jest-environment-puppeteer-4.4.0.tgz#d82a37e0e0c51b63cc6b15dea101d53967508860" + integrity sha512-iV8S8+6qkdTM6OBR/M9gKywEk8GDSOe05hspCs5D8qKSwtmlUfdtHfB4cakdc68lC6YfK3AUsLirpfgodCHjzQ== + dependencies: + chalk "^3.0.0" + cwd "^0.10.0" + jest-dev-server "^4.4.0" + merge-deep "^3.0.2" + jest-get-type@^25.1.0: version "25.1.0" resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-25.1.0.tgz#1cfe5fc34f148dc3a8a3b7275f6b9ce9e2e8a876" @@ -1988,6 +2251,21 @@ jest-message-util@^25.1.0: slash "^3.0.0" stack-utils "^1.0.1" +jest-message-util@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-26.6.2.tgz#58173744ad6fc0506b5d21150b9be56ef001ca07" + integrity sha512-rGiLePzQ3AzwUshu2+Rn+UMFk0pHN58sOG+IaJbk5Jxuqo3NYO1U2/MIR4S1sKgsoYSXSzdtSa0TgrmtUwEbmA== + dependencies: + "@babel/code-frame" "^7.0.0" + "@jest/types" "^26.6.2" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.4" + micromatch "^4.0.2" + pretty-format "^26.6.2" + slash "^3.0.0" + stack-utils "^2.0.2" + jest-mock@^25.1.0: version "25.1.0" resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-25.1.0.tgz#411d549e1b326b7350b2e97303a64715c28615fd" @@ -1995,11 +2273,27 @@ jest-mock@^25.1.0: dependencies: "@jest/types" "^25.1.0" +jest-mock@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-26.6.2.tgz#d6cb712b041ed47fe0d9b6fc3474bc6543feb302" + integrity sha512-YyFjePHHp1LzpzYcmgqkJ0nm0gg/lJx2aZFzFy1S6eUqNjXsOqTK10zNRff2dNfssgokjkG65OlWNcIlgd3zew== + dependencies: + "@jest/types" "^26.6.2" + "@types/node" "*" + jest-pnp-resolver@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.1.tgz#ecdae604c077a7fbc70defb6d517c3c1c898923a" integrity sha512-pgFw2tm54fzgYvc/OHrnysABEObZCUNFnhjoRjaVOCN8NYc032/gVjPaHD4Aq6ApkSieWtfKAFQtmDKAmhupnQ== +jest-puppeteer@^4.4.0: + version "4.4.0" + resolved 
"https://registry.yarnpkg.com/jest-puppeteer/-/jest-puppeteer-4.4.0.tgz#4b906e638a5e3782ed865e7b673c82047b85952e" + integrity sha512-ZaiCTlPZ07B9HW0erAWNX6cyzBqbXMM7d2ugai4epBDKpKvRDpItlRQC6XjERoJELKZsPziFGS0OhhUvTvQAXA== + dependencies: + expect-puppeteer "^4.4.0" + jest-environment-puppeteer "^4.4.0" + jest-regex-util@^25.1.0: version "25.1.0" resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-25.1.0.tgz#efaf75914267741838e01de24da07b2192d16d87" @@ -2115,6 +2409,18 @@ jest-util@^25.1.0: is-ci "^2.0.0" mkdirp "^0.5.1" +jest-util@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-26.6.2.tgz#907535dbe4d5a6cb4c47ac9b926f6af29576cbc1" + integrity sha512-MDW0fKfsn0OI7MS7Euz6h8HNDXVQ0gaM9uW6RjfDmd1DAFcaxX9OqIakHIqhbnmF08Cf2DLDG+ulq8YQQ0Lp0Q== + dependencies: + "@jest/types" "^26.6.2" + "@types/node" "*" + chalk "^4.0.0" + graceful-fs "^4.2.4" + is-ci "^2.0.0" + micromatch "^4.0.2" + jest-validate@^25.1.0: version "25.1.0" resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-25.1.0.tgz#1469fa19f627bb0a9a98e289f3e9ab6a668c732a" @@ -2243,6 +2549,13 @@ jsprim@^1.2.2: json-schema "0.2.3" verror "1.10.0" +kind-of@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-2.0.1.tgz#018ec7a4ce7e3a86cb9141be519d24c8faa981b5" + integrity sha1-AY7HpM5+OobLkUG+UZ0kyPqpgbU= + dependencies: + is-buffer "^1.0.2" + kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" @@ -2272,6 +2585,16 @@ kleur@^3.0.3: resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== +lazy-cache@^0.2.3: + version "0.2.7" + resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-0.2.7.tgz#7feddf2dcb6edb77d11ef1d117ab5ffdf0ab1b65" + integrity sha1-f+3fLctu23fRHvHRF6tf/fCrG2U= + +lazy-cache@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" + integrity sha1-odePw6UEdMuAhF07O24dpJpEbo4= + leven@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" @@ -2345,6 +2668,15 @@ map-visit@^1.0.0: dependencies: object-visit "^1.0.0" +merge-deep@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/merge-deep/-/merge-deep-3.0.2.tgz#f39fa100a4f1bd34ff29f7d2bf4508fbb8d83ad2" + integrity sha512-T7qC8kg4Zoti1cFd8Cr0M+qaZfOwjlPDEdZIIPPB2JZctjaPM4fX+i7HOId69tAti2fvO6X5ldfYUONDODsrkA== + dependencies: + arr-union "^3.1.0" + clone-deep "^0.2.4" + kind-of "^3.0.2" + merge-stream@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" @@ -2424,6 +2756,14 @@ mixin-deep@^1.2.0: for-in "^1.0.2" is-extendable "^1.0.1" +mixin-object@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/mixin-object/-/mixin-object-2.0.1.tgz#4fb949441dab182540f1fe035ba60e1947a5e57e" + integrity sha1-T7lJRB2rGCVA8f4DW6YOGUel5X4= + dependencies: + for-in "^0.1.3" + is-extendable "^0.1.1" + mkdirp@0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" @@ -2609,6 +2949,11 @@ optionator@^0.8.1: type-check "~0.3.2" word-wrap "~1.2.3" +os-homedir@^1.0.1: + version 
"1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= + p-each-series@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-each-series/-/p-each-series-2.1.0.tgz#961c8dd3f195ea96c747e636b262b800a6b1af48" @@ -2643,6 +2988,11 @@ p-try@^2.0.0: resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== +parse-passwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" + integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= + parse5@5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/parse5/-/parse5-5.1.0.tgz#c59341c9723f414c452975564c7c00a68d58acd2" @@ -2732,6 +3082,16 @@ pretty-format@^25.1.0: ansi-styles "^4.0.0" react-is "^16.12.0" +pretty-format@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-26.6.2.tgz#e35c2705f14cb7fe2fe94fa078345b444120fc93" + integrity sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg== + dependencies: + "@jest/types" "^26.6.2" + ansi-regex "^5.0.0" + ansi-styles "^4.0.0" + react-is "^17.0.1" + process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" @@ -2750,6 +3110,14 @@ prompts@^2.0.1: kleur "^3.0.3" sisteransi "^1.0.4" +prompts@^2.3.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.0.tgz#4aa5de0723a231d1ee9121c40fdf663df73f61d7" + integrity sha512-awZAKrk3vN6CroQukBL+R9051a4R3zCZBlJm/HBfrSZ8iTpYix3VX1vU4mveiLpiwmOJT4wokTF9m6HUk4KqWQ== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + proxy-from-env@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" @@ -2799,6 +3167,11 @@ react-is@^16.12.0: resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== +react-is@^17.0.1: + version "17.0.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.1.tgz#5b3531bd76a645a4c9fb6e693ed36419e3301339" + integrity sha512-NAnt2iGDXohE5LI7uBnLnqvLQMtzhkiAOLXTmv+qnF9Ky7xAPcX8Up/xWIhxvLVGJvuLiNc4xQLtuqDRzb4fSA== + readable-stream@^2.2.2: version "2.3.7" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" @@ -2901,6 +3274,14 @@ resolve-cwd@^3.0.0: dependencies: resolve-from "^5.0.0" +resolve-dir@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-0.1.1.tgz#b219259a5602fac5c5c496ad894a6e8cc430261e" + integrity sha1-shklmlYC+sXFxJatiUpujMQwJh4= + dependencies: + expand-tilde "^1.2.2" + global-modules "^0.2.3" + resolve-from@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" @@ -2947,6 +3328,11 @@ rsvp@^4.8.4: resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.5.tgz#c8f155311d167f68f21e168df71ec5b083113734" integrity 
sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA== +rx@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/rx/-/rx-4.1.0.tgz#a5f13ff79ef3b740fe30aa803fb09f98805d4782" + integrity sha1-pfE/957zt0D+MKqAP7CfmIBdR4I= + safe-buffer@^5.0.1, safe-buffer@^5.1.2: version "5.2.0" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" @@ -3021,6 +3407,16 @@ set-value@^2.0.0, set-value@^2.0.1: is-plain-object "^2.0.3" split-string "^3.0.1" +shallow-clone@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-0.1.2.tgz#5909e874ba77106d73ac414cfec1ffca87d97060" + integrity sha1-WQnodLp3EG1zrEFM/sH/yofZcGA= + dependencies: + is-extendable "^0.1.1" + kind-of "^2.0.1" + lazy-cache "^0.2.3" + mixin-object "^2.0.1" + shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" @@ -3055,7 +3451,7 @@ signal-exit@^3.0.0, signal-exit@^3.0.2: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= -sisteransi@^1.0.4: +sisteransi@^1.0.4, sisteransi@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== @@ -3134,6 +3530,16 @@ source-map@^0.7.3: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== +spawnd@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/spawnd/-/spawnd-4.4.0.tgz#bb52c5b34a22e3225ae1d3acb873b2cd58af0886" + integrity sha512-jLPOfB6QOEgMOQY15Z6+lwZEhH3F5ncXxIaZ7WHPIapwNNLyjrs61okj3VJ3K6tmP5TZ6cO0VAu9rEY4MD4YQg== + dependencies: + exit "^0.1.2" + signal-exit "^3.0.2" + tree-kill "^1.2.2" + wait-port "^0.2.7" + split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" @@ -3166,6 +3572,13 @@ stack-utils@^1.0.1: resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-1.0.2.tgz#33eba3897788558bebfc2db059dc158ec36cebb8" integrity sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA== +stack-utils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.3.tgz#cd5f030126ff116b78ccb3c027fe302713b61277" + integrity sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw== + dependencies: + escape-string-regexp "^2.0.0" + static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" @@ -3363,6 +3776,11 @@ tr46@^1.0.1: dependencies: punycode "^2.1.0" +tree-kill@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.2.tgz#4ca09a9092c88b73a7cdc5e8a01b507b0790a0cc" + integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A== + ts-jest@^25.2.1: version "25.2.1" resolved 
"https://registry.yarnpkg.com/ts-jest/-/ts-jest-25.2.1.tgz#49bf05da26a8b7fbfbc36b4ae2fcdc2fef35c85d" @@ -3514,6 +3932,26 @@ w3c-xmlserializer@^1.1.2: webidl-conversions "^4.0.2" xml-name-validator "^3.0.0" +wait-on@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/wait-on/-/wait-on-3.3.0.tgz#9940981d047a72a9544a97b8b5fca45b2170a082" + integrity sha512-97dEuUapx4+Y12aknWZn7D25kkjMk16PbWoYzpSdA8bYpVfS6hpl2a2pOWZ3c+Tyt3/i4/pglyZctG3J4V1hWQ== + dependencies: + "@hapi/joi" "^15.0.3" + core-js "^2.6.5" + minimist "^1.2.0" + request "^2.88.0" + rx "^4.1.0" + +wait-port@^0.2.7: + version "0.2.9" + resolved "https://registry.yarnpkg.com/wait-port/-/wait-port-0.2.9.tgz#3905cf271b5dbe37a85c03b85b418b81cb24ee55" + integrity sha512-hQ/cVKsNqGZ/UbZB/oakOGFqic00YAMM5/PEj3Bt4vKarv2jWIWzDbqlwT94qMs/exAQAsvMOq99sZblV92zxQ== + dependencies: + chalk "^2.4.2" + commander "^3.0.2" + debug "^4.1.1" + walker@^1.0.7, walker@~1.0.5: version "1.0.7" resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb" @@ -3552,7 +3990,7 @@ which-module@^2.0.0: resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= -which@^1.2.9, which@^1.3.1: +which@^1.2.12, which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== diff --git a/ui/yarn.lock b/ui/yarn.lock index 109ffea9a8..ee61ac900b 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -9209,6 +9209,13 @@ history@5.0.0-beta.9: dependencies: "@babel/runtime" "^7.7.6" +history@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/history/-/history-5.0.0.tgz#0cabbb6c4bbf835addb874f8259f6d25101efd08" + integrity sha512-3NyRMKIiFSJmIPdq7FxkNMJkQ7ZEtVblOQ38VtKaA0zZMW1Eo6Q6W8oDKEflr1kNNTItSnk4JMCO1deeSgbLLg== + dependencies: + "@babel/runtime" "^7.7.6" + hmac-drbg@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" From 71f1cfe49ebf852d487ab1f2276c459b56882164 Mon Sep 17 00:00:00 2001 From: Sparkle <1284531+baurine@users.noreply.github.com> Date: Wed, 25 Nov 2020 22:53:01 +0800 Subject: [PATCH 23/29] Revert "ui: improve the expansion and collapse of sider menu (#767)" (#813) This reverts commit 47a9db5470e5b660f31f6653ebb4cc337a184e0b. 
--- ui/dashboardApp/layout/main/Sider/Banner.tsx | 22 ++++--- .../layout/main/Sider/index.module.less | 7 +- ui/dashboardApp/layout/main/Sider/index.tsx | 55 +++++++--------- ui/dashboardApp/layout/main/index.module.less | 24 +++---- ui/dashboardApp/layout/main/index.tsx | 65 ++++++++++++++++++- 5 files changed, 113 insertions(+), 60 deletions(-) diff --git a/ui/dashboardApp/layout/main/Sider/Banner.tsx b/ui/dashboardApp/layout/main/Sider/Banner.tsx index 25ec615a30..4c9b85ad07 100644 --- a/ui/dashboardApp/layout/main/Sider/Banner.tsx +++ b/ui/dashboardApp/layout/main/Sider/Banner.tsx @@ -1,7 +1,8 @@ import React, { useMemo } from 'react' -import { MenuFoldOutlined, MenuUnfoldOutlined } from '@ant-design/icons' +import { MenuUnfoldOutlined, MenuFoldOutlined } from '@ant-design/icons' import { useSize } from '@umijs/hooks' import Flexbox from '@g07cha/flexbox-react' +import { useSpring, animated } from 'react-spring' import { useClientRequest } from '@lib/utils/useClientRequest' import client, { InfoInfoResponse } from '@lib/client' @@ -45,14 +46,14 @@ export default function ToggleBanner({ onToggle, }) { const [bannerSize, bannerRef] = useSize() - const bannerStyle = { + const transBanner = useSpring({ opacity: collapsed ? 0 : 1, height: collapsed ? toggleHeight : bannerSize.height || 0, - } - const buttonStyle = { + }) + const transButton = useSpring({ left: collapsed ? 0 : fullWidth - toggleWidth, width: collapsed ? collapsedWidth : toggleWidth, - } + }) const { data, isLoading } = useClientRequest((reqConfig) => client.getInstance().infoGet(reqConfig) @@ -67,7 +68,10 @@ export default function ToggleBanner({ return (
-
+
-
-
+ + {collapsed ? ( ) : ( )} -
+
) } diff --git a/ui/dashboardApp/layout/main/Sider/index.module.less b/ui/dashboardApp/layout/main/Sider/index.module.less index 5d4843a92a..67dbe8cd8b 100644 --- a/ui/dashboardApp/layout/main/Sider/index.module.less +++ b/ui/dashboardApp/layout/main/Sider/index.module.less @@ -2,11 +2,10 @@ @sider-background: #f7f7fa; -.wrapper { - transition: width 444ms ease; -} - .sider { + position: fixed; + left: 0; + top: 0; height: 100%; z-index: 1; background: linear-gradient(@sider-background, #ebeffa); diff --git a/ui/dashboardApp/layout/main/Sider/index.tsx b/ui/dashboardApp/layout/main/Sider/index.tsx index 33b1f2bcb2..6cbffa74c4 100644 --- a/ui/dashboardApp/layout/main/Sider/index.tsx +++ b/ui/dashboardApp/layout/main/Sider/index.tsx @@ -1,9 +1,10 @@ -import React, { useCallback, useMemo, useState } from 'react' -import { BugOutlined, ExperimentOutlined } from '@ant-design/icons' +import React, { useState, useMemo } from 'react' +import { ExperimentOutlined, BugOutlined } from '@ant-design/icons' import { Layout, Menu } from 'antd' import { Link } from 'react-router-dom' import { useEventListener } from '@umijs/hooks' import { useTranslation } from 'react-i18next' +import { useSpring, animated } from 'react-spring' import client from '@lib/client' import Banner from './Banner' @@ -17,9 +18,10 @@ function useAppMenuItem(registry, appId, title?: string) { return null } return ( - : null}> + - {title ? title : t(`${appId}.nav_title`, appId)} + {app.icon ? : null} + {title ? title : t(`${appId}.nav_title`, appId)} ) @@ -36,12 +38,6 @@ function useActiveAppId(registry) { return appId } -function triggerResizeEvent() { - const event = document.createEvent('HTMLEvents') - event.initEvent('resize', true, false) - window.dispatchEvent(event) -} - function Sider({ registry, fullWidth, @@ -65,8 +61,12 @@ function Sider({ const debugSubMenu = ( } - title={t('nav.sider.debug')} + title={ + + + {t('nav.sider.debug')} + + } > {debugSubMenuItems} @@ -79,8 +79,12 @@ function Sider({ const experimentalSubMenu = ( } - title={t('nav.sider.experimental')} + title={ + + + {t('nav.sider.experimental')} + + } > {experimentalSubMenuItems} @@ -111,9 +115,9 @@ function Sider({ useAppMenuItem(registry, 'user_profile', displayName), ] - const siderStyle = { + const transSider = useSpring({ width: collapsed ? collapsedWidth : fullWidth, - } + }) const defaultOpenKeys = useMemo(() => { if (defaultCollapsed) { @@ -123,17 +127,8 @@ function Sider({ } }, [defaultCollapsed]) - const wrapperRef = useCallback((wrapper) => { - if (wrapper !== null) { - wrapper.addEventListener('transitionend', (e) => { - if (e.target !== wrapper || e.propertyName !== 'width') return - triggerResizeEvent() - }) - } - }, []) - return ( -
+ {extraMenuItems} -
+ ) } diff --git a/ui/dashboardApp/layout/main/index.module.less b/ui/dashboardApp/layout/main/index.module.less index 05709a45e0..4c050027a6 100644 --- a/ui/dashboardApp/layout/main/index.module.less +++ b/ui/dashboardApp/layout/main/index.module.less @@ -1,28 +1,14 @@ @import '~antd/es/style/themes/default.less'; .container { - display: flex; - position: fixed; - top: 0; - bottom: 0; - right: 0; - left: 0; height: 100vh; - width: 100vw; } .content { position: relative; - - flex: 1; - z-index: 3; background: #fff; min-height: 100vh; - box-shadow: 0 0 30px rgba(#000, 0.15); - - overflow-x: hidden; - overflow-y: auto; &:before, &:after { @@ -31,3 +17,13 @@ display: table; } } + +.contentBack { + position: fixed; + z-index: 2; + background: #fff; + top: 0; + height: 100%; + right: 0; + box-shadow: 0 0 30px rgba(#000, 0.15); +} diff --git a/ui/dashboardApp/layout/main/index.tsx b/ui/dashboardApp/layout/main/index.tsx index 594a8702da..5a23210942 100644 --- a/ui/dashboardApp/layout/main/index.tsx +++ b/ui/dashboardApp/layout/main/index.tsx @@ -1,14 +1,45 @@ -import React, { useCallback, useState } from 'react' +import React, { useState, useCallback, useEffect } from 'react' import { Root } from '@lib/components' import { useLocalStorageState } from '@umijs/hooks' import { HashRouter as Router } from 'react-router-dom' -import { animated, useSpring } from 'react-spring' +import { useSpring, animated } from 'react-spring' import Sider from './Sider' import styles from './index.module.less' const siderWidth = 260 const siderCollapsedWidth = 80 +const collapsedContentOffset = siderCollapsedWidth - siderWidth +const contentOffsetTrigger = collapsedContentOffset * 0.99 + +function triggerResizeEvent() { + const event = document.createEvent('HTMLEvents') + event.initEvent('resize', true, false) + window.dispatchEvent(event) +} + +const useContentLeftOffset = (collapsed) => { + const [offset, setOffset] = useState(siderWidth) + const onAnimationStart = useCallback(() => { + if (!collapsed) { + setOffset(siderWidth) + } + }, [collapsed]) + const onAnimationFrame = useCallback( + ({ x }) => { + if (collapsed && x < contentOffsetTrigger) { + setOffset(siderCollapsedWidth) + } + }, + [collapsed] + ) + useEffect(triggerResizeEvent, [offset]) + return { + contentLeftOffset: offset, + onAnimationStart, + onAnimationFrame, + } +} export default function App({ registry }) { const [collapsed, setCollapsed] = useLocalStorageState( @@ -16,6 +47,16 @@ export default function App({ registry }) { false ) const [defaultCollapsed] = useState(collapsed) + const { + contentLeftOffset, + onAnimationStart, + onAnimationFrame, + } = useContentLeftOffset(collapsed) + const transContentBack = useSpring({ + x: collapsed ? collapsedContentOffset : 0, + onStart: onAnimationStart, + onFrame: onAnimationFrame, + }) const transContainer = useSpring({ opacity: 1, from: { opacity: 0 }, @@ -43,9 +84,27 @@ export default function App({ registry }) { collapsedWidth={siderCollapsedWidth} animationDelay={0} /> + `translate3d(${x}px, 0, 0)` + ), + }} + > )} -
+
From 870a8cce28069eaf272f31acfcb13cf43aa68932 Mon Sep 17 00:00:00 2001 From: Wenxuan Date: Thu, 26 Nov 2020 17:08:48 +0800 Subject: [PATCH 24/29] clusterinfo: Refine (#815) Signed-off-by: Breezewish --- .../_shared/Vagrantfile.partial.pubKey.rb | 24 +- etc/manualTestEnv/complexCase1/README.md | 36 ++ etc/manualTestEnv/complexCase1/Vagrantfile | 40 ++ etc/manualTestEnv/complexCase1/topology.yaml | 85 ++++ etc/manualTestEnv/multiHost/README.md | 2 +- etc/manualTestEnv/multiReplica/README.md | 2 +- etc/manualTestEnv/singleHost/README.md | 2 +- .../singleHostMultiDisk/README.md | 2 +- pkg/apiserver/clusterinfo/host.go | 402 +++--------------- .../clusterinfo/hostinfo/cluster_config.go | 82 ++++ .../clusterinfo/hostinfo/cluster_hardware.go | 149 +++++++ .../clusterinfo/hostinfo/cluster_load.go | 92 ++++ .../clusterinfo/hostinfo/hostinfo.go | 95 +++++ pkg/apiserver/clusterinfo/service.go | 87 ++-- pkg/apiserver/clusterinfo/statistics.go | 191 +++++++++ pkg/utils/host/host.go | 49 +++ pkg/utils/topology/pd.go | 5 +- pkg/utils/topology/store.go | 7 +- pkg/utils/topology/tidb.go | 6 +- pkg/utils/topology/topology.go | 31 -- .../apps/ClusterInfo/components/DiskTable.tsx | 212 +++++++++ .../apps/ClusterInfo/components/HostTable.tsx | 322 +++++++------- .../components/Statistics.module.less | 8 + .../ClusterInfo/components/Statistics.tsx | 102 +++++ .../components/StoreLocationTree/index.tsx | 13 +- ui/lib/apps/ClusterInfo/pages/List.tsx | 14 + ui/lib/apps/ClusterInfo/translations/en.yaml | 28 +- ui/lib/apps/ClusterInfo/translations/zh.yaml | 26 +- 28 files changed, 1471 insertions(+), 643 deletions(-) create mode 100644 etc/manualTestEnv/complexCase1/README.md create mode 100644 etc/manualTestEnv/complexCase1/Vagrantfile create mode 100644 etc/manualTestEnv/complexCase1/topology.yaml create mode 100644 pkg/apiserver/clusterinfo/hostinfo/cluster_config.go create mode 100644 pkg/apiserver/clusterinfo/hostinfo/cluster_hardware.go create mode 100644 pkg/apiserver/clusterinfo/hostinfo/cluster_load.go create mode 100644 pkg/apiserver/clusterinfo/hostinfo/hostinfo.go create mode 100644 pkg/apiserver/clusterinfo/statistics.go create mode 100644 pkg/utils/host/host.go create mode 100644 ui/lib/apps/ClusterInfo/components/DiskTable.tsx create mode 100644 ui/lib/apps/ClusterInfo/components/Statistics.module.less create mode 100644 ui/lib/apps/ClusterInfo/components/Statistics.tsx diff --git a/etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.rb b/etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.rb index 8b7a381637..c319df4019 100644 --- a/etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.rb +++ b/etc/manualTestEnv/_shared/Vagrantfile.partial.pubKey.rb @@ -2,23 +2,25 @@ ssh_pub_key = File.readlines("#{File.dirname(__FILE__)}/vagrant_key.pub").first.strip config.vm.box = "hashicorp/bionic64" - config.vm.provision "shell", privileged: false, inline: <<-SHELL + config.vm.provision "zsh", type: "shell", privileged: false, inline: <<-SHELL + echo "Installing zsh" sudo apt install -y zsh sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" sudo chsh -s /usr/bin/zsh vagrant + SHELL + config.vm.provision "private_key", type: "shell", privileged: false, inline: <<-SHELL + echo "Inserting private key" echo #{ssh_pub_key} >> /home/vagrant/.ssh/authorized_keys SHELL - config.vm.provision "shell", privileged: true, inline: <<-SHELL - echo "setting ulimit" - sudo echo "fs.file-max = 65535" >> /etc/sysctl.conf - sudo sysctl -p - sudo echo "* hard nofile 65535" >> 
/etc/security/limits.conf - sudo echo "* soft nofile 65535" >> /etc/security/limits.conf - sudo echo "root hard nofile 65535" >> /etc/security/limits.conf - sudo echo "root hard nofile 65535" >> /etc/security/limits.conf + config.vm.provision "ulimit", type: "shell", privileged: true, inline: <<-SHELL + echo "Setting ulimit" + echo "fs.file-max = 65535" >> /etc/sysctl.conf + sysctl -p + echo "* hard nofile 65535" >> /etc/security/limits.conf + echo "* soft nofile 65535" >> /etc/security/limits.conf + echo "root hard nofile 65535" >> /etc/security/limits.conf + echo "root hard nofile 65535" >> /etc/security/limits.conf SHELL end - -# ulimit ref: https://my.oschina.net/u/914655/blog/3067520 diff --git a/etc/manualTestEnv/complexCase1/README.md b/etc/manualTestEnv/complexCase1/README.md new file mode 100644 index 0000000000..24ea4d0548 --- /dev/null +++ b/etc/manualTestEnv/complexCase1/README.md @@ -0,0 +1,36 @@ +# complexCase1 + +TiDB, PD, TiKV, TiFlash each in different hosts. + +## Usage + +1. Start the box: + + ```bash + VAGRANT_EXPERIMENTAL="disks" vagrant up + ``` + +1. Use [TiUP](https://tiup.io/) to deploy the cluster to the box (only need to do it once): + + ```bash + tiup cluster deploy complexCase1 v4.0.8 topology.yaml -i ../_shared/vagrant_key -y --user vagrant + ``` + +1. Start the cluster in the box: + + ```bash + tiup cluster start complexCase1 + ``` + +1. Start TiDB Dashboard server: + + ```bash + bin/tidb-dashboard --pd http://10.0.1.31:2379 + ``` + +## Cleanup + +```bash +tiup cluster destroy complexCase1 -y +vagrant destroy --force +``` diff --git a/etc/manualTestEnv/complexCase1/Vagrantfile b/etc/manualTestEnv/complexCase1/Vagrantfile new file mode 100644 index 0000000000..b941effb1d --- /dev/null +++ b/etc/manualTestEnv/complexCase1/Vagrantfile @@ -0,0 +1,40 @@ +load "#{File.dirname(__FILE__)}/../_shared/Vagrantfile.partial.pubKey.rb" + +Vagrant.configure("2") do |config| + config.vm.provider "virtualbox" do |v| + v.memory = 1024 + v.cpus = 1 + end + + (1..5).each do |i| + config.vm.define "node#{i}" do |node| + node.vm.network "private_network", ip: "10.0.1.#{i+30}" + (1..4).each do |j| + node.vm.disk :disk, size: "10GB", name: "disk-#{i}-#{j}" + end + end + end + + config.vm.provision "disk", type: "shell", privileged: false, inline: <<-SHELL + echo "Formatting disks" + sudo mkfs.ext4 -j -L hdd1 /dev/sdb + sudo mkfs.ext4 -j -L hdd2 /dev/sdc + sudo mkfs.ext4 -j -L hdd3 /dev/sdd + sudo mkfs.ext4 -j -L hdd4 /dev/sde + + echo "Mounting directories" + sudo mkdir -p /pingcap/tidb-data + echo "/dev/sdb /pingcap/tidb-data ext4 defaults 0 0" | sudo tee -a /etc/fstab + sudo mount /pingcap/tidb-data + + sudo mkdir -p /pingcap/tidb-deploy + sudo mkdir -p /pingcap/tidb-data/tikv-1 + sudo mkdir -p /pingcap/tidb-data/tikv-2 + echo "/dev/sdc /pingcap/tidb-deploy ext4 defaults 0 0" | sudo tee -a /etc/fstab + echo "/dev/sdd /pingcap/tidb-data/tikv-1 ext4 defaults 0 0" | sudo tee -a /etc/fstab + echo "/dev/sde /pingcap/tidb-data/tikv-2 ext4 defaults 0 0" | sudo tee -a /etc/fstab + sudo mount /pingcap/tidb-deploy + sudo mount /pingcap/tidb-data/tikv-1 + sudo mount /pingcap/tidb-data/tikv-2 + SHELL +end diff --git a/etc/manualTestEnv/complexCase1/topology.yaml b/etc/manualTestEnv/complexCase1/topology.yaml new file mode 100644 index 0000000000..8bd2db0e9b --- /dev/null +++ b/etc/manualTestEnv/complexCase1/topology.yaml @@ -0,0 +1,85 @@ +global: + user: tidb + deploy_dir: /pingcap/tidb-deploy + data_dir: /pingcap/tidb-data + +server_configs: + tikv: + server.grpc-concurrency: 1 + 
raftstore.apply-pool-size: 1 + raftstore.store-pool-size: 1 + readpool.unified.max-thread-count: 1 + readpool.storage.use-unified-pool: false + readpool.coprocessor.use-unified-pool: true + storage.block-cache.capacity: 256MB + raftstore.capacity: 5GB + +# Overview: +# 31: 1 PD, 1 TiDB, 2 TiKV +# 32: 1 TiDB, 2 TiKV +# 33: 1 PD, 1 TiFlash +# 34: 2 TiKV, 1 TiFlash +# 35: 1 TiFlash + +pd_servers: + - host: 10.0.1.31 + - host: 10.0.1.33 + +tikv_servers: + - host: 10.0.1.31 + port: 20160 + status_port: 20180 + data_dir: /pingcap/tidb-data/tikv-1/tikv-20160 + config: + server.labels: { host: "tikv1" } + - host: 10.0.1.31 + port: 20161 + status_port: 20181 + data_dir: /pingcap/tidb-data/tikv-2/tikv-20161 + config: + server.labels: { host: "tikv2" } + - host: 10.0.1.32 + port: 20160 + status_port: 20180 + data_dir: /pingcap/tidb-data/tikv-1/tikv-20160 + config: + server.labels: { host: "tikv1" } + - host: 10.0.1.32 + port: 20161 + status_port: 20181 + data_dir: /pingcap/tidb-data/tikv-2/tikv-20161 + config: + server.labels: { host: "tikv2" } + - host: 10.0.1.34 + port: 20160 + status_port: 20180 + data_dir: /pingcap/tidb-data/tikv-1/tikv-20160 + config: + server.labels: { host: "tikv1" } + - host: 10.0.1.34 + port: 20161 + status_port: 20181 + data_dir: /pingcap/tidb-data/tikv-2/tikv-20161 + config: + server.labels: { host: "tikv2" } + +tiflash_servers: + - host: 10.0.1.33 + data_dir: /pingcap/tidb-data/tikv-1/tiflash + - host: 10.0.1.34 + data_dir: /pingcap/tidb-data/tikv-2/tiflash + - host: 10.0.1.35 + data_dir: /pingcap/tidb-data/tikv-1/tiflash + +tidb_servers: + - host: 10.0.1.31 + - host: 10.0.1.32 + +grafana_servers: + - host: 10.0.1.31 + +monitoring_servers: + - host: 10.0.1.31 + +alertmanager_servers: + - host: 10.0.1.31 diff --git a/etc/manualTestEnv/multiHost/README.md b/etc/manualTestEnv/multiHost/README.md index 60b8058c4f..dad3f81b6e 100644 --- a/etc/manualTestEnv/multiHost/README.md +++ b/etc/manualTestEnv/multiHost/README.md @@ -13,7 +13,7 @@ TiDB, PD, TiKV, TiFlash each in different hosts. 1. Use [TiUP](https://tiup.io/) to deploy the cluster to the box (only need to do it once): ```bash - tiup cluster deploy multiHost v4.0.4 topology.yaml -i ../_shared/vagrant_key -y --user vagrant + tiup cluster deploy multiHost v4.0.8 topology.yaml -i ../_shared/vagrant_key -y --user vagrant ``` 1. Start the cluster in the box: diff --git a/etc/manualTestEnv/multiReplica/README.md b/etc/manualTestEnv/multiReplica/README.md index fc31ff5941..32111caa53 100644 --- a/etc/manualTestEnv/multiReplica/README.md +++ b/etc/manualTestEnv/multiReplica/README.md @@ -13,7 +13,7 @@ Multiple TiKV nodes in different labels. 1. Use [TiUP](https://tiup.io/) to deploy the cluster to the box (only need to do it once): ```bash - tiup cluster deploy multiReplica v4.0.4 topology.yaml -i ../_shared/vagrant_key -y --user vagrant + tiup cluster deploy multiReplica v4.0.8 topology.yaml -i ../_shared/vagrant_key -y --user vagrant ``` 1. Start the cluster in the box: diff --git a/etc/manualTestEnv/singleHost/README.md b/etc/manualTestEnv/singleHost/README.md index 4d3f7413ab..d1d8c2b6ad 100644 --- a/etc/manualTestEnv/singleHost/README.md +++ b/etc/manualTestEnv/singleHost/README.md @@ -13,7 +13,7 @@ TiDB, PD, TiKV, TiFlash in the same host. 1. 
Use [TiUP](https://tiup.io/) to deploy the cluster to the box (only need to do it once): ```bash - tiup cluster deploy singleHost v4.0.4 topology.yaml -i ../_shared/vagrant_key -y --user vagrant + tiup cluster deploy singleHost v4.0.8 topology.yaml -i ../_shared/vagrant_key -y --user vagrant ``` 1. Start the cluster in the box: diff --git a/etc/manualTestEnv/singleHostMultiDisk/README.md b/etc/manualTestEnv/singleHostMultiDisk/README.md index 4cb2dd419e..cb9d6eabed 100644 --- a/etc/manualTestEnv/singleHostMultiDisk/README.md +++ b/etc/manualTestEnv/singleHostMultiDisk/README.md @@ -13,7 +13,7 @@ All instances in a single host, but on different disks. 1. Use [TiUP](https://tiup.io/) to deploy the cluster to the box (only need to do it once): ```bash - tiup cluster deploy singleHostMultiDisk v4.0.4 topology.yaml -i ../_shared/vagrant_key -y --user vagrant + tiup cluster deploy singleHostMultiDisk v4.0.8 topology.yaml -i ../_shared/vagrant_key -y --user vagrant ``` 1. Start the cluster in the box: diff --git a/pkg/apiserver/clusterinfo/host.go b/pkg/apiserver/clusterinfo/host.go index fb5ce32323..414982dd34 100644 --- a/pkg/apiserver/clusterinfo/host.go +++ b/pkg/apiserver/clusterinfo/host.go @@ -14,385 +14,83 @@ package clusterinfo import ( - "math" - "path/filepath" - "strconv" - "strings" + "sort" "github.com/jinzhu/gorm" -) - -type CPUUsage struct { - Idle float64 `json:"idle"` - System float64 `json:"system"` -} + "github.com/pingcap/log" + "github.com/thoas/go-funk" + "go.uber.org/zap" -type Memory struct { - Used int `json:"used"` - Total int `json:"total"` -} - -type Partition struct { - Path string `json:"path"` - FSType string `json:"fstype"` - Free int `json:"free"` - Total int `json:"total"` - - ServerType string // identify TiFlash -} - -type HostInfo struct { - IP string `json:"ip"` - CPUCore int `json:"cpu_core,omitempty"` - *CPUUsage `json:"cpu_usage,omitempty"` - *Memory `json:"memory,omitempty"` - Partitions []PartitionInstance `json:"partitions,omitempty"` - Unavailable bool `json:"unavailable"` -} - -type Instance struct { - Address string `gorm:"column:INSTANCE" json:"address"` - ServerType string `gorm:"column:TYPE" json:"server_type"` -} - -type PartitionInstance struct { - Partition `json:"partition"` - Instance `json:"instance"` -} + "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/clusterinfo/hostinfo" + "github.com/pingcap-incubator/tidb-dashboard/pkg/utils/topology" +) -func GetAllHostInfo(db *gorm.DB) ([]HostInfo, error) { - hostMap, err := loadHosts(db) +// fetchAllInstanceHosts fetches all hosts in the cluster and return in ascending order. 
+func (s *Service) fetchAllInstanceHosts() ([]string, error) { + allHostsMap := make(map[string]struct{}) + pdInfo, err := topology.FetchPDTopology(s.params.PDClient) if err != nil { return nil, err } - memory, usages, err := queryClusterLoad(db) - if err != nil { - return nil, err + for _, i := range pdInfo { + allHostsMap[i.IP] = struct{}{} } - cores, hostPartitionMap, err := queryClusterHardware(db) + + tikvInfo, tiFlashInfo, err := topology.FetchStoreTopology(s.params.PDClient) if err != nil { return nil, err } - dataDirMap, err := queryDeployInfo(db) - if err != nil { - return nil, err + for _, i := range tikvInfo { + allHostsMap[i.IP] = struct{}{} } - - infos := make([]HostInfo, 0) - for ip, instances := range hostMap { - var partitions = make([]PartitionInstance, 0) - for _, instance := range instances { - ip := parseIP(instance.Address) - - partitionMap, ok := hostPartitionMap[ip] - if !ok { - continue - } - - if instance.ServerType == "tiflash" { - // Since Multi-path is a common feature in TiFlash, a TiFlash instance may have multiple partitions. - // For now TiFlash will only return the disks it used, so we can just add all partitions. - for _, p := range partitionMap { - if p.ServerType == "tiflash" { - partitions = append(partitions, PartitionInstance{ - Partition: p, - Instance: instance, - }) - } - } - } else { - dataDir, ok := dataDirMap[instance.Address] - if !ok { - continue - } - - partition := inferPartition(dataDir, partitionMap) - partitions = append(partitions, PartitionInstance{ - Partition: partition, - Instance: instance, - }) - } - } - - info := HostInfo{ - IP: ip, - CPUCore: cores[ip], - CPUUsage: usages[ip], - Memory: memory[ip], - Partitions: partitions, - } - infos = append(infos, info) + for _, i := range tiFlashInfo { + allHostsMap[i.IP] = struct{}{} } - return infos, nil -} - -func splitPath(path string) []string { - if path == "" { - return []string{} - } - return strings.Split(path, string(filepath.Separator)) -} - -func inferPartition(dataDir string, diskMap PartitionMap) Partition { - var targetDisk Partition - var minRelativePathLength = math.MaxInt64 - - for _, disk := range diskMap { - rel, err := filepath.Rel(disk.Path, dataDir) - if err != nil { - continue - } - var relativePathLength int - for _, dir := range splitPath(rel) { - if dir == ".." { - relativePathLength = -1 - break - } else { - relativePathLength++ - } - } - if relativePathLength == -1 { - continue - } - if relativePathLength < minRelativePathLength { - minRelativePathLength = relativePathLength - targetDisk = disk - } - } - - return targetDisk -} - -// HostMap map host ip to all instance on it -// e.g. "127.0.0.1" => []Instance{...} -type HostMap map[string][]Instance - -func loadHosts(db *gorm.DB) (HostMap, error) { - hostMap := make(HostMap) - var rows []Instance - if err := db.Table("INFORMATION_SCHEMA.CLUSTER_INFO").Find(&rows).Error; err != nil { + tidbInfo, err := topology.FetchTiDBTopology(s.lifecycleCtx, s.params.EtcdClient) + if err != nil { return nil, err } - for _, row := range rows { - ip := parseIP(row.Address) - instances, ok := hostMap[ip] - if !ok { - instances = []Instance{} - } - - instances = append(instances, Instance{ - Address: row.Address, - ServerType: row.ServerType, - }) - hostMap[ip] = instances + for _, i := range tidbInfo { + allHostsMap[i.IP] = struct{}{} } - return hostMap, nil -} - -func parseIP(addr string) string { - return strings.Split(addr, ":")[0] -} - -// CPUCoreMap map host ip to its cpu logical cores number -// e.g. 
"127.0.0.1" => 8 -type CPUCoreMap map[string]int + allHosts := funk.Keys(allHostsMap).([]string) + sort.Strings(allHosts) -// Memory map host ip to its Memory detail -// e.g. "127.0.0.1" => &Memory{} -type MemoryMap map[string]*Memory - -// CPUUsageMap map host ip to its cpu usage -// e.g. "127.0.0.1" => &CPUUsage{ Idle: 0.1, System: 0.1 } -type CPUUsageMap map[string]*CPUUsage - -type ClusterTableModel struct { - Instance string `gorm:"column:INSTANCE"` - DeviceName string `gorm:"column:DEVICE_NAME"` - DeviceType string `gorm:"column:DEVICE_TYPE"` - Name string `gorm:"column:NAME"` - Value string `gorm:"column:VALUE"` - Type string `gorm:"column:TYPE"` + return allHosts, nil } -const ClusterLoadCondition = "(device_type = 'memory' and device_name = 'virtual') or (device_type = 'cpu' and device_name = 'usage')" - -func queryClusterLoad(db *gorm.DB) (MemoryMap, CPUUsageMap, error) { - memoryMap := make(MemoryMap) - cpuMap := make(CPUUsageMap) - var rows []ClusterTableModel - if err := db.Table("INFORMATION_SCHEMA.CLUSTER_LOAD"). - Where(ClusterLoadCondition).Find(&rows).Error; err != nil { - return nil, nil, err - } - - for _, row := range rows { - switch { - case row.DeviceType == "memory" && row.DeviceName == "virtual": - saveMemory(row, &memoryMap) - case row.DeviceType == "cpu" && row.DeviceName == "usage": - saveCPUUsageMap(row, &cpuMap) - default: - continue - } - } - return memoryMap, cpuMap, nil -} - -func saveMemory(row ClusterTableModel, m *MemoryMap) { - ip := parseIP(row.Instance) - - memory, ok := (*m)[ip] - if !ok { - memory = &Memory{} - (*m)[ip] = memory - } - - var err error - switch row.Name { - case "total": - memory.Total, err = strconv.Atoi(row.Value) - if err != nil { - return - } - case "used": - memory.Used, err = strconv.Atoi(row.Value) - if err != nil { - return - } - default: - return - } -} - -func saveCPUUsageMap(row ClusterTableModel, m *CPUUsageMap) { - ip := parseIP(row.Instance) - - var cpu *CPUUsage - var ok bool - if cpu, ok = (*m)[ip]; !ok { - cpu = &CPUUsage{} - (*m)[ip] = cpu - } - - var err error - switch row.Name { - case "system": - cpu.System, err = strconv.ParseFloat(row.Value, 64) - if err != nil { - return - } - case "idle": - cpu.Idle, err = strconv.ParseFloat(row.Value, 64) - if err != nil { - return - } - default: - return - } -} - -// PartitionMap map partition name to its detail -// e.g. "nvme0n1p1" => Partition{ Path: "/", FSType: "ext4", ... } -type PartitionMap map[string]Partition - -// HostPartition map host ip to all partitions on it -// e.g. "127.0.0.1" => { "nvme0n1p1" => Partition{ Path: "/", FSType: "ext4", ... }, ... 
} -type HostPartitionMap map[string]PartitionMap - -const ClusterHardWareCondition = "(device_type = 'cpu' and name = 'cpu-logical-cores') or (device_type = 'disk')" - -func queryClusterHardware(db *gorm.DB) (CPUCoreMap, HostPartitionMap, error) { - cpuMap := make(CPUCoreMap) - hostMap := make(HostPartitionMap) - var rows []ClusterTableModel - - if err := db.Table("INFORMATION_SCHEMA.CLUSTER_HARDWARE").Where(ClusterHardWareCondition).Find(&rows).Error; err != nil { - return nil, nil, err - } - - for _, row := range rows { - switch { - case row.DeviceType == "cpu" && row.Name == "cpu-logical-cores": - saveCPUCore(row, &cpuMap) - case row.DeviceType == "disk": - savePartition(row, &hostMap) - default: - continue - } - } - return cpuMap, hostMap, nil -} - -func saveCPUCore(row ClusterTableModel, m *CPUCoreMap) { - ip := parseIP(row.Instance) - cores, err := strconv.Atoi(row.Value) +// fetchAllHostsInfo fetches all hosts and their information. +// Note: The returned data and error may both exist. +func (s *Service) fetchAllHostsInfo(db *gorm.DB) ([]*hostinfo.Info, error) { + allHosts, err := s.fetchAllInstanceHosts() if err != nil { - return + return nil, err } - (*m)[ip] = cores -} - -func savePartition(row ClusterTableModel, m *HostPartitionMap) { - ip := parseIP(row.Instance) - partitionMap, ok := (*m)[ip] - if !ok { - partitionMap = make(PartitionMap) + allHostsInfoMap := make(map[string]*hostinfo.Info) + if e := hostinfo.FillFromClusterLoadTable(db, allHostsInfoMap); e != nil { + log.Warn("Failed to read cluster_load table", zap.Error(e)) + err = e } - - partition, ok := partitionMap[row.DeviceName] - if !ok { - partition = Partition{} + if e := hostinfo.FillFromClusterHardwareTable(db, allHostsInfoMap); e != nil && err == nil { + log.Warn("Failed to read cluster_hardware table", zap.Error(e)) + err = e } - - partition.ServerType = row.Type - - var err error - switch row.Name { - case "fstype": - partition.FSType = row.Value - case "path": - partition.Path = row.Value - case "total": - partition.Total, err = strconv.Atoi(row.Value) - if err != nil { - return - } - case "free": - partition.Free, err = strconv.Atoi(row.Value) - if err != nil { - return - } - default: - return - } - - partitionMap[row.DeviceName] = partition - (*m)[ip] = partitionMap -} - -type ClusterConfigModel struct { - Instance string `gorm:"column:INSTANCE"` - Value string `gorm:"column:VALUE"` -} - -// DataDirMap map instance address to its data directory -// e.g. 
"127.0.0.1:20160" => "/tikv/data-dir" -type DataDirMap map[string]string - -const ClusterConfigCondition = "(`type` = 'tidb' and `key` = 'log.file.filename') or (`type` = 'tikv' and `key` = 'storage.data-dir') or (`type` = 'pd' and `key` = 'data-dir')" - -func queryDeployInfo(db *gorm.DB) (DataDirMap, error) { - m := make(DataDirMap) - var rows []ClusterConfigModel - if err := db.Table("INFORMATION_SCHEMA.CLUSTER_CONFIG").Where(ClusterConfigCondition).Find(&rows).Error; err != nil { - return nil, err + if e := hostinfo.FillInstances(db, allHostsInfoMap); e != nil && err == nil { + log.Warn("Failed to fill instances for hosts", zap.Error(e)) + err = e } - for _, row := range rows { - m[row.Instance] = row.Value + r := make([]*hostinfo.Info, 0, len(allHosts)) + for _, host := range allHosts { + if im, ok := allHostsInfoMap[host]; ok { + r = append(r, im) + } else { + // Missing item + r = append(r, hostinfo.NewHostInfo(host)) + } } - return m, nil + return r, err } diff --git a/pkg/apiserver/clusterinfo/hostinfo/cluster_config.go b/pkg/apiserver/clusterinfo/hostinfo/cluster_config.go new file mode 100644 index 0000000000..fb2655de29 --- /dev/null +++ b/pkg/apiserver/clusterinfo/hostinfo/cluster_config.go @@ -0,0 +1,82 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package hostinfo + +import ( + "strings" + + "github.com/jinzhu/gorm" + + "github.com/pingcap-incubator/tidb-dashboard/pkg/utils/host" +) + +type clusterConfigModel struct { + Type string `gorm:"column:TYPE"` + Instance string `gorm:"column:INSTANCE"` + Key string `gorm:"column:KEY"` + Value string `gorm:"column:VALUE"` +} + +func FillInstances(db *gorm.DB, m InfoMap) error { + var rows []clusterConfigModel + if err := db. + Table("INFORMATION_SCHEMA.CLUSTER_CONFIG"). + Where("(`TYPE` = 'tidb' AND `KEY` = 'log.file.filename') " + + "OR (`TYPE` = 'tikv' AND `KEY` = 'storage.data-dir') " + + "OR (`TYPE` = 'pd' AND `KEY` = 'data-dir')"). + Find(&rows).Error; err != nil { + return err + } + + for _, row := range rows { + hostname, _, err := host.ParseHostAndPortFromAddress(row.Instance) + if err != nil { + continue + } + if _, ok := m[hostname]; !ok { + m[hostname] = NewHostInfo(hostname) + } + m[hostname].Instances[row.Instance] = &InstanceInfo{ + Type: row.Type, + PartitionPathL: strings.ToLower(locateInstanceMountPartition(row.Value, m[hostname].Partitions)), + } + } + return nil +} + +// Try to discover which partition this instance is running on. +// If discover failed, empty string will be returned. +func locateInstanceMountPartition(directoryOrFilePath string, partitions map[string]*PartitionInfo) string { + if len(directoryOrFilePath) == 0 { + return "" + } + + maxMatchLen := 0 + maxMatchPath := "" + + directoryOrFilePathL := strings.ToLower(directoryOrFilePath) + + for _, info := range partitions { + // FIXME: This may cause wrong result in case sensitive FS. 
+ if !strings.HasPrefix(directoryOrFilePathL, strings.ToLower(info.Path)) { + continue + } + if len(info.Path) > maxMatchLen { + maxMatchLen = len(info.Path) + maxMatchPath = info.Path + } + } + + return maxMatchPath +} diff --git a/pkg/apiserver/clusterinfo/hostinfo/cluster_hardware.go b/pkg/apiserver/clusterinfo/hostinfo/cluster_hardware.go new file mode 100644 index 0000000000..3657a02b28 --- /dev/null +++ b/pkg/apiserver/clusterinfo/hostinfo/cluster_hardware.go @@ -0,0 +1,149 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package hostinfo + +import ( + "bytes" + "encoding/json" + "strings" + + "github.com/jinzhu/gorm" + + "github.com/pingcap-incubator/tidb-dashboard/pkg/utils/host" +) + +// Used to deserialize from JSON_VALUE +type clusterHardwareCPUInfoModel struct { + LogicalCores int `json:"cpu-logical-cores,string"` + PhysicalCores int `json:"cpu-physical-cores,string"` +} + +// Used to deserialize from JSON_VALUE +type clusterHardwareDiskModel struct { + Path string `json:"path"` + FSType string `json:"fstype"` + Free int `json:"free,string"` + Total int `json:"total,string"` +} + +func FillFromClusterHardwareTable(db *gorm.DB, m InfoMap) error { + var rows []clusterTableModel + + var sqlQuery bytes.Buffer + if err := clusterTableQueryTemplate.Execute(&sqlQuery, map[string]string{ + "tableName": "INFORMATION_SCHEMA.CLUSTER_HARDWARE", + }); err != nil { + panic(err) + } + + if err := db. + Raw(sqlQuery.String(), []string{"cpu", "disk"}). + Scan(&rows).Error; err != nil { + return err + } + + tiFlashDisks := make([]clusterTableModel, 0) + + for _, row := range rows { + hostname, _, err := host.ParseHostAndPortFromAddress(row.Instance) + if err != nil { + continue + } + if _, ok := m[hostname]; !ok { + m[hostname] = NewHostInfo(hostname) + } + + switch { + case row.DeviceType == "cpu" && row.DeviceName == "cpu": + if m[hostname].CPUInfo != nil { + continue + } + var v clusterHardwareCPUInfoModel + err := json.Unmarshal([]byte(row.JSONValue), &v) + if err != nil { + continue + } + m[hostname].CPUInfo = &CPUInfo{ + LogicalCores: v.LogicalCores, + PhysicalCores: v.PhysicalCores, + } + case row.DeviceType == "disk": + if row.Type == "tiflash" { + // Collect TiFlash related information for later processing. + tiFlashDisks = append(tiFlashDisks, row) + } + if m[hostname].PartitionProviderType != "" && m[hostname].PartitionProviderType != row.Type { + // Another instance on the same host has already provided disk information, skip. 
+ continue + } + var v clusterHardwareDiskModel + err := json.Unmarshal([]byte(row.JSONValue), &v) + if err != nil { + continue + } + if m[hostname].PartitionProviderType == "" { + m[hostname].PartitionProviderType = row.Type + } + m[hostname].Partitions[strings.ToLower(v.Path)] = &PartitionInfo{ + Path: v.Path, + FSType: v.FSType, + Free: v.Free, + Total: v.Total, + } + } + } + + // ========================================================================================== + // HACK: TiFlash special logic + // For now, we can only infer TiFlash instances from its reported disk information. + // Due to a bug, TiFlash will return all disks that has the prefix of actual deployed disk. + type tiFlashDiskEntity struct { + maxLen int + maxLenPath string + } + tiFlashDiskInfo := make(map[string]tiFlashDiskEntity) // key is TiFlash instance address + for _, d := range tiFlashDisks { + var v clusterHardwareDiskModel + err := json.Unmarshal([]byte(d.JSONValue), &v) + if err != nil { + continue + } + // For each TiFlash instance, it may report multiple disks. We keep the disk that has longest path. + if _, ok := tiFlashDiskInfo[d.Instance]; !ok { + tiFlashDiskInfo[d.Instance] = tiFlashDiskEntity{ + maxLen: 0, + maxLenPath: "", + } + } + if len(v.Path) > tiFlashDiskInfo[d.Instance].maxLen { + tiFlashDiskInfo[d.Instance] = tiFlashDiskEntity{ + maxLen: len(v.Path), + maxLenPath: v.Path, + } + } + } + // Back fill TiFlash instances + for instance, de := range tiFlashDiskInfo { + hostname, _, err := host.ParseHostAndPortFromAddress(instance) + if err != nil { + panic(err) + } + m[hostname].Instances[instance] = &InstanceInfo{ + Type: "tiflash", + PartitionPathL: strings.ToLower(de.maxLenPath), + } + } + + return nil +} diff --git a/pkg/apiserver/clusterinfo/hostinfo/cluster_load.go b/pkg/apiserver/clusterinfo/hostinfo/cluster_load.go new file mode 100644 index 0000000000..e30582f91a --- /dev/null +++ b/pkg/apiserver/clusterinfo/hostinfo/cluster_load.go @@ -0,0 +1,92 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package hostinfo + +import ( + "bytes" + "encoding/json" + + "github.com/jinzhu/gorm" + + "github.com/pingcap-incubator/tidb-dashboard/pkg/utils/host" +) + +// Used to deserialize from JSON_VALUE +type clusterLoadCPUUsageModel struct { + Idle float64 `json:"idle,string"` + System float64 `json:"system,string"` +} + +// Used to deserialize from JSON_VALUE +type clusterLoadMemoryVirtualModel struct { + Used int `json:"used,string"` + Total int `json:"total,string"` +} + +func FillFromClusterLoadTable(db *gorm.DB, m InfoMap) error { + var rows []clusterTableModel + + var sqlQuery bytes.Buffer + if err := clusterTableQueryTemplate.Execute(&sqlQuery, map[string]string{ + "tableName": "INFORMATION_SCHEMA.CLUSTER_LOAD", + }); err != nil { + panic(err) + } + + if err := db. + Raw(sqlQuery.String(), []string{"memory", "cpu"}). 
+ Scan(&rows).Error; err != nil { + return err + } + + for _, row := range rows { + hostname, _, err := host.ParseHostAndPortFromAddress(row.Instance) + if err != nil { + continue + } + if _, ok := m[hostname]; !ok { + m[hostname] = NewHostInfo(hostname) + } + + switch { + case row.DeviceType == "memory" && row.DeviceName == "virtual": + if m[hostname].MemoryUsage != nil { + continue + } + var v clusterLoadMemoryVirtualModel + err := json.Unmarshal([]byte(row.JSONValue), &v) + if err != nil { + continue + } + m[hostname].MemoryUsage = &MemoryUsageInfo{ + Used: v.Used, + Total: v.Total, + } + case row.DeviceType == "cpu" && row.DeviceName == "usage": + if m[hostname].CPUUsage != nil { + continue + } + var v clusterLoadCPUUsageModel + err := json.Unmarshal([]byte(row.JSONValue), &v) + if err != nil { + continue + } + m[hostname].CPUUsage = &CPUUsageInfo{ + Idle: v.Idle, + System: v.System, + } + } + } + return nil +} diff --git a/pkg/apiserver/clusterinfo/hostinfo/hostinfo.go b/pkg/apiserver/clusterinfo/hostinfo/hostinfo.go new file mode 100644 index 0000000000..97a2d81dd7 --- /dev/null +++ b/pkg/apiserver/clusterinfo/hostinfo/hostinfo.go @@ -0,0 +1,95 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package hostinfo + +import "text/template" + +type CPUUsageInfo struct { + Idle float64 `json:"idle"` + System float64 `json:"system"` +} + +type MemoryUsageInfo struct { + Used int `json:"used"` + Total int `json:"total"` +} + +type CPUInfo struct { + LogicalCores int `json:"logical_cores"` + PhysicalCores int `json:"physical_cores"` + // TODO: Support arch. +} + +type PartitionInfo struct { + Path string `json:"path"` + FSType string `json:"fstype"` + Free int `json:"free"` + Total int `json:"total"` +} + +type InstanceInfo struct { + Type string `json:"type"` + PartitionPathL string `json:"partition_path_lower"` +} + +type Info struct { + Host string `json:"host"` + CPUInfo *CPUInfo `json:"cpu_info"` + CPUUsage *CPUUsageInfo `json:"cpu_usage"` + MemoryUsage *MemoryUsageInfo `json:"memory_usage"` + + // Containing unused partitions. The key is path in lower case. + // Note: deviceName is not used as the key, since TiDB and TiKV may return different deviceName for the same device. + Partitions map[string]*PartitionInfo `json:"partitions"` + // The source instance type that provides the partition info. + PartitionProviderType string `json:"-"` + + // Instances in the current host. The key is instance address + Instances map[string]*InstanceInfo `json:"instances"` +} + +type InfoMap = map[string]*Info + +var clusterTableQueryTemplate = template.Must(template.New("").Parse(` +SELECT + *, + FIELD(LOWER(A.TYPE), 'tiflash', 'tikv', 'pd', 'tidb') AS _ORDER +FROM ( + SELECT + TYPE, INSTANCE, DEVICE_TYPE, DEVICE_NAME, JSON_OBJECTAGG(NAME, VALUE) AS JSON_VALUE + FROM + {{.tableName}} + WHERE + DEVICE_TYPE IN (?) 
+ GROUP BY TYPE, INSTANCE, DEVICE_TYPE, DEVICE_NAME +) AS A +ORDER BY + _ORDER DESC, INSTANCE, DEVICE_TYPE, DEVICE_NAME +`)) + +type clusterTableModel struct { + Type string `gorm:"column:TYPE"` // Example: tidb, tikv + Instance string `gorm:"column:INSTANCE"` // Example: 127.0.0.1:4000 + DeviceType string `gorm:"column:DEVICE_TYPE"` // Example: cpu + DeviceName string `gorm:"column:DEVICE_NAME"` // Example: usage + JSONValue string `gorm:"column:JSON_VALUE"` // Only exists by using `clusterTableQueryTemplate`. +} + +func NewHostInfo(hostname string) *Info { + return &Info{ + Host: hostname, + Partitions: make(map[string]*PartitionInfo), + Instances: make(map[string]*InstanceInfo), + } +} diff --git a/pkg/apiserver/clusterinfo/service.go b/pkg/apiserver/clusterinfo/service.go index 312a479eff..a8fe7506cd 100644 --- a/pkg/apiserver/clusterinfo/service.go +++ b/pkg/apiserver/clusterinfo/service.go @@ -20,7 +20,6 @@ import ( "context" "fmt" "net/http" - "sort" "sync" "time" @@ -28,6 +27,7 @@ import ( "go.etcd.io/etcd/clientv3" "go.uber.org/fx" + "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/clusterinfo/hostinfo" "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/user" "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/utils" "github.com/pingcap-incubator/tidb-dashboard/pkg/httpc" @@ -77,6 +77,7 @@ func RegisterRouter(r *gin.RouterGroup, auth *user.AuthService, s *Service) { endpoint.Use(auth.MWAuthRequired()) endpoint.Use(utils.MWConnectTiDB(s.params.TiDBClient)) endpoint.GET("/all", s.getHostsInfo) + endpoint.GET("/statistics", s.getStatistics) } // @Summary Hide a TiDB instance @@ -233,79 +234,49 @@ func (s *Service) getAlertManagerCounts(c *gin.Context) { c.JSON(http.StatusOK, cnt) } -// @ID getHostsInfo +type GetHostsInfoResponse struct { + Hosts []*hostinfo.Info `json:"hosts"` + Warning *utils.APIError `json:"warning"` +} + +// @ID clusterInfoGetHostsInfo // @Summary Get information of all hosts -// @Description Get information about host in the cluster -// @Success 200 {array} HostInfo // @Router /host/all [get] // @Security JwtAuth +// @Success 200 {object} GetHostsInfoResponse // @Failure 401 {object} utils.APIError "Unauthorized failure" func (s *Service) getHostsInfo(c *gin.Context) { db := utils.GetTiDBConnection(c) - allHostsMap, err := s.fetchAllInstanceHostsMap() - if err != nil { - _ = c.Error(err) - return - } - hostsInfo, err := GetAllHostInfo(db) - if err != nil { + info, err := s.fetchAllHostsInfo(db) + if err != nil && info == nil { _ = c.Error(err) return } - hostsInfoMap := make(map[string]HostInfo) - for _, hi := range hostsInfo { - hostsInfoMap[hi.IP] = hi - } - - hiList := make([]HostInfo, 0, len(hostsInfo)) - for hostIP := range allHostsMap { - if hi, ok := hostsInfoMap[hostIP]; ok { - hiList = append(hiList, hi) - } else { - hiList = append(hiList, HostInfo{ - IP: hostIP, - Unavailable: true, - }) - } + var warning *utils.APIError + if err != nil { + warning = utils.NewAPIError(err) } - sort.Slice(hiList, func(i, j int) bool { - return hiList[i].IP < hiList[j].IP + c.JSON(http.StatusOK, GetHostsInfoResponse{ + Hosts: info, + Warning: warning, }) - - c.JSON(http.StatusOK, hiList) } -func (s *Service) fetchAllInstanceHostsMap() (map[string]struct{}, error) { - allHosts := make(map[string]struct{}) - pdInfo, err := topology.FetchPDTopology(s.params.PDClient) - if err != nil { - return nil, err - } - for _, i := range pdInfo { - allHosts[i.IP] = struct{}{} - } - - tikvInfo, tiFlashInfo, err := 
topology.FetchStoreTopology(s.params.PDClient) - if err != nil { - return nil, err - } - for _, i := range tikvInfo { - allHosts[i.IP] = struct{}{} - } - for _, i := range tiFlashInfo { - allHosts[i.IP] = struct{}{} - } - - tidbInfo, err := topology.FetchTiDBTopology(s.lifecycleCtx, s.params.EtcdClient) +// @ID clusterInfoGetStatistics +// @Summary Get cluster statistics +// @Router /host/statistics [get] +// @Security JwtAuth +// @Success 200 {object} ClusterStatistics +// @Failure 401 {object} utils.APIError "Unauthorized failure" +func (s *Service) getStatistics(c *gin.Context) { + db := utils.GetTiDBConnection(c) + stats, err := s.calculateStatistics(db) if err != nil { - return nil, err - } - for _, i := range tidbInfo { - allHosts[i.IP] = struct{}{} + _ = c.Error(err) + return } - - return allHosts, nil + c.JSON(http.StatusOK, stats) } diff --git a/pkg/apiserver/clusterinfo/statistics.go b/pkg/apiserver/clusterinfo/statistics.go new file mode 100644 index 0000000000..a57fd40fb4 --- /dev/null +++ b/pkg/apiserver/clusterinfo/statistics.go @@ -0,0 +1,191 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package clusterinfo + +import ( + "fmt" + "sort" + + "github.com/jinzhu/gorm" + "github.com/thoas/go-funk" + + "github.com/pingcap-incubator/tidb-dashboard/pkg/apiserver/clusterinfo/hostinfo" + "github.com/pingcap-incubator/tidb-dashboard/pkg/utils/topology" +) + +type ClusterStatisticsPartial struct { + NumberOfHosts int `json:"number_of_hosts"` + NumberOfInstances int `json:"number_of_instances"` + TotalMemoryCapacityBytes int `json:"total_memory_capacity_bytes"` + TotalPhysicalCores int `json:"total_physical_cores"` + TotalLogicalCores int `json:"total_logical_cores"` +} + +type ClusterStatistics struct { + ProbeFailureHosts int `json:"probe_failure_hosts"` + Versions []string `json:"versions"` + TotalStats *ClusterStatisticsPartial `json:"total_stats"` + StatsByInstanceKind map[string]*ClusterStatisticsPartial `json:"stats_by_instance_kind"` +} + +type instanceKindHostImmediateInfo struct { + memoryCapacity int + physicalCores int + logicalCores int +} + +type instanceKindImmediateInfo struct { + instances map[string]struct{} + hosts map[string]*instanceKindHostImmediateInfo +} + +func newInstanceKindImmediateInfo() *instanceKindImmediateInfo { + return &instanceKindImmediateInfo{ + instances: make(map[string]struct{}), + hosts: make(map[string]*instanceKindHostImmediateInfo), + } +} + +func sumInt(array []int) int { + result := 0 + for _, v := range array { + result += v + } + return result +} + +func (info *instanceKindImmediateInfo) ToResult() *ClusterStatisticsPartial { + return &ClusterStatisticsPartial{ + NumberOfHosts: len(funk.Keys(info.hosts).([]string)), + NumberOfInstances: len(funk.Keys(info.instances).([]string)), + TotalMemoryCapacityBytes: sumInt(funk.Map(funk.Values(info.hosts), func(x *instanceKindHostImmediateInfo) int { return x.memoryCapacity }).([]int)), + TotalPhysicalCores: sumInt(funk.Map(funk.Values(info.hosts), func(x *instanceKindHostImmediateInfo) int { return x.physicalCores 
}).([]int)), + TotalLogicalCores: sumInt(funk.Map(funk.Values(info.hosts), func(x *instanceKindHostImmediateInfo) int { return x.logicalCores }).([]int)), + } +} + +func (s *Service) calculateStatistics(db *gorm.DB) (*ClusterStatistics, error) { + globalHostsSet := make(map[string]struct{}) + globalFailureHostsSet := make(map[string]struct{}) + globalVersionsSet := make(map[string]struct{}) + globalInfo := newInstanceKindImmediateInfo() + infoByIk := make(map[string]*instanceKindImmediateInfo) + infoByIk["pd"] = newInstanceKindImmediateInfo() + infoByIk["tidb"] = newInstanceKindImmediateInfo() + infoByIk["tikv"] = newInstanceKindImmediateInfo() + infoByIk["tiflash"] = newInstanceKindImmediateInfo() + + // Fill from topology info + pdInfo, err := topology.FetchPDTopology(s.params.PDClient) + if err != nil { + return nil, err + } + for _, i := range pdInfo { + globalHostsSet[i.IP] = struct{}{} + globalVersionsSet[i.Version] = struct{}{} + globalInfo.instances[fmt.Sprintf("%s:%d", i.IP, i.Port)] = struct{}{} + infoByIk["pd"].instances[fmt.Sprintf("%s:%d", i.IP, i.Port)] = struct{}{} + } + tikvInfo, tiFlashInfo, err := topology.FetchStoreTopology(s.params.PDClient) + if err != nil { + return nil, err + } + for _, i := range tikvInfo { + globalHostsSet[i.IP] = struct{}{} + globalVersionsSet[i.Version] = struct{}{} + globalInfo.instances[fmt.Sprintf("%s:%d", i.IP, i.Port)] = struct{}{} + infoByIk["tikv"].instances[fmt.Sprintf("%s:%d", i.IP, i.Port)] = struct{}{} + } + for _, i := range tiFlashInfo { + globalHostsSet[i.IP] = struct{}{} + globalVersionsSet[i.Version] = struct{}{} + globalInfo.instances[fmt.Sprintf("%s:%d", i.IP, i.Port)] = struct{}{} + infoByIk["tiflash"].instances[fmt.Sprintf("%s:%d", i.IP, i.Port)] = struct{}{} + } + tidbInfo, err := topology.FetchTiDBTopology(s.lifecycleCtx, s.params.EtcdClient) + if err != nil { + return nil, err + } + for _, i := range tidbInfo { + globalHostsSet[i.IP] = struct{}{} + globalVersionsSet[i.Version] = struct{}{} + globalInfo.instances[fmt.Sprintf("%s:%d", i.IP, i.Port)] = struct{}{} + infoByIk["tidb"].instances[fmt.Sprintf("%s:%d", i.IP, i.Port)] = struct{}{} + } + + // Fill from hardware info + allHostsInfoMap := make(map[string]*hostinfo.Info) + if e := hostinfo.FillFromClusterLoadTable(db, allHostsInfoMap); e != nil { + return nil, err + } + if e := hostinfo.FillFromClusterHardwareTable(db, allHostsInfoMap); e != nil { + return nil, err + } + for host, hi := range allHostsInfoMap { + if hi.MemoryUsage.Total > 0 && hi.CPUInfo.PhysicalCores > 0 && hi.CPUInfo.LogicalCores > 0 { + // Put success host info into `globalInfo.hosts`. 
+ globalInfo.hosts[host] = &instanceKindHostImmediateInfo{ + memoryCapacity: hi.MemoryUsage.Total, + physicalCores: hi.CPUInfo.PhysicalCores, + logicalCores: hi.CPUInfo.LogicalCores, + } + } + } + + // Fill hosts in each instance kind according to the global hosts info + for _, i := range pdInfo { + if v, ok := globalInfo.hosts[i.IP]; ok { + infoByIk["pd"].hosts[i.IP] = v + } else { + globalFailureHostsSet[i.IP] = struct{}{} + } + } + for _, i := range tikvInfo { + if v, ok := globalInfo.hosts[i.IP]; ok { + infoByIk["tikv"].hosts[i.IP] = v + } else { + globalFailureHostsSet[i.IP] = struct{}{} + } + } + for _, i := range tiFlashInfo { + if v, ok := globalInfo.hosts[i.IP]; ok { + infoByIk["tiflash"].hosts[i.IP] = v + } else { + globalFailureHostsSet[i.IP] = struct{}{} + } + } + for _, i := range tidbInfo { + if v, ok := globalInfo.hosts[i.IP]; ok { + infoByIk["tidb"].hosts[i.IP] = v + } else { + globalFailureHostsSet[i.IP] = struct{}{} + } + } + + // Generate result.. + versions := funk.Keys(globalVersionsSet).([]string) + sort.Strings(versions) + + statsByIk := make(map[string]*ClusterStatisticsPartial) + for ik, info := range infoByIk { + statsByIk[ik] = info.ToResult() + } + + return &ClusterStatistics{ + ProbeFailureHosts: len(funk.Keys(globalFailureHostsSet).([]string)), + Versions: versions, + TotalStats: globalInfo.ToResult(), + StatsByInstanceKind: statsByIk, + }, nil +} diff --git a/pkg/utils/host/host.go b/pkg/utils/host/host.go new file mode 100644 index 0000000000..8d4ab16e4a --- /dev/null +++ b/pkg/utils/host/host.go @@ -0,0 +1,49 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package host + +import ( + "fmt" + "net" + "net/url" + "strconv" + "strings" +) + +// address should be like "ip:port" as "127.0.0.1:2379". +// return error if string is not like "ip:port". +func ParseHostAndPortFromAddress(address string) (string, uint, error) { + host, port, err := net.SplitHostPort(address) + if err != nil { + return "", 0, fmt.Errorf("invalid address: %v", err) + } + portNumeric, err := strconv.Atoi(port) + if err != nil || portNumeric == 0 { + return "", 0, fmt.Errorf("invalid address: invalid port") + } + return strings.ToLower(host), uint(portNumeric), nil +} + +// address should be like "protocol://ip:port" as "http://127.0.0.1:2379". 
+func ParseHostAndPortFromAddressURL(urlString string) (string, uint, error) { + u, err := url.Parse(urlString) + if err != nil { + return "", 0, fmt.Errorf("invalid address: %v", err) + } + port, err := strconv.Atoi(u.Port()) + if err != nil || port == 0 { + return "", 0, fmt.Errorf("invalid address: invalid port") + } + return strings.ToLower(u.Hostname()), uint(port), nil +} diff --git a/pkg/utils/topology/pd.go b/pkg/utils/topology/pd.go index eadf6e8777..94a38abb6c 100644 --- a/pkg/utils/topology/pd.go +++ b/pkg/utils/topology/pd.go @@ -22,6 +22,7 @@ import ( "go.uber.org/zap" "github.com/pingcap-incubator/tidb-dashboard/pkg/pd" + "github.com/pingcap-incubator/tidb-dashboard/pkg/utils/host" ) func FetchPDTopology(pdClient *pd.Client) ([]PDInfo, error) { @@ -53,7 +54,7 @@ func FetchPDTopology(pdClient *pd.Client) ([]PDInfo, error) { for _, ds := range ds.Members { u := ds.ClientUrls[0] - host, port, err := parseHostAndPortFromAddressURL(u) + hostname, port, err := host.ParseHostAndPortFromAddressURL(u) if err != nil { continue } @@ -74,7 +75,7 @@ func FetchPDTopology(pdClient *pd.Client) ([]PDInfo, error) { nodes = append(nodes, PDInfo{ GitHash: ds.GitHash, Version: ds.BinaryVersion, - IP: host, + IP: hostname, Port: port, DeployPath: ds.DeployPath, Status: storeStatus, diff --git a/pkg/utils/topology/store.go b/pkg/utils/topology/store.go index 1750fe3a63..9d700736f4 100644 --- a/pkg/utils/topology/store.go +++ b/pkg/utils/topology/store.go @@ -22,6 +22,7 @@ import ( "go.uber.org/zap" "github.com/pingcap-incubator/tidb-dashboard/pkg/pd" + "github.com/pingcap-incubator/tidb-dashboard/pkg/utils/host" ) // FetchStoreTopology returns TiKV info and TiFlash info. @@ -84,12 +85,12 @@ func FetchStoreLocation(pdClient *pd.Client) (*StoreLocation, error) { func buildStoreTopology(stores []store) []StoreInfo { nodes := make([]StoreInfo, 0, len(stores)) for _, v := range stores { - host, port, err := parseHostAndPortFromAddress(v.Address) + hostname, port, err := host.ParseHostAndPortFromAddress(v.Address) if err != nil { log.Warn("Failed to parse store address", zap.Any("store", v)) continue } - _, statusPort, err := parseHostAndPortFromAddress(v.StatusAddress) + _, statusPort, err := host.ParseHostAndPortFromAddress(v.StatusAddress) if err != nil { log.Warn("Failed to parse store status address", zap.Any("store", v)) continue @@ -102,7 +103,7 @@ func buildStoreTopology(stores []store) []StoreInfo { } node := StoreInfo{ Version: version, - IP: host, + IP: hostname, Port: port, GitHash: v.GitHash, DeployPath: v.DeployPath, diff --git a/pkg/utils/topology/tidb.go b/pkg/utils/topology/tidb.go index ed8f8fc8d8..ebab743d8d 100644 --- a/pkg/utils/topology/tidb.go +++ b/pkg/utils/topology/tidb.go @@ -24,6 +24,8 @@ import ( "github.com/pingcap/log" "go.etcd.io/etcd/clientv3" "go.uber.org/zap" + + "github.com/pingcap-incubator/tidb-dashboard/pkg/utils/host" ) const tidbTopologyKeyPrefix = "/topology/tidb/" @@ -114,7 +116,7 @@ func parseTiDBInfo(address string, value []byte) (*TiDBInfo, error) { if err != nil { return nil, ErrInvalidTopologyData.Wrap(err, "TiDB info unmarshal failed") } - host, port, err := parseHostAndPortFromAddress(address) + hostname, port, err := host.ParseHostAndPortFromAddress(address) if err != nil { return nil, ErrInvalidTopologyData.Wrap(err, "TiDB info address parse failed") } @@ -122,7 +124,7 @@ func parseTiDBInfo(address string, value []byte) (*TiDBInfo, error) { return &TiDBInfo{ GitHash: ds.GitHash, Version: ds.Version, - IP: host, + IP: hostname, Port: 
port, DeployPath: ds.DeployPath, Status: ComponentStatusUnreachable, diff --git a/pkg/utils/topology/topology.go b/pkg/utils/topology/topology.go index d3b904750a..60cee5669f 100644 --- a/pkg/utils/topology/topology.go +++ b/pkg/utils/topology/topology.go @@ -16,10 +16,6 @@ package topology import ( "context" "encoding/json" - "fmt" - "net/url" - "strconv" - "strings" "time" "github.com/joomcode/errorx" @@ -36,33 +32,6 @@ var ( const defaultFetchTimeout = 2 * time.Second -// address should be like "ip:port" as "127.0.0.1:2379". -// return error if string is not like "ip:port". -func parseHostAndPortFromAddress(address string) (string, uint, error) { - addresses := strings.Split(address, ":") - if len(addresses) != 2 { - return "", 0, fmt.Errorf("invalid address %s", address) - } - port, err := strconv.Atoi(addresses[1]) - if err != nil { - return "", 0, err - } - return addresses[0], uint(port), nil -} - -// address should be like "protocol://ip:port" as "http://127.0.0.1:2379". -func parseHostAndPortFromAddressURL(urlString string) (string, uint, error) { - u, err := url.Parse(urlString) - if err != nil { - return "", 0, err - } - port, err := strconv.Atoi(u.Port()) - if err != nil { - return "", 0, err - } - return u.Hostname(), uint(port), nil -} - func fetchStandardComponentTopology(ctx context.Context, componentName string, etcdClient *clientv3.Client) (*StandardComponentInfo, error) { ctx2, cancel := context.WithTimeout(ctx, defaultFetchTimeout) defer cancel() diff --git a/ui/lib/apps/ClusterInfo/components/DiskTable.tsx b/ui/lib/apps/ClusterInfo/components/DiskTable.tsx new file mode 100644 index 0000000000..e7a9be879b --- /dev/null +++ b/ui/lib/apps/ClusterInfo/components/DiskTable.tsx @@ -0,0 +1,212 @@ +import { Tooltip, Typography } from 'antd' +import React, { useMemo } from 'react' +import { useTranslation } from 'react-i18next' +import { getValueFormat } from '@baurine/grafana-value-formats' +import client, { HostinfoInfo, HostinfoPartitionInfo } from '@lib/client' +import { Bar, CardTable } from '@lib/components' +import { useClientRequest } from '@lib/utils/useClientRequest' +import { IColumn } from 'office-ui-fabric-react/lib/DetailsList' +import { + InstanceKind, + InstanceKinds, + InstanceKindName, +} from '@lib/utils/instanceTable' +import { WarningOutlined } from '@ant-design/icons' + +interface IExpandedDiskItem extends HostinfoPartitionInfo { + key: string + host?: string + instancesCount: Record +} + +function expandDisksItems(rows: HostinfoInfo[]): IExpandedDiskItem[] { + const expanded: IExpandedDiskItem[] = [] + rows.forEach((row) => { + const instancesPerPartition: Record< + string, + Record + > = {} + + let partitions = 0 + + Object.values(row.instances ?? {}).forEach((i) => { + if (!i) { + return + } + if (!instancesPerPartition[i.partition_path_lower!]) { + instancesPerPartition[i.partition_path_lower!] = { + pd: 0, + tidb: 0, + tikv: 0, + tiflash: 0, + } + } + instancesPerPartition[i.partition_path_lower!][i.type!]++ + }) + + for (let pathL in row.partitions) { + const instancesCount = instancesPerPartition[pathL] + if (!instancesCount) { + // This partition does not have deployed instances, skip + continue + } + const partition = row.partitions[pathL] + expanded.push({ + key: `${row.host} ${pathL}`, + host: row.host, + instancesCount, + ...partition, + }) + partitions++ + } + + if (partitions === 0) { + // Supply dummy item.. + expanded.push({ + key: row.host ?? 
'', + host: row.host, + instancesCount: { + pd: 0, + tidb: 0, + tikv: 0, + tiflash: 0, + }, + }) + } + }) + return expanded +} + +export default function HostTable() { + const { t } = useTranslation() + + const { data, isLoading, error } = useClientRequest((reqConfig) => + client.getInstance().clusterInfoGetHostsInfo(reqConfig) + ) + + const diskData = useMemo(() => expandDisksItems(data?.hosts ?? []), [data]) + + const columns: IColumn[] = useMemo( + () => [ + { + name: t('cluster_info.list.disk_table.columns.host'), + key: 'host', + minWidth: 100, + maxWidth: 150, + onRender: (row: IExpandedDiskItem) => { + if (!row.free) { + return ( + + + {row.host} + + + ) + } + return ( + + {row.host} + + ) + }, + }, + { + name: t('cluster_info.list.disk_table.columns.mount_dir'), + key: 'mount_dir', + minWidth: 150, + maxWidth: 200, + onRender: (row: IExpandedDiskItem) => { + if (!row.path) { + return + } + return ( + + {row.path} + + ) + }, + }, + { + name: t('cluster_info.list.disk_table.columns.fs'), + key: 'fs', + minWidth: 50, + maxWidth: 100, + onRender: (row: IExpandedDiskItem) => { + return row.fstype?.toUpperCase() ?? '' + }, + }, + { + name: t('cluster_info.list.disk_table.columns.disk_size'), + key: 'disk_size', + minWidth: 60, + maxWidth: 100, + onRender: (row: IExpandedDiskItem) => { + if (!row.total) { + return + } + return getValueFormat('bytes')(row.total, 1) + }, + }, + { + name: t('cluster_info.list.disk_table.columns.disk_usage'), + key: 'disk_usage', + minWidth: 100, + maxWidth: 150, + onRender: (row: IExpandedDiskItem) => { + if (!row.total || !row.free) { + return + } + const total = row.total + const free = row.free + const used = total - free + const usedPercent = (used / total).toFixed(3) + const tooltipContent = ( + + Used: {getValueFormat('bytes')(used, 1)} ( + {getValueFormat('percentunit')(+usedPercent, 1)}) + + ) + return ( + + + + ) + }, + }, + { + name: t('cluster_info.list.disk_table.columns.instances'), + key: 'instances', + minWidth: 100, + maxWidth: 200, + onRender: (row: IExpandedDiskItem) => { + const item = InstanceKinds.map((ik) => { + if (row.instancesCount[ik] > 0) { + return `${row.instancesCount[ik]} ${InstanceKindName[ik]}` + } else { + return '' + } + }) + const content = item.filter((v) => v.length > 0).join(', ') + return ( + + {content} + + ) + }, + }, + ], + [t] + ) + + return ( + + ) +} diff --git a/ui/lib/apps/ClusterInfo/components/HostTable.tsx b/ui/lib/apps/ClusterInfo/components/HostTable.tsx index 634dbd996a..49cbfc9d5f 100644 --- a/ui/lib/apps/ClusterInfo/components/HostTable.tsx +++ b/ui/lib/apps/ClusterInfo/components/HostTable.tsx @@ -1,212 +1,200 @@ import { Tooltip, Typography } from 'antd' -import React from 'react' +import React, { useMemo } from 'react' import { useTranslation } from 'react-i18next' import { red } from '@ant-design/colors' -import { WarningOutlined } from '@ant-design/icons' import { getValueFormat } from '@baurine/grafana-value-formats' - -import client from '@lib/client' +import client, { HostinfoInfo } from '@lib/client' import { Bar, CardTable, Pre } from '@lib/components' import { useClientRequest } from '@lib/utils/useClientRequest' +import { IColumn } from 'office-ui-fabric-react/lib/DetailsList' +import { + InstanceKind, + InstanceKinds, + InstanceKindName, +} from '@lib/utils/instanceTable' +import { WarningOutlined } from '@ant-design/icons' -const { Text } = Typography +interface IExpandedHostItem extends HostinfoInfo { + key: string + instancesCount: Record +} -function filterUniquePartitions(items) { - 
return items.filter( - (x, i, a) => a.findIndex((y) => y.partition.path === x.partition.path) === i - ) +function expandHostItems(rows: HostinfoInfo[]): IExpandedHostItem[] { + const expanded: IExpandedHostItem[] = [] + rows.forEach((row) => { + const instancesCount: Record = { + pd: 0, + tidb: 0, + tikv: 0, + tiflash: 0, + } + + Object.values(row.instances ?? {}).forEach((i) => { + if (!i) { + return + } + instancesCount[i.type!]++ + }) + + expanded.push({ + key: row.host ?? '', + instancesCount, + ...row, + }) + }) + return expanded } export default function HostTable() { const { t } = useTranslation() - const { data: tableData, isLoading, error } = useClientRequest((reqConfig) => - client.getInstance().getHostsInfo(reqConfig) + const { data, isLoading, error } = useClientRequest((reqConfig) => + client.getInstance().clusterInfoGetHostsInfo(reqConfig) ) - const columns = [ - { - name: t('cluster_info.list.host_table.columns.ip'), - key: 'ip', - minWidth: 100, - maxWidth: 150, - onRender: ({ ip, unavailable }) => { - if (unavailable) { + const hostData = useMemo(() => expandHostItems(data?.hosts ?? []), [data]) + + const columns: IColumn[] = useMemo( + () => [ + { + name: t('cluster_info.list.host_table.columns.host'), + key: 'host', + minWidth: 100, + maxWidth: 150, + onRender: (row: IExpandedHostItem) => { + if (!row.cpu_info) { + // We assume that CPU info must be successfully retrieved. + return ( + + + {row.host} + + + ) + } return ( - - - {ip} - + + {row.host} ) - } - return ip + }, }, - }, - { - name: t('cluster_info.list.host_table.columns.cpu'), - key: 'cpu_core', - minWidth: 60, - maxWidth: 100, - onRender: ({ cpu_core }) => - cpu_core !== undefined ? `${cpu_core} vCPU` : '', - }, - { - name: t('cluster_info.list.host_table.columns.cpu_usage'), - key: 'cpu_usage', - minWidth: 100, - maxWidth: 150, - onRender: ({ cpu_usage }) => { - if (cpu_usage === undefined) { - return - } - const { system, idle } = cpu_usage - const user = 1 - system - idle - const tooltipContent = ` -User: ${getValueFormat('percentunit')(user)} -System: ${getValueFormat('percentunit')(system)}` - return ( - {tooltipContent.trim()}}> - - - ) - }, - }, - { - name: t('cluster_info.list.host_table.columns.memory'), - key: 'memory', - minWidth: 60, - maxWidth: 100, - onRender: ({ memory }) => - memory !== undefined ? getValueFormat('bytes')(memory.total, 1) : '', - }, - { - name: t('cluster_info.list.host_table.columns.memory_usage'), - key: 'memory_usage', - minWidth: 100, - maxWidth: 150, - onRender: ({ memory }) => { - if (memory === undefined) { - return - } - const { total, used } = memory - const usedPercent = (used / total).toFixed(3) - const title = ( -
- Used: {getValueFormat('bytes')(used, 1)} ( - {getValueFormat('percentunit')(+usedPercent, 1)}) -
- ) - return ( - - - - ) - }, - }, - { - name: t('cluster_info.list.host_table.columns.deploy'), - key: 'deploy', - minWidth: 100, - maxWidth: 200, - onRender: ({ partitions }) => { - if (partitions === undefined || partitions.length === 0) { - return - } - const serverTotal = { - tidb: 0, - tikv: 0, - pd: 0, - tiflash: 0, - } - return filterUniquePartitions(partitions).map((partition, i) => { - const currentMountPoint = partition.partition.path - partitions.forEach((item) => { - if (item.partition.path !== currentMountPoint) { - return - } - serverTotal[item.instance.server_type]++ - }) - const serverInfos: string[] = [] - if (serverTotal.tidb > 0) { - serverInfos.push(`${serverTotal.tidb} TiDB`) - } - if (serverTotal.tikv > 0) { - serverInfos.push(`${serverTotal.tikv} TiKV`) - } - if (serverTotal.pd > 0) { - serverInfos.push(`${serverTotal.pd} PD`) + { + name: t('cluster_info.list.host_table.columns.cpu'), + key: 'cpu', + minWidth: 100, + maxWidth: 150, + onRender: (row: IExpandedHostItem) => { + const { cpu_info: c } = row + if (!c) { + return } - if (serverTotal.tiflash > 0) { - serverInfos.push(`${serverTotal.tiflash} TiFlash`) - } - const content = `${serverInfos.join( - ',' - )}: ${partition.partition.fstype.toUpperCase()} ${currentMountPoint}` + const tooltipContent = ` +Physical Cores: ${c.physical_cores} +Logical Cores: ${c.logical_cores}` return ( - -
{content}
+ {tooltipContent.trim()}}> + {`${c.physical_cores!} (${c.logical_cores!} vCore)`} ) - }) + }, }, - }, - { - name: t('cluster_info.list.host_table.columns.disk_size'), - key: 'disk_size', - minWidth: 80, - maxWidth: 100, - onRender: ({ partitions }) => { - if (partitions === undefined || partitions.length === 0) { - return - } - return filterUniquePartitions(partitions).map((partiton, i) => { + { + name: t('cluster_info.list.host_table.columns.cpu_usage'), + key: 'cpu_usage', + minWidth: 100, + maxWidth: 150, + onRender: (row: IExpandedHostItem) => { + if (!row.cpu_usage) { + return + } + const system = row.cpu_usage.system ?? 0 + const idle = row.cpu_usage.idle ?? 1 + const user = 1 - system - idle + const tooltipContent = ` +User: ${getValueFormat('percentunit')(user)} +System: ${getValueFormat('percentunit')(system)}` return ( -
- {getValueFormat('bytes')(partiton.partition.total, 1)} -
+ {tooltipContent.trim()}}> + + ) - }) + }, + }, + { + name: t('cluster_info.list.host_table.columns.memory'), + key: 'memory', + minWidth: 60, + maxWidth: 100, + onRender: (row: IExpandedHostItem) => { + if (!row.memory_usage) { + return + } + return getValueFormat('bytes')(row.memory_usage.total ?? 0, 1) + }, }, - }, - { - name: t('cluster_info.list.host_table.columns.disk_usage'), - key: 'disk_usage', - minWidth: 100, - maxWidth: 150, - onRender: ({ partitions }) => { - if (partitions === undefined || partitions.length === 0) { - return - } - return filterUniquePartitions(partitions).map((partiton, i) => { - const { total, free } = partiton.partition - const used = total - free - const usedPercent = (used / total).toFixed(3) + { + name: t('cluster_info.list.host_table.columns.memory_usage'), + key: 'memory_usage', + minWidth: 100, + maxWidth: 150, + onRender: (row: IExpandedHostItem) => { + if (!row.memory_usage) { + return + } + const { total, used } = row.memory_usage + const usedPercent = (used! / total!).toFixed(3) const title = (
- Used: {getValueFormat('bytes')(used, 1)} ( + Used: {getValueFormat('bytes')(used!, 1)} ( {getValueFormat('percentunit')(+usedPercent, 1)})
) return ( - - + + + + ) + }, + }, + { + name: t('cluster_info.list.host_table.columns.instances'), + key: 'instances', + minWidth: 100, + maxWidth: 200, + onRender: (row: IExpandedHostItem) => { + const item = InstanceKinds.map((ik) => { + if (row.instancesCount[ik] > 0) { + return `${row.instancesCount[ik]} ${InstanceKindName[ik]}` + } else { + return '' + } + }) + const content = item.filter((v) => v.length > 0).join(', ') + return ( + + {content} ) - }) + }, }, - }, - ] + ], + [t] + ) return ( ) } diff --git a/ui/lib/apps/ClusterInfo/components/Statistics.module.less b/ui/lib/apps/ClusterInfo/components/Statistics.module.less new file mode 100644 index 0000000000..6372349792 --- /dev/null +++ b/ui/lib/apps/ClusterInfo/components/Statistics.module.less @@ -0,0 +1,8 @@ +@import '~antd/es/style/themes/default.less'; + +// FIXME: We should not provide padding for CardTab content, so that user +// can control whether a padding is needed. For example, to a . +.content { + margin-left: -@padding-page; + margin-right: -@padding-page; +} diff --git a/ui/lib/apps/ClusterInfo/components/Statistics.tsx b/ui/lib/apps/ClusterInfo/components/Statistics.tsx new file mode 100644 index 0000000000..0d8c99693a --- /dev/null +++ b/ui/lib/apps/ClusterInfo/components/Statistics.tsx @@ -0,0 +1,102 @@ +import React from 'react' +import { useClientRequest } from '@lib/utils/useClientRequest' +import client, { ClusterinfoClusterStatisticsPartial } from '@lib/client' +import { AnimatedSkeleton, ErrorBar, Descriptions, Card } from '@lib/components' +import { useTranslation } from 'react-i18next' +import { getValueFormat } from '@baurine/grafana-value-formats' +import { Alert } from 'antd' + +import styles from './Statistics.module.less' +import { InstanceKinds, InstanceKindName } from '@lib/utils/instanceTable' + +function PartialInfo({ data }: { data?: ClusterinfoClusterStatisticsPartial }) { + const { t } = useTranslation() + return ( + + + {data?.number_of_instances ?? 'Unknown'} + + + {data?.number_of_hosts ?? 'Unknown'} + + + {getValueFormat('bytes')(data?.total_memory_capacity_bytes ?? 0, 1)} + + + {data?.total_physical_cores ?? 'Unknown'} + + + {data?.total_logical_cores ?? 'Unknown'} + + + ) +} + +export default function Statistics() { + const { data, isLoading, error } = useClientRequest((reqConfig) => + client.getInstance().clusterInfoGetStatistics(reqConfig) + ) + const { t } = useTranslation() + + return ( + + {error && } + {data && ( +
+ {(data.probe_failure_hosts ?? 0) > 0 && ( + + + + )} + + + + {(data.versions ?? []).join(', ')} + + + + + + + + {InstanceKinds.map((ik) => { + return ( + + + + ) + })} +
+ )} +
+ ) +} diff --git a/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx b/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx index c4d80205a2..61b949b337 100644 --- a/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx +++ b/ui/lib/apps/ClusterInfo/components/StoreLocationTree/index.tsx @@ -7,6 +7,7 @@ import { QuestionCircleOutlined, } from '@ant-design/icons' import { Space, Tooltip } from 'antd' +import { cyan } from '@ant-design/colors' import { useTranslation } from 'react-i18next' export interface IStoreLocationProps { @@ -67,8 +68,7 @@ export default function StoreLocationTree({ const gLink = bound .append('g') .attr('fill', 'none') - .attr('stroke', '#555') - .attr('stroke-opacity', 0.4) + .attr('stroke', cyan[3]) .attr('stroke-width', 2) const gNode = bound .append('g') @@ -160,14 +160,15 @@ export default function StoreLocationTree({ nodeEnter .append('circle') - .attr('r', 6) - .attr('fill', (d: any) => (d._children ? '#ff4d4f' : '#3351ff')) - .attr('stroke-width', 10) + .attr('r', 8) + .attr('fill', '#fff') + .attr('stroke', (d: any) => (d._children ? cyan[5] : '#ddd')) + .attr('stroke-width', 3) nodeEnter .append('text') .attr('dy', '0.31em') - .attr('x', (d: any) => (d._children ? -8 : 8)) + .attr('x', (d: any) => (d._children ? -15 : 15)) .attr('text-anchor', (d: any) => (d._children ? 'end' : 'start')) .text(({ data: { name, value } }: any) => { if (value) { diff --git a/ui/lib/apps/ClusterInfo/pages/List.tsx b/ui/lib/apps/ClusterInfo/pages/List.tsx index b4ead3a335..4abb964c97 100644 --- a/ui/lib/apps/ClusterInfo/pages/List.tsx +++ b/ui/lib/apps/ClusterInfo/pages/List.tsx @@ -8,8 +8,10 @@ import { Card } from '@lib/components' import CardTabs from '@lib/components/CardTabs' import HostTable from '../components/HostTable' +import DiskTable from '../components/DiskTable' import InstanceTable from '../components/InstanceTable' import StoreLocation from '../components/StoreLocation' +import Statistics from '../components/Statistics' function renderTabBar(props, DefaultTabBar) { return ( @@ -47,12 +49,24 @@ export default function ListPage() { > + + + + + +
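The payload behind the new Statistics tab mirrors the `ClusterStatistics` / `ClusterStatisticsPartial` structs added in `pkg/apiserver/clusterinfo/statistics.go`. A minimal sketch, with made-up numbers, of the shape returned by `clusterInfoGetStatistics` and of why the per-kind host counts can add up to more than the cluster summary when several instance kinds share a host:

```typescript
// Hypothetical response, shaped after ClusterStatistics and
// ClusterStatisticsPartial in statistics.go. All numbers are made up.
interface StatisticsPartial {
  number_of_hosts: number
  number_of_instances: number
  total_memory_capacity_bytes: number
  total_physical_cores: number
  total_logical_cores: number
}

const GiB = 1024 * 1024 * 1024

const sample: {
  probe_failure_hosts: number
  versions: string[]
  total_stats: StatisticsPartial
  stats_by_instance_kind: Record<string, StatisticsPartial>
} = {
  probe_failure_hosts: 0,
  versions: ['v4.0.8'],
  total_stats: {
    number_of_hosts: 2,
    number_of_instances: 5,
    total_memory_capacity_bytes: 32 * GiB,
    total_physical_cores: 16,
    total_logical_cores: 32,
  },
  stats_by_instance_kind: {
    pd: { number_of_hosts: 1, number_of_instances: 1, total_memory_capacity_bytes: 16 * GiB, total_physical_cores: 8, total_logical_cores: 16 },
    tidb: { number_of_hosts: 1, number_of_instances: 1, total_memory_capacity_bytes: 16 * GiB, total_physical_cores: 8, total_logical_cores: 16 },
    tikv: { number_of_hosts: 2, number_of_instances: 3, total_memory_capacity_bytes: 32 * GiB, total_physical_cores: 16, total_logical_cores: 32 },
  },
}

// Hosts are counted once per instance kind, so summing per-kind host counts
// double-counts hosts that run several kinds: here 1 + 1 + 2 = 4 > 2.
const perKindHostSum = Object.values(sample.stats_by_instance_kind).reduce(
  (acc, p) => acc + p.number_of_hosts,
  0
)
console.log(perKindHostSum, sample.total_stats.number_of_hosts)
```

This is the behaviour the `sub_statistics` message in the translations below warns about.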
diff --git a/ui/lib/apps/ClusterInfo/translations/en.yaml b/ui/lib/apps/ClusterInfo/translations/en.yaml index fbd363c8ad..3465bf87b4 100644 --- a/ui/lib/apps/ClusterInfo/translations/en.yaml +++ b/ui/lib/apps/ClusterInfo/translations/en.yaml @@ -17,17 +17,37 @@ cluster_info: host_table: title: Hosts columns: - ip: Address + host: Host Address cpu: CPU cpu_usage: CPU Usage memory: Memory memory_usage: Memory Usage - deploy: Disk - disk_size: Disk Size + instances: Instances + instanceUnavailable: Host information is unavailable due to instances on the host is down + disk_table: + title: Disks + columns: + host: Host Address + mount_dir: Mount Directory + fs: File System + disk_size: Disk Capacity disk_usage: Disk Usage - instanceUnavailable: Host information is unknow due to instance unreachable + instances: Instances store_topology: title: Store Topology tooltip: You can also zoom in or out by pressing CTRL and scrolling mouse + statistics: + title: Statistics + summary_title: Cluster Summary + field: + version: Version + instances: '# Instances' + hosts: '# Hosts that instances deployed' + memory_capacity: Σ Memory capacity (of all hosts) + physical_cores: Σ CPU physical cores (of all hosts) + logical_cores: Σ CPU logical cores (of all hosts) + message: + instance_down: 'Some instances are down in {{n}} host(s) so that host related information may be inccurate.' + sub_statistics: Sub-statistics below are counted by instance kinds. The sum of host metrics in sub-statistics can be larger "Cluster Summary" when different instances are deployed in the same host. error: load: 'Load component {{comp}} error: {{cause}}' diff --git a/ui/lib/apps/ClusterInfo/translations/zh.yaml b/ui/lib/apps/ClusterInfo/translations/zh.yaml index e3e916b646..8a7deefa1c 100644 --- a/ui/lib/apps/ClusterInfo/translations/zh.yaml +++ b/ui/lib/apps/ClusterInfo/translations/zh.yaml @@ -17,17 +17,37 @@ cluster_info: host_table: title: 主机 columns: - ip: 主机地址 + host: 主机地址 cpu: CPU cpu_usage: CPU 使用率 memory: 物理内存 memory_usage: 内存使用率 - deploy: 部署磁盘 + instances: 实例 + instanceUnavailable: 由于该主机上没有实例存活,因此无法获取主机信息 + disk_table: + title: 磁盘 + columns: + host: 主机地址 + mount_dir: 磁盘挂载点 + fs: 文件系统 disk_size: 磁盘容量 disk_usage: 磁盘使用率 - instanceUnavailable: 获取该主机信息失败:无法访问实例 + instances: 实例 store_topology: title: 存储拓扑 tooltip: 按住 Ctrl 键并滑动鼠标滚轮可以缩放 + statistics: + title: 统计 + summary_title: 集群总计 + field: + version: 版本 + instances: 总实例数量 + hosts: 实例部署的总机器数量 + memory_capacity: 内存总量总和 (按实例部署的机器计算) + physical_cores: CPU 物理核心数总和 (按实例部署的机器计算) + logical_cores: CPU 逻辑核心数总和 (按实例部署的机器计算) + message: + instance_down: '由于有 {{n}} 台机器上的所有实例都未启动或无法访问,因此统计中关于机器的指标可能会不准确。' + sub_statistics: 子统计按不同实例类型分别计算。当一个机器上部署了不同类型实例时,以下子统计的机器指标累加起来会超过“集群总计”数量。 error: load: '加载组件 {{comp}} 失败: {{cause}}' From 5acbb9e932e39606e9dd055c1db6ba120ff97554 Mon Sep 17 00:00:00 2001 From: Wenxuan Date: Thu, 26 Nov 2020 17:09:06 +0800 Subject: [PATCH 25/29] ui: Open statement and slow log in new tab (#816) Signed-off-by: Breezewish --- .../apps/InstanceProfiling/pages/Detail.tsx | 1 + .../SearchLogs/components/SearchResult.tsx | 1 + .../SlowQuery/components/SlowQueriesTable.tsx | 23 ++++++----------- ui/lib/apps/SlowQuery/pages/Detail/index.tsx | 12 ++------- .../Statement/components/StatementsTable.tsx | 25 +++++++------------ ui/lib/apps/Statement/pages/Detail/index.tsx | 12 ++------- ui/lib/components/CardTable/index.tsx | 9 ++++++- 7 files changed, 31 insertions(+), 52 deletions(-) diff --git a/ui/lib/apps/InstanceProfiling/pages/Detail.tsx 
b/ui/lib/apps/InstanceProfiling/pages/Detail.tsx index 51cce5c1a1..74ec5fb2cb 100644 --- a/ui/lib/apps/InstanceProfiling/pages/Detail.tsx +++ b/ui/lib/apps/InstanceProfiling/pages/Detail.tsx @@ -149,6 +149,7 @@ export default function Page() { items={data?.tasks_status || []} errors={[error]} onRowClicked={handleRowClick} + hideLoadingWhenNotEmpty extendLastColumn />
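`hideLoadingWhenNotEmpty` is only a pass-through prop here; the actual gating lives in the CardTable change later in this patch. A hedged sketch (not the real CardTable source) of the behaviour the prop name and its inline comment imply:

```typescript
// Illustrative only: one way a table could decide whether to show the loading
// skeleton when `hideLoadingWhenNotEmpty` is set. The real logic is inside
// CardTable / AnimatedSkeleton.
interface LoadingGate {
  loading: boolean
  hideLoadingWhenNotEmpty?: boolean
  itemsCount: number
}

function shouldShowSkeleton({ loading, hideLoadingWhenNotEmpty, itemsCount }: LoadingGate): boolean {
  if (!loading) {
    return false
  }
  // Keep showing the existing rows instead of a loading animation when the
  // table already has data, e.g. for an auto-refreshing profiling task list.
  if (hideLoadingWhenNotEmpty && itemsCount > 0) {
    return false
  }
  return true
}

console.log(shouldShowSkeleton({ loading: true, itemsCount: 0 })) // true
console.log(shouldShowSkeleton({ loading: true, hideLoadingWhenNotEmpty: true, itemsCount: 12 })) // false
```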
diff --git a/ui/lib/apps/SearchLogs/components/SearchResult.tsx b/ui/lib/apps/SearchLogs/components/SearchResult.tsx index 8e3f0d513f..8c5db68609 100644 --- a/ui/lib/apps/SearchLogs/components/SearchResult.tsx +++ b/ui/lib/apps/SearchLogs/components/SearchResult.tsx @@ -152,6 +152,7 @@ export default function SearchResult({ patterns, taskGroupID, tasks }: Props) { items={logPreviews || []} onRenderRow={renderRow} extendLastColumn + hideLoadingWhenNotEmpty />
) diff --git a/ui/lib/apps/SlowQuery/components/SlowQueriesTable.tsx b/ui/lib/apps/SlowQuery/components/SlowQueriesTable.tsx index 6c195f845e..0f2070da76 100644 --- a/ui/lib/apps/SlowQuery/components/SlowQueriesTable.tsx +++ b/ui/lib/apps/SlowQuery/components/SlowQueriesTable.tsx @@ -1,10 +1,6 @@ import { usePersistFn } from '@umijs/hooks' import React, { useCallback } from 'react' -import { useNavigate } from 'react-router-dom' - import { CardTable, ICardTableProps } from '@lib/components' -import openLink from '@lib/utils/openLink' - import DetailPage from '../pages/Detail' import { ISlowQueryTableController } from '../utils/useSlowQueryTableController' @@ -23,17 +19,14 @@ function SlowQueriesTable({ controller, ...restProps }: Props) { visibleColumnKeys, } = controller - const navigate = useNavigate() - const handleRowClick = usePersistFn( - (rec, _idx, ev: React.MouseEvent) => { - const qs = DetailPage.buildQuery({ - digest: rec.digest, - connectId: rec.connection_id, - timestamp: rec.timestamp, - }) - openLink(`/slow_query/detail?${qs}`, ev, navigate) - } - ) + const handleRowClick = usePersistFn((rec) => { + const qs = DetailPage.buildQuery({ + digest: rec.digest, + connectId: rec.connection_id, + timestamp: rec.timestamp, + }) + window.open(`#/slow_query/detail?${qs}`, '_blank') + }) const getKey = useCallback((row) => `${row.digest}_${row.timestamp}`, []) diff --git a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx index b0d513a54e..37320c5bbc 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/index.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/index.tsx @@ -1,8 +1,7 @@ import React from 'react' import { Space } from 'antd' import { useTranslation } from 'react-i18next' -import { Link, useLocation } from 'react-router-dom' -import { ArrowLeftOutlined } from '@ant-design/icons' +import { useLocation } from 'react-router-dom' import { useLocalStorageState } from '@umijs/hooks' import client from '@lib/client' @@ -68,14 +67,7 @@ function DetailPage() { return (
- - {t('slow_query.detail.head.back')} - - } - > + {error && } {!!data && ( diff --git a/ui/lib/apps/Statement/components/StatementsTable.tsx b/ui/lib/apps/Statement/components/StatementsTable.tsx index a2b7444c84..dac9176a30 100644 --- a/ui/lib/apps/Statement/components/StatementsTable.tsx +++ b/ui/lib/apps/Statement/components/StatementsTable.tsx @@ -1,10 +1,6 @@ import { usePersistFn } from '@umijs/hooks' import React, { useCallback } from 'react' -import { useNavigate } from 'react-router-dom' - import { CardTable, ICardTableProps } from '@lib/components' -import openLink from '@lib/utils/openLink' - import DetailPage from '../pages/Detail' import { IStatementTableController } from '../utils/useStatementTableController' @@ -24,18 +20,15 @@ export default function StatementsTable({ controller, ...restPrpos }: Props) { visibleColumnKeys, } = controller - const navigate = useNavigate() - const handleRowClick = usePersistFn( - (rec, _idx, ev: React.MouseEvent) => { - const qs = DetailPage.buildQuery({ - digest: rec.digest, - schema: rec.schema_name, - beginTime: begin_time, - endTime: end_time, - }) - openLink(`/statement/detail?${qs}`, ev, navigate) - } - ) + const handleRowClick = usePersistFn((rec) => { + const qs = DetailPage.buildQuery({ + digest: rec.digest, + schema: rec.schema_name, + beginTime: begin_time, + endTime: end_time, + }) + window.open(`#/statement/detail?${qs}`, '_blank') + }) const getKey = useCallback((row) => `${row.digest}_${row.schema_name}`, []) diff --git a/ui/lib/apps/Statement/pages/Detail/index.tsx b/ui/lib/apps/Statement/pages/Detail/index.tsx index a288b467c6..d950866a59 100644 --- a/ui/lib/apps/Statement/pages/Detail/index.tsx +++ b/ui/lib/apps/Statement/pages/Detail/index.tsx @@ -3,8 +3,7 @@ import { SelectionMode } from 'office-ui-fabric-react/lib/DetailsList' import { Selection } from 'office-ui-fabric-react/lib/Selection' import React, { useEffect, useMemo, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' -import { Link, useLocation } from 'react-router-dom' -import { ArrowLeftOutlined } from '@ant-design/icons' +import { useLocation } from 'react-router-dom' import { useLocalStorageState } from '@umijs/hooks' import client, { StatementModel } from '@lib/client' @@ -76,14 +75,7 @@ function DetailPage() { return (
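Both list pages now build the detail route themselves and hand it to `window.open(..., '_blank')` instead of navigating in place. A rough sketch of that flow, using a stand-in for `DetailPage.buildQuery` (the real serialization lives in the Detail pages):

```typescript
// Stand-in for DetailPage.buildQuery: serialize the clicked record's fields
// into a query string for the hash route.
function buildQuery(params: Record<string, string | number | undefined>): string {
  const search = new URLSearchParams()
  for (const [key, value] of Object.entries(params)) {
    if (value !== undefined) {
      search.set(key, String(value))
    }
  }
  return search.toString()
}

const qs = buildQuery({ digest: 'a1b2c3', connectId: 42, timestamp: 1606380000 })
// Opening the hash URL in a new tab keeps the list page's filters and scroll
// position intact, which the previous in-place navigation did not.
console.log(`#/slow_query/detail?${qs}`)
```

With the detail view opening in its own tab, the back-arrow header link becomes redundant, which is why it is dropped from the Detail pages.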
- - {t('statement.pages.detail.head.back')} - - } - > + {error && } {plans && plans.length > 0 && ( diff --git a/ui/lib/components/CardTable/index.tsx b/ui/lib/components/CardTable/index.tsx index 50beb9a080..6525af78dc 100644 --- a/ui/lib/components/CardTable/index.tsx +++ b/ui/lib/components/CardTable/index.tsx @@ -73,6 +73,7 @@ export interface ICardTableProps extends IDetailsListProps { className?: string style?: object loading?: boolean + hideLoadingWhenNotEmpty?: boolean // Whether loading animation should not show when data is not empty errors?: any[] cardExtra?: React.ReactNode @@ -133,6 +134,7 @@ export default function CardTable(props: ICardTableProps) { className, style, loading = false, + hideLoadingWhenNotEmpty, errors = [], cardExtra, cardNoMargin, @@ -218,7 +220,12 @@ export default function CardTable(props: ICardTableProps) { extra={cardExtra} > - +
Date: Thu, 26 Nov 2020 17:22:32 +0800 Subject: [PATCH 26/29] ui: add more time field for slow query detail page (#810) Signed-off-by: crazycs520 --- pkg/apiserver/slowquery/queries.go | 47 ++++++++------ .../SlowQuery/pages/Detail/DetailTabTime.tsx | 62 +++++++++++++++---- ui/lib/apps/SlowQuery/translations/en.yaml | 53 +++++++++++----- ui/lib/apps/SlowQuery/translations/zh.yaml | 56 ++++++++++++----- 4 files changed, 154 insertions(+), 64 deletions(-) diff --git a/pkg/apiserver/slowquery/queries.go b/pkg/apiserver/slowquery/queries.go index f200e80431..6b356cd4ed 100644 --- a/pkg/apiserver/slowquery/queries.go +++ b/pkg/apiserver/slowquery/queries.go @@ -37,11 +37,18 @@ type SlowQuery struct { ConnectionID string `gorm:"column:Conn_ID" json:"connection_id"` Success int `gorm:"column:Succ" json:"success"` - Timestamp float64 `gorm:"column:timestamp" proj:"(UNIX_TIMESTAMP(Time) + 0E0)" json:"timestamp"` // finish time - QueryTime float64 `gorm:"column:Query_time" json:"query_time"` // latency - ParseTime float64 `gorm:"column:Parse_time" json:"parse_time"` - CompileTime float64 `gorm:"column:Compile_time" json:"compile_time"` - ProcessTime float64 `gorm:"column:Process_time" json:"process_time"` + Timestamp float64 `gorm:"column:timestamp" proj:"(UNIX_TIMESTAMP(Time) + 0E0)" json:"timestamp"` // finish time + QueryTime float64 `gorm:"column:Query_time" json:"query_time"` // latency + ParseTime float64 `gorm:"column:Parse_time" json:"parse_time"` + CompileTime float64 `gorm:"column:Compile_time" json:"compile_time"` + RewriteTime float64 `gorm:"column:Rewrite_time" json:"rewrite_time"` + PreprocSubqueriesTime float64 `gorm:"column:Preproc_subqueries_time" json:"preproc_subqueries_time"` + OptimizeTime float64 `gorm:"column:Optimize_time" json:"optimize_time"` + WaitTSTime float64 `gorm:"column:Wait_TS" json:"wait_ts"` + CopTime float64 `gorm:"column:Cop_time" json:"cop_time"` + LockKeysTime float64 `gorm:"column:LockKeys_time" json:"lock_keys_time"` + WriteRespTime float64 `gorm:"column:Write_sql_response_total" json:"write_sql_response_total"` + ExecRetryTime float64 `gorm:"column:Exec_retry_time" json:"exec_retry_time"` MemoryMax int `gorm:"column:Mem_max" json:"memory_max"` DiskMax int `gorm:"column:Disk_max" json:"disk_max"` @@ -63,20 +70,22 @@ type SlowQuery struct { Host string `gorm:"column:Host" json:"host"` // Time - WaitTime float64 `gorm:"column:Wait_time" json:"wait_time"` - BackoffTime float64 `gorm:"column:Backoff_time" json:"backoff_time"` - GetCommitTSTime float64 `gorm:"column:Get_commit_ts_time" json:"get_commit_ts_time"` - LocalLatchWaitTime float64 `gorm:"column:Local_latch_wait_time" json:"local_latch_wait_time"` - ResolveLockTime float64 `gorm:"column:Resolve_lock_time" json:"resolve_lock_time"` - PrewriteTime float64 `gorm:"column:Prewrite_time" json:"prewrite_time"` - CommitTime float64 `gorm:"column:Commit_time" json:"commit_time"` - CommitBackoffTime float64 `gorm:"column:Commit_backoff_time" json:"commit_backoff_time"` - CopProcAvg float64 `gorm:"column:Cop_proc_avg" json:"cop_proc_avg"` - CopProcP90 float64 `gorm:"column:Cop_proc_p90" json:"cop_proc_p90"` - CopProcMax float64 `gorm:"column:Cop_proc_max" json:"cop_proc_max"` - CopWaitAvg float64 `gorm:"column:Cop_wait_avg" json:"cop_wait_avg"` - CopWaitP90 float64 `gorm:"column:Cop_wait_p90" json:"cop_wait_p90"` - CopWaitMax float64 `gorm:"column:Cop_wait_max" json:"cop_wait_max"` + ProcessTime float64 `gorm:"column:Process_time" json:"process_time"` + WaitTime float64 `gorm:"column:Wait_time" json:"wait_time"` + 
BackoffTime float64 `gorm:"column:Backoff_time" json:"backoff_time"` + GetCommitTSTime float64 `gorm:"column:Get_commit_ts_time" json:"get_commit_ts_time"` + LocalLatchWaitTime float64 `gorm:"column:Local_latch_wait_time" json:"local_latch_wait_time"` + ResolveLockTime float64 `gorm:"column:Resolve_lock_time" json:"resolve_lock_time"` + PrewriteTime float64 `gorm:"column:Prewrite_time" json:"prewrite_time"` + WaitPreWriteBinlogTime float64 `gorm:"column:Wait_prewrite_binlog_time" json:"wait_prewrite_binlog_time"` + CommitTime float64 `gorm:"column:Commit_time" json:"commit_time"` + CommitBackoffTime float64 `gorm:"column:Commit_backoff_time" json:"commit_backoff_time"` + CopProcAvg float64 `gorm:"column:Cop_proc_avg" json:"cop_proc_avg"` + CopProcP90 float64 `gorm:"column:Cop_proc_p90" json:"cop_proc_p90"` + CopProcMax float64 `gorm:"column:Cop_proc_max" json:"cop_proc_max"` + CopWaitAvg float64 `gorm:"column:Cop_wait_avg" json:"cop_wait_avg"` + CopWaitP90 float64 `gorm:"column:Cop_wait_p90" json:"cop_wait_p90"` + CopWaitMax float64 `gorm:"column:Cop_wait_max" json:"cop_wait_max"` // Transaction WriteKeys int `gorm:"column:Write_keys" json:"write_keys"` diff --git a/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTime.tsx b/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTime.tsx index 222ccabd18..07c68758da 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTime.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTime.tsx @@ -12,6 +12,15 @@ export interface ITabTimeProps { export default function TabBasic({ data }: ITabTimeProps) { const { t } = useTranslation() const items = [ + { + key: 'query_time2', + keyDisplay: ( + + {t('slow_query.fields.query_time2')} + + ), + value: data.query_time! * 10e8, + }, { key: 'parse_time', value: data.parse_time! * 10e8, @@ -20,6 +29,26 @@ export default function TabBasic({ data }: ITabTimeProps) { key: 'compile_time', value: data.compile_time! * 10e8, }, + { + key: 'rewrite_time', + value: data.rewrite_time! * 10e8, + }, + { + key: 'preproc_subqueries_time', + value: data.preproc_subqueries_time! * 10e8, + }, + { + key: 'optimize_time', + value: data.optimize_time! * 10e8, + }, + { + key: 'wait_ts', + value: data.wait_ts! * 10e8, + }, + { + key: 'cop_time', + value: data.cop_time! * 10e8, + }, { key: 'wait_time', value: data.wait_time! * 10e8, @@ -28,42 +57,49 @@ export default function TabBasic({ data }: ITabTimeProps) { key: 'process_time', value: data.process_time! * 10e8, }, + { + key: 'lock_keys_time', + value: data.lock_keys_time! * 10e8, + }, { key: 'backoff_time', value: data.backoff_time! * 10e8, }, { - key: 'get_commit_ts_time', - value: data.get_commit_ts_time! * 10e8, + key: 'prewrite_time', + value: data.prewrite_time! * 10e8, }, { key: 'local_latch_wait_time', value: data.local_latch_wait_time! * 10e8, }, { - key: 'resolve_lock_time', - value: data.resolve_lock_time! * 10e8, + key: 'wait_prewrite_binlog_time', + value: data.wait_prewrite_binlog_time! * 10e8, }, { - key: 'prewrite_time', - value: data.prewrite_time! * 10e8, + key: 'get_commit_ts_time', + value: data.get_commit_ts_time! * 10e8, }, { key: 'commit_time', value: data.commit_time! * 10e8, }, + { + key: 'resolve_lock_time', + value: data.resolve_lock_time! * 10e8, + }, { key: 'commit_backoff_time', value: data.commit_backoff_time! * 10e8, }, { - key: 'query_time2', - keyDisplay: ( - - {t('slow_query.fields.query_time2')} - - ), - value: data.query_time! * 10e8, + key: 'exec_retry_time', + value: data.exec_retry_time! 
* 10e8, + }, + { + key: 'write_sql_response_total', + value: data.write_sql_response_total! * 10e8, }, ] const columns = timeValueColumns('slow_query.fields.', items) diff --git a/ui/lib/apps/SlowQuery/translations/en.yaml b/ui/lib/apps/SlowQuery/translations/en.yaml index f59f30746e..e6b37792dd 100644 --- a/ui/lib/apps/SlowQuery/translations/en.yaml +++ b/ui/lib/apps/SlowQuery/translations/en.yaml @@ -34,22 +34,45 @@ slow_query: db: Execution Database db_tooltip: The database used to execute the query - parse_time: Parse Time - parse_time_tooltip: Time consumed when parsing the query - compile_time: Compile Time - compile_time_tooltip: Time consumed when optimizing the query - wait_time: Coprocessor Wait Time - process_time: Coprocessor Process Time - backoff_time: Backoff Retry Time - backoff_time_tooltip: The waiting time before retry when a query encounters errors that require a retry - get_commit_ts_time: Get Commit Ts Time - local_latch_wait_time: Local Latch Wait Time - resolve_lock_time: Resolve Lock Time - prewrite_time: Prewrite Time - commit_time: Commit Time - commit_backoff_time: Commit Backoff Time query_time2: Query Time - query_time2_tooltip: The execution time of a query (due to the parallel execution, it may be significantly smaller than the above time) + query_time2_tooltip: The elapsed wall time when execution the query + parse_time:   Parse Time + parse_time_tooltip: Time consumed when parsing the query + compile_time:   Generate Plan Time + rewrite_time:     Rewrite Plan Time + preproc_subqueries_time:       Preprocess Sub-Query Time + preproc_subqueries_time_tooltip: Time consumed when pre-processing the subquery during the rewrite plan phase + optimize_time:     Optimize Plan Time + wait_ts:   Get Start Ts Time + wait_ts_tooltip: Time consumed of retrieving the transaction start timestamp when transaction begins + cop_time:   Coprocessor Executor Time + cop_time_tooltip: 'The elapsed wall time when TiDB Coprocessor executor waiting all Coprocessor requests to finish (note: when there are JOIN in SQL statement, multiple TiDB Coprocessor executors may be running in parallel, which may cause this time not being a wall time)' + wait_time:     Coprocessor Wait Time + wait_time_tooltip: The total waiting time of Coprocessor request in TiKV + process_time:     Coprocessor Process Time + process_time_tooltip: 'The total time of Coprocessor request being executed in TiKV (note: TiKV executes requests in parallel so that this is not a wall time)' + backoff_time:   Backoff Retry Time + backoff_time_tooltip: 'The total waiting time before retry when a query encounters errors and requires a retry (note: there may be multiple backoff running in parallel so that this may not be a wall time)' + lock_keys_time:   Lock Keys Time + lock_keys_time_tooltip: Time consumed when locking keys in pessimistic transaction + get_commit_ts_time:   Get Commit Ts Time + get_commit_ts_time_tooltip: Time consumed when getting a commit timestamp for 2PC commit phase when transaction commits + local_latch_wait_time:   Local Latch Wait Time + local_latch_wait_time_tooltip: Time consumed when TiDB waits for the lock in the current TiDB instance before 2PC commit phase when transaction commits + resolve_lock_time:   Resolve Lock Time + resolve_lock_time_tooltip: Time consumed when TiDB resolves locks from other transactions in 2PC prewrite phase when transaction commits + prewrite_time:   Prewrite Time + prewrite_time_tooltip: Time consumed in 2PC prewrite phase when transaction commits + 
wait_prewrite_binlog_time:   Wait Binlog Prewrite Time + wait_prewrite_binlog_time_tooltip: Time consumed when waiting Binlog prewrite to finish + commit_time:   Commit Time + commit_time_tooltip: Time consumed in 2PC commit phase when transaction commits + commit_backoff_time:   Commit Backoff Time + commit_backoff_time_tooltip: Total backoff waiting time when 2PC commit encounters errors when transaction commits + write_sql_response_total:   Send response Time + write_sql_response_total_tooltip: Time consumed when sending response to the SQL client + exec_retry_time:   Retry execution Time + exec_retry_time_tooltip: Time consumed when retrying the SQL statement execution caused by locks or errors request_count: Request Count process_keys: Process Keys diff --git a/ui/lib/apps/SlowQuery/translations/zh.yaml b/ui/lib/apps/SlowQuery/translations/zh.yaml index f1521f82dd..d5c46b20df 100644 --- a/ui/lib/apps/SlowQuery/translations/zh.yaml +++ b/ui/lib/apps/SlowQuery/translations/zh.yaml @@ -34,25 +34,47 @@ slow_query: db: 执行数据库 db_tooltip: 执行该 SQL 查询时使用的数据库名称 - parse_time: 解析耗时 + query_time2: SQL 执行时间 + query_time2_tooltip: 执行 SQL 耗费的自然时间 + parse_time:   解析耗时 parse_time_tooltip: 解析该 SQL 查询的耗时 - compile_time: 优化耗时 - compile_time_tooltip: 优化该 SQL 查询的耗时 - wait_time: Coprocessor 累计等待耗时 - process_time: Coprocessor 累计执行耗时 - process_time_tooltip: Coprocessor 累计执行耗时 - backoff_time: 累计重试等待耗时 - get_commit_ts_time: 取 Commit Ts 耗时 - get_commit_ts_time_tooltip: 从 PD 取递交时间戳(事务号)步骤的耗时 - local_latch_wait_time: Local Latch Wait 耗时 + compile_time:   生成执行计划耗时 + compile_time_tooltip: 生成该 SQL 的执行计划的耗时 + rewrite_time:     重写执行计划耗时 + rewrite_time_tooltip: 重写执行计划的耗时,例如常量折叠等 + preproc_subqueries_time:       子查询预处理耗时 + optimize_time:     优化执行计划耗时 + optimize_time_tooltip: 优化器寻找执行计划的耗时,包括规则优化和物理优化的耗时 + wait_ts:   取事务 Start Ts 耗时 + wait_ts_tooltip: 从 PD 取事务开始时间戳步骤的耗时 + cop_time:   Coprocessor 执行耗时 + cop_time_tooltip: TiDB Coprocessor 算子等待所有任务在 TiKV 上并行执行完毕耗费的自然时间(注:当 SQL 语句中包含 JOIN 时,多个 TiDB Coprocessor 算子可能会并行执行,此时不再等同于自然时间) + wait_time:     Coprocessor 累计等待耗时 + wait_time_tooltip: TiKV 准备并等待 Coprocessor 任务执行的累计时间 + process_time:     Coprocessor 累计执行耗时 + process_time_tooltip: TiKV 执行 Coprocessor 任务的累计处理时间(注:TiKV 会并行处理请求,该时间不是自然流逝时间) + lock_keys_time:   上锁耗时 + lock_keys_time_tooltip: 悲观事务中对相关行数据进行上锁的耗时 + backoff_time:   累计重试等待耗时 + backoff_time_tooltip: 执行过程中遇到锁或者错误后的累计等待耗时(注:可能同时存在多个重试等待,因此该时间可能不是自然流逝时间) + get_commit_ts_time:   取事务 Commit Ts 耗时 + get_commit_ts_time_tooltip: 从 PD 取提交时间戳(事务号)步骤的耗时 + local_latch_wait_time:   Local Latch Wait 耗时 local_latch_wait_time_tooltip: 事务在 TiDB 本地与其他事务产生了锁冲突并等待的耗时 - resolve_lock_time: Resolve Lock 耗时 - resolve_lock_time_tooltip: 事务在 TiKV 与其他事务产生了锁冲突并处理锁冲突的耗时 - prewrite_time: Prewrite 阶段耗时 - commit_time: Commit 阶段耗时 - commit_backoff_time: Commit 重试等待耗时 - query_time2: SQL 执行时间 - query_time2_tooltip: 由于存在并行执行,因此 SQL 执行时间可能远小于上述各项时间 + resolve_lock_time:   Resolve Lock 耗时 + resolve_lock_time_tooltip: 事务在提交过程中与其他事务产生了锁冲突并处理锁冲突的耗时 + prewrite_time:   Prewrite 阶段耗时 + prewrite_time_tooltip: 事务两阶段提交中第一阶段(prewrite 阶段)的耗时 + wait_prewrite_binlog_time:   Binlog Prewrite 等待耗时 + wait_prewrite_binlog_time_tooltip: 等待 Binlog Prewrite 完成的耗时 + commit_time:   Commit 阶段耗时 + commit_time_tooltip: 事务两阶段提交中第二阶段(commit 阶段)的耗时 + commit_backoff_time:   Commit 重试等待耗时 + commit_backoff_time_tooltip: 事务两阶段提交过程中遇到锁或者错误后的等待耗时 + write_sql_response_total:   发送结果耗时 + write_sql_response_total_tooltip: 发送 SQL 结果给客户端的耗时 + exec_retry_time:   执行重试耗时 + exec_retry_time_tooltip: 由于锁冲突或者某些错误导致执行失败的执行耗时 request_count: Coprocessor 请求数 
process_keys: 可见版本数 From 779904a79f352358492d75a74ac6791131b3a805 Mon Sep 17 00:00:00 2001 From: Wenxuan Date: Thu, 26 Nov 2020 18:22:51 +0800 Subject: [PATCH 27/29] slowlog: Improve descriptions (#817) Signed-off-by: Breezewish --- .../SlowQuery/pages/Detail/DetailTabTime.tsx | 36 +++++++++---------- ui/lib/apps/SlowQuery/translations/en.yaml | 14 ++++---- ui/lib/apps/SlowQuery/translations/zh.yaml | 22 ++++++------ ui/lib/utils/tableColumns.tsx | 7 +++- 4 files changed, 42 insertions(+), 37 deletions(-) diff --git a/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTime.tsx b/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTime.tsx index 07c68758da..c6673b3c27 100644 --- a/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTime.tsx +++ b/ui/lib/apps/SlowQuery/pages/Detail/DetailTabTime.tsx @@ -41,10 +41,6 @@ export default function TabBasic({ data }: ITabTimeProps) { key: 'optimize_time', value: data.optimize_time! * 10e8, }, - { - key: 'wait_ts', - value: data.wait_ts! * 10e8, - }, { key: 'cop_time', value: data.cop_time! * 10e8, @@ -58,36 +54,36 @@ export default function TabBasic({ data }: ITabTimeProps) { value: data.process_time! * 10e8, }, { - key: 'lock_keys_time', - value: data.lock_keys_time! * 10e8, - }, - { - key: 'backoff_time', - value: data.backoff_time! * 10e8, + key: 'local_latch_wait_time', + value: data.local_latch_wait_time! * 10e8, }, { - key: 'prewrite_time', - value: data.prewrite_time! * 10e8, + key: 'lock_keys_time', + value: data.lock_keys_time! * 10e8, }, { - key: 'local_latch_wait_time', - value: data.local_latch_wait_time! * 10e8, + key: 'resolve_lock_time', + value: data.resolve_lock_time! * 10e8, }, { - key: 'wait_prewrite_binlog_time', - value: data.wait_prewrite_binlog_time! * 10e8, + key: 'wait_ts', + value: data.wait_ts! * 10e8, }, { key: 'get_commit_ts_time', value: data.get_commit_ts_time! * 10e8, }, + { + key: 'prewrite_time', + value: data.prewrite_time! * 10e8, + }, { key: 'commit_time', value: data.commit_time! * 10e8, }, { - key: 'resolve_lock_time', - value: data.resolve_lock_time! * 10e8, + key: 'backoff_time', + value: data.backoff_time! * 10e8, }, { key: 'commit_backoff_time', @@ -101,6 +97,10 @@ export default function TabBasic({ data }: ITabTimeProps) { key: 'write_sql_response_total', value: data.write_sql_response_total! * 10e8, }, + { + key: 'wait_prewrite_binlog_time', + value: data.wait_prewrite_binlog_time! 
* 10e8, }, ] const columns = timeValueColumns('slow_query.fields.', items) return ( diff --git a/ui/lib/apps/SlowQuery/translations/en.yaml index e6b37792dd..e904a49c40 100644 --- a/ui/lib/apps/SlowQuery/translations/en.yaml +++ b/ui/lib/apps/SlowQuery/translations/en.yaml @@ -44,15 +44,15 @@ slow_query: preproc_subqueries_time_tooltip: Time consumed when pre-processing the subquery during the rewrite plan phase optimize_time: &nbsp;&nbsp;&nbsp;&nbsp;Optimize Plan Time wait_ts: &nbsp;&nbsp;Get Start Ts Time - wait_ts_tooltip: Time consumed of retrieving the transaction start timestamp when transaction begins + wait_ts_tooltip: Time consumed when getting a start timestamp when transaction begins cop_time: &nbsp;&nbsp;Coprocessor Executor Time cop_time_tooltip: 'The elapsed wall time when TiDB Coprocessor executor waiting all Coprocessor requests to finish (note: when there are JOIN in SQL statement, multiple TiDB Coprocessor executors may be running in parallel, which may cause this time not being a wall time)' wait_time: &nbsp;&nbsp;&nbsp;&nbsp;Coprocessor Wait Time - wait_time_tooltip: The total waiting time of Coprocessor request in TiKV + wait_time_tooltip: 'The total time a Coprocessor request is prepared and waits to execute in TiKV, which may happen when retrieving a snapshot through the Raft consensus protocol (note: TiKV waits for requests in parallel so that this is not a wall time)' process_time: &nbsp;&nbsp;&nbsp;&nbsp;Coprocessor Process Time process_time_tooltip: 'The total time of Coprocessor request being executed in TiKV (note: TiKV executes requests in parallel so that this is not a wall time)' - backoff_time: &nbsp;&nbsp;Backoff Retry Time - backoff_time_tooltip: 'The total waiting time before retry when a query encounters errors and requires a retry (note: there may be multiple backoff running in parallel so that this may not be a wall time)' + backoff_time: &nbsp;&nbsp;Execution Backoff Time + backoff_time_tooltip: 'The total backoff waiting time before retry when a query encounters errors (note: there may be multiple backoffs in parallel so that this may not be a wall time)' lock_keys_time: &nbsp;&nbsp;Lock Keys Time lock_keys_time_tooltip: Time consumed when locking keys in pessimistic transaction get_commit_ts_time: &nbsp;&nbsp;Get Commit Ts Time @@ -68,11 +68,11 @@ slow_query: commit_time: &nbsp;&nbsp;Commit Time commit_time_tooltip: Time consumed in 2PC commit phase when transaction commits commit_backoff_time: &nbsp;&nbsp;Commit Backoff Time - commit_backoff_time_tooltip: Total backoff waiting time when 2PC commit encounters errors when transaction commits + commit_backoff_time_tooltip: 'The total backoff waiting time when 2PC commit encounters errors (note: there may be multiple backoffs in parallel so that this may not be a wall time)' write_sql_response_total: &nbsp;&nbsp;Send response Time write_sql_response_total_tooltip: Time consumed when sending response to the SQL client - exec_retry_time: &nbsp;&nbsp;Retry execution Time - exec_retry_time_tooltip: Time consumed when retrying the SQL statement execution caused by locks or errors + exec_retry_time: &nbsp;&nbsp;Retried execution Time + exec_retry_time_tooltip: Wall time consumed when the SQL statement is retried and executed again, except for the last execution request_count: Request Count process_keys: Process Keys diff --git a/ui/lib/apps/SlowQuery/translations/zh.yaml index d5c46b20df..e6d1bc030d 100644 --- a/ui/lib/apps/SlowQuery/translations/zh.yaml +++ b/ui/lib/apps/SlowQuery/translations/zh.yaml @@ -50,18 +50,18 @@ slow_query: cop_time: &nbsp;&nbsp;Coprocessor 执行耗时 cop_time_tooltip: TiDB Coprocessor
算子等待所有任务在 TiKV 上并行执行完毕耗费的自然时间(注:当 SQL 语句中包含 JOIN 时,多个 TiDB Coprocessor 算子可能会并行执行,此时不再等同于自然时间) wait_time:     Coprocessor 累计等待耗时 - wait_time_tooltip: TiKV 准备并等待 Coprocessor 任务执行的累计时间 + wait_time_tooltip: TiKV 准备并等待 Coprocessor 任务执行的累计时间,等待过程中包括通过 Raft 一致性协议取快照等(注:TiKV 会并行等待任务,因此该时间不是自然流逝时间) process_time:     Coprocessor 累计执行耗时 - process_time_tooltip: TiKV 执行 Coprocessor 任务的累计处理时间(注:TiKV 会并行处理请求,该时间不是自然流逝时间) + process_time_tooltip: TiKV 执行 Coprocessor 任务的累计处理时间(注:TiKV 会并行处理任务,因此该时间不是自然流逝时间) lock_keys_time:   上锁耗时 lock_keys_time_tooltip: 悲观事务中对相关行数据进行上锁的耗时 - backoff_time:   累计重试等待耗时 - backoff_time_tooltip: 执行过程中遇到锁或者错误后的累计等待耗时(注:可能同时存在多个重试等待,因此该时间可能不是自然流逝时间) + backoff_time:   执行阶段累计 Backoff 耗时 + backoff_time_tooltip: 在执行失败时,Backoff 机制等待一段时间再重试时的 Backoff 累计耗时(注:可能同时存在多个 Backoff,因此该时间可能不是自然流逝时间) get_commit_ts_time:   取事务 Commit Ts 耗时 get_commit_ts_time_tooltip: 从 PD 取提交时间戳(事务号)步骤的耗时 - local_latch_wait_time:   Local Latch Wait 耗时 + local_latch_wait_time:   TiDB 本地等锁耗时 local_latch_wait_time_tooltip: 事务在 TiDB 本地与其他事务产生了锁冲突并等待的耗时 - resolve_lock_time:   Resolve Lock 耗时 + resolve_lock_time:   解锁耗时 resolve_lock_time_tooltip: 事务在提交过程中与其他事务产生了锁冲突并处理锁冲突的耗时 prewrite_time:   Prewrite 阶段耗时 prewrite_time_tooltip: 事务两阶段提交中第一阶段(prewrite 阶段)的耗时 @@ -69,12 +69,12 @@ slow_query: wait_prewrite_binlog_time_tooltip: 等待 Binlog Prewrite 完成的耗时 commit_time:   Commit 阶段耗时 commit_time_tooltip: 事务两阶段提交中第二阶段(commit 阶段)的耗时 - commit_backoff_time:   Commit 重试等待耗时 - commit_backoff_time_tooltip: 事务两阶段提交过程中遇到锁或者错误后的等待耗时 + commit_backoff_time:   Commit 阶段累计 Backoff 耗时 + commit_backoff_time_tooltip: 事务递交失败时,Backoff 机制等待一段时间再重试时的 Backoff 累计耗时(注:可能同时存在多个 Backoff,因此该时间可能不是自然流逝时间) write_sql_response_total:   发送结果耗时 - write_sql_response_total_tooltip: 发送 SQL 结果给客户端的耗时 - exec_retry_time:   执行重试耗时 - exec_retry_time_tooltip: 由于锁冲突或者某些错误导致执行失败的执行耗时 + write_sql_response_total_tooltip: 发送 SQL 语句执行结果给客户端的耗时 + exec_retry_time:   前序执行耗时 + exec_retry_time_tooltip: 由于锁冲突或错误,计划可能会执行失败并重试执行多次,该时间是不包含最后一次执行的前序执行自然时间(注:执行计划中的时间不含该前序时间) request_count: Coprocessor 请求数 process_keys: 可见版本数 diff --git a/ui/lib/utils/tableColumns.tsx b/ui/lib/utils/tableColumns.tsx index b37a7479dc..3b41590e80 100644 --- a/ui/lib/utils/tableColumns.tsx +++ b/ui/lib/utils/tableColumns.tsx @@ -133,12 +133,17 @@ function fieldsDescriptionColumn(transKeyPrefix: string): IColumn { minWidth: 150, maxWidth: 300, onRender: (rec) => { - return ( + const content = ( ) + return ( + + {content} + + ) }, } } From 16a85b1125d9ea0f8e614903b931491ae50a838c Mon Sep 17 00:00:00 2001 From: Breezewish Date: Thu, 26 Nov 2020 18:48:17 +0800 Subject: [PATCH 28/29] build: add action to check release-version is changed for release branch --- .github/workflows/e2e-test.yaml | 3 ++- .github/workflows/pr-build.yaml | 5 ++--- .github/workflows/release-check.yaml | 19 +++++++++++++++++++ .github/workflows/release.yaml | 7 +------ 4 files changed, 24 insertions(+), 10 deletions(-) create mode 100644 .github/workflows/release-check.yaml diff --git a/.github/workflows/e2e-test.yaml b/.github/workflows/e2e-test.yaml index fb1b3f7469..71b4ee6f8a 100644 --- a/.github/workflows/e2e-test.yaml +++ b/.github/workflows/e2e-test.yaml @@ -4,6 +4,7 @@ on: pull_request: branches: - master + - release jobs: e2e_test: @@ -12,7 +13,7 @@ jobs: timeout-minutes: 10 steps: - name: Checkout code - uses: actions/checkout@master + uses: actions/checkout@v2 - uses: actions/setup-node@v1 with: node-version: "12.x" diff --git a/.github/workflows/pr-build.yaml b/.github/workflows/pr-build.yaml index b82d115d5a..96b78c5c3f 100644 --- 
a/.github/workflows/pr-build.yaml +++ b/.github/workflows/pr-build.yaml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@master + uses: actions/checkout@v2 - uses: actions/setup-go@v1 with: go-version: "1.13.5" @@ -32,13 +32,12 @@ jobs: - name: Check uncommitted lint changes run: | git diff --exit-code - frontend: name: frontend runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@master + uses: actions/checkout@v2 - uses: actions/setup-node@v1 with: node-version: "12.x" diff --git a/.github/workflows/release-check.yaml b/.github/workflows/release-check.yaml new file mode 100644 index 0000000000..f7d61571af --- /dev/null +++ b/.github/workflows/release-check.yaml @@ -0,0 +1,19 @@ +name: Release Check + +on: + pull_request: + branches: + - release + +jobs: + check_release_version: + runs-on: ubuntu-latest + steps: + - name: Checkout code and release branch + uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Check release version + run: > + ! (git diff --name-only --exit-code origin/release -- ./release-version) + || (echo "Please update the release-version file" && false) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 023c7c435b..75f96a8a8f 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -4,24 +4,19 @@ on: push: branches: - release - paths: - - release-version jobs: release: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@master + uses: actions/checkout@v2 - uses: actions/setup-node@v1 with: node-version: "12.x" - uses: actions/setup-go@v1 with: go-version: "1.13.5" - - name: Check release version - run: | - git diff --name-only --exit-code ./release-version || (echo "Update the release version file" && false) - name: Lookup yarn cache id: yarn_cache run: echo "::set-output name=dir::$(yarn cache dir)" From 93b0a15a68d65793b590933b7e625a610c24201b Mon Sep 17 00:00:00 2001 From: Breezewish Date: Thu, 26 Nov 2020 18:55:23 +0800 Subject: [PATCH 29/29] Release v2020.11.26.1 --- release-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release-version b/release-version index 3809da7b70..c97ab13739 100644 --- a/release-version +++ b/release-version @@ -1,3 +1,3 @@ # This file specifies the TiDB Dashboard internal version, which will be printed in `--version` # and UI. In release branch, changing this file will result in publishing a new version and tag. -2020.10.21.1 +2020.11.26.1
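The release-check workflow above fails a pull request against `release` whenever `release-version` is left untouched; the final commit then bumps that file to `2020.11.26.1`. A rough local equivalent of the guard, written as an illustrative Node/TypeScript script rather than anything shipped in the repository:

```typescript
import { execSync } from 'child_process'

// `git diff --exit-code` exits non-zero when the file differs from
// origin/release, which execSync surfaces as a thrown error.
function releaseVersionChanged(): boolean {
  try {
    execSync('git diff --name-only --exit-code origin/release -- ./release-version', {
      stdio: 'ignore',
    })
    return false // exit code 0: no diff, release-version was not updated
  } catch {
    return true // non-zero exit code: release-version differs
  }
}

if (!releaseVersionChanged()) {
  console.error('Please update the release-version file')
  process.exit(1)
}
```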