diff --git a/.babelrc b/.babelrc index 6b0168a562a9..ebe4672214fa 100644 --- a/.babelrc +++ b/.babelrc @@ -1,28 +1,27 @@ { "plugins": [ "babel-plugin-styled-components", - "transform-flow-strip-types", - "add-react-displayname", - "transform-decorators-legacy", - ["transform-builtin-extend", { - "globals": ["Error", "Array"] - }], - "syntax-trailing-function-commas" + "@babel/plugin-transform-flow-strip-types", + "@babel/plugin-proposal-export-default-from", + ["@babel/plugin-proposal-decorators", { "legacy": true }] ], - "presets": ["es2015", "stage-0", "react"], + "presets": ["@babel/preset-env", "@babel/preset-react"], "env": { "development": { "presets": [] }, "extract": { "plugins": [ - ["ttag", { - "extract": { - "output": "locales/metabase-frontend.pot" - }, - "discover": ["t", "jt"], - "numberedExpressions": true - }] + [ + "ttag", + { + "extract": { + "output": "locales/metabase-frontend.pot" + }, + "discover": ["t", "jt"], + "numberedExpressions": true + } + ] ] } } diff --git a/.circleci/config.yml b/.circleci/config.yml index c22318d6104d..3be1c3e8fccf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,28 +1,33 @@ version: 2.1 +######################################################################################################################## +# ORBS # +######################################################################################################################## +orbs: + aws-cli: circleci/aws-cli@4.0 + aws-ecr: circleci/aws-ecr@9.0 + + ######################################################################################################################## # EXECUTORS # ######################################################################################################################## executors: - # Our brand new builder - clojure-and-node: - working_directory: /home/circleci/metabase/metabase/ - docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 - - # CircleCI base (Lein 2.9.5) + Node + Headless browsers + 
Clojure CLI - big one + # CircleCI base Node + Headless browsers + Clojure CLI - big one # Maildev runs by default with all Cypress tests clojure-and-node-and-browsers: working_directory: /home/circleci/metabase/metabase/ docker: - - image: circleci/clojure:lein-2.9.5-node-browsers + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: maildev/maildev + - image: metabase/qa-databases:postgres-sample-12 + - image: metabase/qa-databases:mongo-sample-4.0 + - image: metabase/qa-databases:mysql-sample-8 java-8: working_directory: /home/circleci/metabase/metabase/ docker: - - image: circleci/clojure:openjdk-8-lein-2.9.5-buster + - image: metabase/ci:circleci-java-8-clj-1.10.3.929-07-27-2021-node-browsers # Java 11 tests also test Metabase with the at-rest encryption enabled. See # https://metabase.com/docs/latest/operations-guide/encrypting-database-details-at-rest.html for an explanation of @@ -30,19 +35,19 @@ executors: java-11: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_ENCRYPTION_SECRET_KEY: Orw0AAyzkO/kPTLJRxiyKoBHXa/d6ZcO+p+gpZO/wSQ= java-16: working_directory: /home/circleci/metabase/metabase/ docker: - - image: circleci/clojure:openjdk-16-lein-2.9.5-buster + - image: metabase/ci:circleci-java-16-clj-1.10.3.929-07-27-2021-node-browsers postgres-9-6: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: postgres MB_DB_PORT: 5432 @@ -58,7 +63,7 @@ executors: postgres-latest: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: postgres 
MB_DB_PORT: 5432 @@ -75,7 +80,7 @@ executors: mysql-5-7: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: mysql MB_DB_HOST: localhost @@ -88,7 +93,7 @@ executors: mysql-latest: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: mysql MB_DB_HOST: localhost @@ -101,7 +106,7 @@ executors: mariadb-10-2: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: mysql MB_DB_HOST: localhost @@ -114,7 +119,7 @@ executors: mariadb-latest: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: mysql MB_DB_HOST: localhost @@ -128,16 +133,22 @@ executors: # MYSQL_USER: root # MYSQL_ALLOW_EMPTY_PASSWORD: yes - mongo: + mongo-4-0: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: circleci/mongo:4.0 - presto: + mongo-latest: + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers + - image: circleci/mongo:latest + + presto-186: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: metabase/presto-mb-ci:0.186 environment: JAVA_TOOL_OPTIONS: 
"-Xmx2g" @@ -145,22 +156,41 @@ executors: # OOM sometimes with the default medium size. resource_class: large + presto-jdbc-env: + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers + - image: metabase/presto-mb-ci:latest # version 0.254 + environment: + JAVA_TOOL_OPTIONS: "-Xmx2g" + MB_PRESTO_JDBC_TEST_CATALOG: test_data + MB_PRESTO_JDBC_TEST_HOST: localhost + MB_PRESTO_JDBC_TEST_PORT: 8443 + MB_PRESTO_JDBC_TEST_SSL: true + MB_PRESTO_JDBC_TEST_USER: metabase + MB_PRESTO_JDBC_TEST_PASSWORD: metabase + MB_ENABLE_PRESTO_JDBC_DRIVER: true + MB_PRESTO_JDBC_TEST_ADDITIONAL_OPTIONS: > + SSLTrustStorePath=/tmp/cacerts-with-presto-ssl.jks&SSLTrustStorePassword=changeit + # (see above) + resource_class: large + sparksql: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: metabase/spark:2.1.1 vertica: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: sumitchawla/vertica sqlserver: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_SQLSERVER_TEST_HOST: localhost MB_SQLSERVER_TEST_PASSWORD: 'P@ssw0rd' @@ -174,7 +204,7 @@ executors: druid: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: metabase/druid:0.20.2 environment: CLUSTER_SIZE: nano-quickstart @@ -182,26 +212,6 @@ executors: # OOM all the time with the default medium size. 
resource_class: large - - fe-mongo-4: - working_directory: /home/circleci/metabase/metabase/ - docker: - - image: circleci/clojure:lein-2.9.5-node-browsers - - image: metabase/qa-databases:mongo-sample-4.0 - - fe-postgres-12: - working_directory: /home/circleci/metabase/metabase/ - docker: - - image: circleci/clojure:lein-2.9.5-node-browsers - - image: metabase/qa-databases:postgres-sample-12 - - fe-mysql-8: - working_directory: /home/circleci/metabase/metabase/ - docker: - - image: circleci/clojure:lein-2.9.5-node-browsers - - image: metabase/qa-databases:mysql-sample-8 - - ######################################################################################################################## # MAP FRAGMENTS AND CACHE KEYS # ######################################################################################################################## @@ -209,7 +219,8 @@ executors: # `default_parameters` isn't a key that CircleCI uses, but this form lets us reuse parameter definitions default_parameters: &Params edition: - type: string + type: enum + enum: ["oss", "ee"] default: "oss" # .BACKEND-CHECKSUMS, .FRONTEND-CHECKSUMS, and .MODULE-CHECKSUMS are created during the checkout step; see that step @@ -226,23 +237,25 @@ default_parameters: &Params # uncontrollably since old deps would continue to accumulate. Restoring big caches is really slow in Circle. It's # actually faster to recreate the deps cache from scratch whenever we need to which keeps the size down. cache-key-backend-deps: &CacheKeyBackendDeps - key: v1-{{ checksum ".CACHE-PREFIX" }}-be-deps-{{ checksum "project.clj" }}-{{ checksum ".SCRIPTS-DEPS-CHECKSUMS" }} + # TODO -- this should actually include the Java source files and the Spark SQL AOT source files as well since we now + # compile those as part of this step. 
FIXME + key: v5-{{ checksum ".CACHE-PREFIX" }}-be-deps-{{ checksum "deps.edn" }}-{{ checksum ".SCRIPTS-DEPS-CHECKSUMS" }} cache-key-frontend-deps: &CacheKeyFrontendDeps - key: v1-{{ checksum ".CACHE-PREFIX" }}-fe-deps-{{ checksum "yarn.lock" }} + key: v5-{{ checksum ".CACHE-PREFIX" }}-fe-deps-{{ checksum "yarn.lock" }} # Key used for implementation of run-on-change -- this is the cache key that contains the .SUCCESS dummy file # By default the key ALWAYS includes the name of the test job itself ($CIRCLE_STAGE) so you don't need to add that yourself. cache-key-run-on-change: &CacheKeyRunOnChange - key: v1-{{ checksum ".CACHE-PREFIX" }}-run-on-change-{{ .Environment.CIRCLE_STAGE }}-<< parameters.checksum >> + key: v5-{{ checksum ".CACHE-PREFIX" }}-run-on-change-{{ .Environment.CIRCLE_STAGE }}-<< parameters.checksum >> # Key for the local maven installation of metabase-core (used by build-uberjar-drivers) cache-key-metabase-core: &CacheKeyMetabaseCore - key: v1-{{ checksum ".CACHE-PREFIX" }}-metabase-core-{{ checksum ".BACKEND-CHECKSUMS" }} + key: v5-{{ checksum ".CACHE-PREFIX" }}-metabase-core-{{ checksum ".BACKEND-CHECKSUMS" }} # Key for the drivers built by build-uberjar-drivers cache-key-drivers: &CacheKeyDrivers - key: v1-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }}-{{ checksum ".BACKEND-CHECKSUMS" }}-<< parameters.edition >> + key: v5-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }}-{{ checksum ".BACKEND-CHECKSUMS" }}-<< parameters.edition >> # This is also used by the uberjar-build-drivers step; this is a unique situation because the build-drivers script has # logic to determine whether to rebuild drivers or not that is quite a bit more sophisticated that the run-on-change @@ -250,17 +263,17 @@ cache-key-drivers: &CacheKeyDrivers # redshift driver. 
cache-keys-drivers-with-fallback-keys: &CacheKeyDrivers_WithFallbackKeys keys: - - v1-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }}-{{ checksum ".BACKEND-CHECKSUMS" }} - - v1-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }} - - v1-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>- + - v5-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }}-{{ checksum ".BACKEND-CHECKSUMS" }} + - v5-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }} + - v5-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>- # Key for frontend client built by uberjar-build-frontend step cache-key-frontend: &CacheKeyFrontend - key: v1-{{ checksum ".CACHE-PREFIX" }}-frontend-<< parameters.edition >>-{{ checksum ".FRONTEND-CHECKSUMS" }} + key: v5-{{ checksum ".CACHE-PREFIX" }}-frontend-<< parameters.edition >>-{{ checksum ".FRONTEND-CHECKSUMS" }} # Key for uberjar built by build-uberjar cache-key-uberjar: &CacheKeyUberjar - key: v1-{{ checksum ".CACHE-PREFIX" }}-uberjar-<< parameters.edition >>-{{ checksum ".BACKEND-CHECKSUMS" }}-{{ checksum ".FRONTEND-CHECKSUMS" }} + key: v5-{{ checksum ".CACHE-PREFIX" }}-uberjar-<< parameters.edition >>-{{ checksum ".BACKEND-CHECKSUMS" }}-{{ checksum ".FRONTEND-CHECKSUMS" }} ######################################################################################################################## @@ -396,12 +409,12 @@ commands: fi echo "Created checksums for $(cat << parameters.filename >> | wc -l) files" - run-lein-command: + run-clojure-command: parameters: before-steps: type: steps default: [] - lein-command: + clojure-args: type: string after-steps: type: steps @@ -411,9 +424,9 @@ commands: - restore-be-deps-cache - steps: << parameters.before-steps >> - run: - name: lein << parameters.lein-command >> + name: clojure << parameters.clojure-args 
>>:<< parameters.edition >>:<< parameters.edition >>-dev command: | - lein with-profile +ci,+<< parameters.edition >> << parameters.lein-command >> + clojure << parameters.clojure-args >>:<< parameters.edition >>:<< parameters.edition >>-dev no_output_timeout: 15m - steps: << parameters.after-steps >> - store_test_results: @@ -487,6 +500,15 @@ commands: wget --output-document=plugins/<< parameters.dest >> ${<< parameters.source >>} no_output_timeout: 15m + run-command: + parameters: + command: + type: string + steps: + - run: + name: Run command + command: << parameters.command >> + jobs: ######################################################################################################################## @@ -494,7 +516,7 @@ jobs: ######################################################################################################################## checkout: - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - checkout # .BACKEND-CHECKSUMS is every Clojure source file as well as dependency files like deps.edn and plugin manifests @@ -536,6 +558,9 @@ jobs: else echo '' > .CACHE-PREFIX fi + - run-yarn-command: + command-name: Create static visualization js bundle + command: build-static-viz - persist_to_workspace: root: /home/circleci/ paths: @@ -543,7 +568,7 @@ jobs: check-migrations: executor: - clojure-and-node + clojure-and-node-and-browsers steps: - attach-workspace - create-checksum-file: @@ -566,35 +591,51 @@ jobs: ######################################################################################################################## be-deps: - executor: clojure-and-node + executor: clojure-and-node-and-browsers parameters: <<: *Params steps: - attach-workspace - # This step is pretty slow, even with the cache, so only run it if project.clj has changed - # TODO -- we should cache the build script deps as well, and driver deps? 
+ # This step is pretty slow, even with the cache, so only run it if deps.edn has changed - run-on-change: - checksum: '{{ checksum "project.clj" }}-{{ checksum ".SCRIPTS-DEPS-CHECKSUMS" }}' + checksum: 'v5-{{ checksum "deps.edn" }}-{{ checksum ".SCRIPTS-DEPS-CHECKSUMS" }}' steps: - restore-be-deps-cache - - run: lein with-profile +include-all-drivers,+cloverage,+junit,+dev,+<< parameters.edition >> deps - - run: | - cd /home/circleci/metabase/metabase/bin/build-mb && clojure -P + - run: + name: Compile Java source file(s) + command: clojure -X:deps prep + - run: + name: Compile driver AOT namespaces + command: cd modules/drivers && clojure -X:deps prep + - run: + name: Fetch dependencies + command: clojure -P -X:dev:ci:ee:ee-dev:drivers:drivers-dev + - run: + name: Fetch dependencies (./bin/build/build-mb) + command: cd /home/circleci/metabase/metabase/bin/build-mb && clojure -P -M:test + # Not sure why this is needed since you would think build-mb would fetch this stuff as well. It doesn't + # seem to fetch everything tho. 
:shrug: + - run: + name: Fetch dependencies (./bin/build/build-drivers) + command: cd /home/circleci/metabase/metabase/bin/build-drivers && clojure -P -M:test - save_cache: name: Cache backend dependencies <<: *CacheKeyBackendDeps paths: - /home/circleci/.m2 + - /home/circleci/.gitlibs + - /home/circleci/metabase/metabase/java/target/classes + - /home/circleci/metabase/metabase/modules/drivers/sparksql/target/classes - lein: + clojure: parameters: e: type: executor - default: clojure-and-node + default: clojure-and-node-and-browsers before-steps: type: steps default: [] - lein-command: + clojure-args: type: string after-steps: type: steps @@ -612,22 +653,22 @@ jobs: - run-on-change: checksum: '{{ checksum ".BACKEND-CHECKSUMS" }}' steps: - - run-lein-command: + - run-clojure-command: before-steps: << parameters.before-steps >> - lein-command: << parameters.lein-command >> + clojure-args: << parameters.clojure-args >> after-steps: << parameters.after-steps >> edition: << parameters.edition >> - unless: condition: << parameters.skip-when-no-change >> steps: - - run-lein-command: + - run-clojure-command: before-steps: << parameters.before-steps >> - lein-command: << parameters.lein-command >> + clojure-args: << parameters.clojure-args >> after-steps: << parameters.after-steps >> edition: << parameters.edition >> be-linter-reflection-warnings: - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - attach-workspace - run-on-change: @@ -643,7 +684,7 @@ jobs: parameters: e: type: executor - default: clojure-and-node + default: clojure-and-node-and-browsers driver: type: string timeout: @@ -652,6 +693,9 @@ jobs: before-steps: type: steps default: [] + after-steps: + type: steps + default: [] description: type: string default: "" @@ -671,13 +715,14 @@ jobs: name: Test << parameters.driver >> driver << parameters.description >> environment: DRIVERS: << parameters.driver >> - command: << parameters.extra-env >> lein with-profile +ci,+junit,+ee test + 
command: << parameters.extra-env >> clojure -X:dev:ci:ee:ee-dev:drivers:drivers-dev:test no_output_timeout: << parameters.timeout >> - store_test_results: path: /home/circleci/metabase/metabase/target/junit + - steps: << parameters.after-steps >> test-build-scripts: - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - attach-workspace - run-on-change: @@ -721,7 +766,7 @@ jobs: ######################################################################################################################## fe-deps: - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - attach-workspace # This step is *really* slow, so we can skip it if yarn.lock hasn't changed since last time we ran it @@ -731,7 +776,7 @@ jobs: - restore-fe-deps-cache - run: name: Run yarn to install deps - command: yarn; + command: rm -rf node_modules/ && yarn --frozen-lockfile; no_output_timeout: 15m - save_cache: name: Cache frontend dependencies @@ -743,7 +788,7 @@ jobs: - /home/circleci/.cache/Cypress shared-tests-cljs: - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - run-yarn-command: command-name: Run Cljs tests for shared/ code @@ -753,7 +798,7 @@ jobs: # Unlike the other build-uberjar steps, this step should be run once overall and the results can be shared between # OSS and EE uberjars. 
build-uberjar-drivers: - executor: clojure-and-node + executor: clojure-and-node-and-browsers parameters: <<: *Params steps: @@ -787,25 +832,14 @@ jobs: name: Cache the built drivers <<: *CacheKeyDrivers paths: - - /home/circleci/metabase/metabase/modules/drivers/bigquery/target - - /home/circleci/metabase/metabase/modules/drivers/druid/target - - /home/circleci/metabase/metabase/modules/drivers/google/target - - /home/circleci/metabase/metabase/modules/drivers/googleanalytics/target - - /home/circleci/metabase/metabase/modules/drivers/mongo/target - - /home/circleci/metabase/metabase/modules/drivers/oracle/target - - /home/circleci/metabase/metabase/modules/drivers/presto/target - - /home/circleci/metabase/metabase/modules/drivers/redshift/target - - /home/circleci/metabase/metabase/modules/drivers/snowflake/target - - /home/circleci/metabase/metabase/modules/drivers/sparksql/target - - /home/circleci/metabase/metabase/modules/drivers/sqlite/target - - /home/circleci/metabase/metabase/modules/drivers/sqlserver/target - - /home/circleci/metabase/metabase/modules/drivers/vertica/target + - /home/circleci/metabase/metabase/resources/modules # Build the frontend client. parameters.edition determines whether we build the OSS or EE version. build-uberjar-frontend: parameters: <<: *Params - executor: clojure-and-node + executor: clojure-and-node-and-browsers + resource_class: large steps: - attach-workspace - run-on-change: @@ -829,7 +863,7 @@ jobs: build-uberjar: parameters: <<: *Params - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - attach-workspace - run-on-change: @@ -857,7 +891,7 @@ jobs: # INTERACTIVE=false will tell the clojure build scripts not to do interactive retries etc. 
INTERACTIVE: "false" MB_EDITION: << parameters.edition >> - command: ./bin/build version drivers uberjar + command: ./bin/build version uberjar no_output_timeout: 15m - store_artifacts: path: /home/circleci/metabase/metabase/target/uberjar/metabase.jar @@ -880,9 +914,15 @@ jobs: source-folder: type: string default: "" + folder: + type: string + default: "" test-files: type: string default: "" + qa-db: + type: boolean + default: false before-steps: type: steps default: [] @@ -891,6 +931,7 @@ jobs: environment: MB_EDITION: << parameters.edition >> CYPRESS_GROUP: << parameters.cypress-group >> + QA_DB_ENABLED: << parameters.qa-db >> DISPLAY: "" steps: - attach-workspace @@ -904,8 +945,9 @@ jobs: name: Restore cached uberjar built in previous step <<: *CacheKeyUberjar - steps: << parameters.before-steps >> + # Make both `test-files` and `source-folder` parameters optional. Translates to: if `parameter` => run associated flag (`--spec` and `--folder`, respectively) command: | - run test-cypress-no-build <<# parameters.test-files >> --spec << parameters.test-files >> <> --folder << parameters.source-folder >> + run test-cypress-no-build <<# parameters.test-files >> --spec << parameters.test-files >> <> <<# parameters.source-folder >> --folder << parameters.source-folder >> <> after-steps: - store_artifacts: path: /home/circleci/metabase/metabase/cypress @@ -925,402 +967,485 @@ default_matrix: &Matrix workflows: version: 2 - build: + # build: + # jobs: + # - checkout + + # - check-migrations: + # requires: + # - checkout + + # - be-deps: + # requires: + # - checkout + + # - clojure: + # name: be-tests-<< matrix.edition >> + # requires: + # - be-deps + # e: java-8 + # clojure-args: -X:dev:ci:test + # skip-when-no-change: true + # <<: *Matrix + + # - clojure: + # name: be-tests-java-11-<< matrix.edition >> + # requires: + # - be-deps + # e: java-11 + # clojure-args: -X:dev:ci:test + # skip-when-no-change: true + # <<: *Matrix + + # - clojure: + # name: be-tests-java-16-<< 
matrix.edition >> + # requires: + # - be-deps + # e: java-16 + # clojure-args: -X:dev:ci:test + # skip-when-no-change: true + # <<: *Matrix + + # - clojure: + # name: be-linter-cloverage + # requires: + # - be-deps + # # TODO FIXME + # clojure-args: -X:dev:ee:ee-dev:test:cloverage + # after-steps: + # - run: + # name: Upload code coverage to codecov.io + # command: bash <(curl -s https://codecov.io/bash) -F back-end + + # skip-when-no-change: true + + # - test-driver: + # name: be-tests-bigquery-ee + # requires: + # - be-tests-ee + # driver: bigquery + + # - test-driver: + # name: be-tests-bigquery-cloud-sdk-ee + # requires: + # - be-tests-ee + # driver: bigquery-cloud-sdk + + # - test-driver: + # name: be-tests-druid-ee + # requires: + # - be-tests-ee + # e: druid + # driver: druid + + # - test-driver: + # name: be-tests-googleanalytics-ee + # requires: + # - be-tests-ee + # driver: googleanalytics + + # - test-driver: + # name: be-tests-mongo-ee + # description: "(Mongo 4.0)" + # requires: + # - be-tests-ee + # e: mongo-4-0 + # driver: mongo + + # - test-driver: + # name: be-tests-mongo-latest-ee + # description: "(Mongo latest)" + # requires: + # - be-tests-ee + # e: mongo-latest + # driver: mongo + + # - test-driver: + # name: be-tests-mysql-ee + # description: "(MySQL 5.7)" + # requires: + # - be-tests-ee + # e: + # name: mysql-5-7 + # driver: mysql + + # - test-driver: + # name: be-tests-mysql-latest-ee + # description: "(MySQL latest)" + # requires: + # - be-tests-ee + # e: + # name: mysql-latest + # driver: mysql + # # set up env vars for something named "MYSQL_SSL" to run MySQL SSL tests verifying connectivity with PEM cert + # # they are deliberately given a different name to prevent them from affecting the regular test run against + # # the configured MySQL instance, but there is one particular test (mysql-connect-with-ssl-and-pem-cert-test) + # # that overrides the MB_MYSQL_TEST_* values with them + # # the MYSQL_RDS_SSL_INSTANCE vars are secret and/or 
changeable, so they are defined in the CircleCI settings + # extra-env: >- + # MB_MYSQL_SSL_TEST_HOST=$MYSQL_RDS_SSL_INSTANCE_HOST + # MB_MYSQL_SSL_TEST_SSL=true + # MB_MYSQL_SSL_TEST_ADDITIONAL_OPTIONS='verifyServerCertificate=true' + # MB_MYSQL_SSL_TEST_SSL_CERT="$(cat /home/circleci/metabase/metabase/resources/certificates/rds-combined-ca-bundle.pem)" + # MB_MYSQL_SSL_TEST_USER=metabase + # MB_MYSQL_SSL_TEST_PASSWORD=$MYSQL_RDS_SSL_INSTANCE_PASSWORD + + # - test-driver: + # name: be-tests-mariadb-ee + # description: "(MariaDB 10.2)" + # requires: + # - be-tests-ee + # e: + # name: mariadb-10-2 + # driver: mysql + + # - test-driver: + # name: be-tests-mariadb-latest-ee + # description: "(MariaDB latest)" + # requires: + # - be-tests-ee + # e: + # name: mariadb-latest + # driver: mysql + + # - test-driver: + # name: be-tests-oracle-ee + # requires: + # - be-tests-ee + # before-steps: + # - fetch-jdbc-driver: + # source: ORACLE_JDBC_JAR + # dest: ojdbc8.jar + # driver: oracle + # extra-env: >- + # MB_ORACLE_SSL_TEST_SSL=true + # MB_ORACLE_SSL_TEST_PORT=2484 + # JVM_OPTS="-Djavax.net.ssl.trustStore=/home/circleci/metabase/metabase/resources/certificates/cacerts_with_RDS_root_ca.jks + # -Djavax.net.ssl.trustStoreType=JKS + # -Djavax.net.ssl.trustStorePassword=metabase $JAVA_OPTS" + + # - test-driver: + # name: be-tests-postgres-ee + # description: "(9.6)" + # requires: + # - be-tests-ee + # e: postgres-9-6 + # driver: postgres + + # - test-driver: + # name: be-tests-postgres-latest-ee + # description: "(Latest)" + # requires: + # - be-tests-ee + # e: postgres-latest + # driver: postgres + + # - test-driver: + # name: be-tests-presto-ee + # requires: + # - be-tests-ee + # e: presto-186 + # before-steps: + # - wait-for-port: + # port: 8080 + # driver: presto + + # - test-driver: + # name: be-tests-presto-jdbc-ee + # requires: + # - be-tests-ee + # e: presto-jdbc-env # specific env for running Presto JDBC tests (newer Presto version, SSL, etc.) 
+ # before-steps: + # - wait-for-port: + # port: 8443 + # - run: + # name: Create temp cacerts file based on bundled JDK one + # command: cp $JAVA_HOME/lib/security/cacerts /tmp/cacerts-with-presto-ssl.jks + # - run: + # name: Capture Presto server self signed CA + # command: | + # while [[ ! -s /tmp/presto-ssl-ca.pem ]]; + # do echo "Waiting to capture SSL CA" \ + # && openssl s_client -connect localhost:8443 2>/dev/null /tmp/presto-ssl-ca.pem \ + # && sleep 1; done + # - run: + # name: Convert Presto CA from PEM to DER + # command: openssl x509 -outform der -in /tmp/presto-ssl-ca.pem -out /tmp/presto-ssl-ca.der + # - run: + # name: Import Presto CA into temp cacerts file + # command: | + # sudo keytool -noprompt -import -alias presto -keystore /tmp/cacerts-with-presto-ssl.jks \ + # -storepass changeit -file /tmp/presto-ssl-ca.der -trustcacerts + # after-steps: + # - run: + # name: Capture max memory usage + # command: cat /sys/fs/cgroup/memory/memory.max_usage_in_bytes + # when: always + # driver: presto-jdbc + + # - test-driver: + # name: be-tests-redshift-ee + # requires: + # - be-tests-ee + # driver: redshift + # timeout: 15m + + # - test-driver: + # name: be-tests-snowflake-ee + # requires: + # - be-tests-ee + # driver: snowflake + # timeout: 115m + + # - test-driver: + # name: be-tests-sparksql-ee + # requires: + # - be-tests-ee + # e: sparksql + # before-steps: + # - wait-for-port: + # port: 10000 + # driver: sparksql + + # - test-driver: + # name: be-tests-sqlite-ee + # requires: + # - be-tests-ee + # driver: sqlite + + # - test-driver: + # name: be-tests-sqlserver-ee + # requires: + # - be-tests-ee + # e: sqlserver + # driver: sqlserver + + # - test-driver: + # name: be-tests-vertica-ee + # requires: + # - be-tests-ee + # e: vertica + # before-steps: + # - fetch-jdbc-driver: + # source: VERTICA_JDBC_JAR + # dest: vertica-jdbc-7.1.2-0.jar + # driver: vertica + + # - test-build-scripts: + # requires: + # - be-deps + + # - build-uberjar-drivers: + # name: 
build-uberjar-drivers-<< matrix.edition >> + # requires: + # - be-deps + # <<: *Matrix + + # - build-uberjar-frontend: + # name: build-uberjar-frontend-<< matrix.edition >> + # requires: + # - fe-deps + # <<: *Matrix + + # - build-uberjar: + # name: build-uberjar-<< matrix.edition >> + # requires: + # - build-uberjar-drivers-<< matrix.edition >> + # - build-uberjar-frontend-<< matrix.edition >> + # <<: *Matrix + + # - fe-deps: + # requires: + # - checkout + # - shared-tests-cljs: + # requires: + # - fe-deps + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # folder: ["admin", "binning", "collections", "dashboard", "dashboard-filters", "dashboard-filters-sql", "moderation", "native", "native-filters", "onboarding", "permissions", "question", "sharing", "smoketest", "visualizations"] + # name: e2e-tests-<< matrix.folder >>-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "smoketest-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/smoketest + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-admin-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "admin-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/admin + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-collections-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "collections-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/collections + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-dashboard-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "dashboard-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/dashboard + + # - fe-tests-cypress: + # matrix: + # 
parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-dashboard-filters-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "dashboard-filters-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/dashboard-filters + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-onboarding-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "onboarding-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/onboarding + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-native-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "native-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/native + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-native-filters-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "native-filters-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/native-filters + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-question-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "question-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/question + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-binning-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "binning-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/binning + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-sharing-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: 
"sharing-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/sharing + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-visualizations-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "visualizations-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/visualizations + + # - fe-tests-cypress: + # name: e2e-tests-mongo-4-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # e: fe-mongo-4 + # cypress-group: "mongo" + # source-folder: frontend/test/metabase-db/mongo + # before-steps: + # - wait-for-port: + # port: 27017 + # <<: *Matrix + + # - fe-tests-cypress: + # name: e2e-tests-postgres-12-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # e: fe-postgres-12 + # cypress-group: "postgres" + # source-folder: frontend/test/metabase-db/postgres + # before-steps: + # - wait-for-port: + # port: 5432 + # <<: *Matrix + + # - fe-tests-cypress: + # name: e2e-tests-mysql-8-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # e: fe-mysql-8 + # cypress-group: "mysql" + # source-folder: frontend/test/metabase-db/mysql + # before-steps: + # - wait-for-port: + # port: 3306 + # - wait-for-port: + # port: 5432 + # - wait-for-port: + # port: 27017 + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: percy-visual-tests-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "percy-visual-<< matrix.edition >>" + # test-files: "./frontend/test/metabase-visual/**/*.cy.spec.js" + build_and_push_image: jobs: - - checkout - - - check-migrations: - requires: - - checkout - - - be-deps: - requires: - - checkout - - - lein: - name: be-tests-<< matrix.edition >> - requires: - - be-deps - e: java-8 - lein-command: with-profile +junit test - skip-when-no-change: true - <<: *Matrix - - - lein: - name: 
be-tests-java-11-<< matrix.edition >> - requires: - - be-deps - e: java-11 - lein-command: with-profile +junit test - skip-when-no-change: true - <<: *Matrix - - - lein: - name: be-tests-java-16-<< matrix.edition >> - requires: - - be-deps - e: java-16 - lein-command: with-profile +junit test - skip-when-no-change: true - <<: *Matrix - - - lein: - name: be-linter-cloverage - requires: - - be-deps - lein-command: cloverage --codecov - after-steps: - - run: - name: Upload code coverage to codecov.io - command: bash <(curl -s https://codecov.io/bash) - skip-when-no-change: true - - - test-driver: - name: be-tests-bigquery-ee - requires: - - be-tests-ee - driver: bigquery - - - test-driver: - name: be-tests-druid-ee - requires: - - be-tests-ee - e: druid - driver: druid - - - test-driver: - name: be-tests-googleanalytics-ee - requires: - - be-tests-ee - driver: googleanalytics - - - test-driver: - name: be-tests-mongo-ee - requires: - - be-tests-ee - e: mongo - driver: mongo - - - test-driver: - name: be-tests-mysql-ee - description: "(MySQL 5.7)" - requires: - - be-tests-ee - e: - name: mysql-5-7 - driver: mysql - - - test-driver: - name: be-tests-mysql-latest-ee - description: "(MySQL latest)" - requires: - - be-tests-ee - e: - name: mysql-latest - driver: mysql - # set up env vars for something named "MYSQL_SSL" to run MySQL SSL tests verifying connectivity with PEM cert - # they are deliberately given a different name to prevent them from affecting the regular test run against - # the configured MySQL instance, but there is one particular test (mysql-connect-with-ssl-and-pem-cert-test) - # that overrides the MB_MYSQL_TEST_* values with them - # the MYSQL_RDS_SSL_INSTANCE vars are secret and/or changeable, so they are defined in the CircleCI settings - extra-env: >- - MB_MYSQL_SSL_TEST_HOST=$MYSQL_RDS_SSL_INSTANCE_HOST - MB_MYSQL_SSL_TEST_SSL=true - MB_MYSQL_SSL_TEST_ADDITIONAL_OPTIONS='verifyServerCertificate=true' - MB_MYSQL_SSL_TEST_SSL_CERT="$(cat 
/home/circleci/metabase/metabase/resources/certificates/rds-combined-ca-bundle.pem)" - MB_MYSQL_SSL_TEST_USER=metabase - MB_MYSQL_SSL_TEST_PASSWORD=$MYSQL_RDS_SSL_INSTANCE_PASSWORD - - - test-driver: - name: be-tests-mariadb-ee - description: "(MariaDB 10.2)" - requires: - - be-tests-ee - e: - name: mariadb-10-2 - driver: mysql - - - test-driver: - name: be-tests-mariadb-latest-ee - description: "(MariaDB latest)" - requires: - - be-tests-ee - e: - name: mariadb-latest - driver: mysql - - - test-driver: - name: be-tests-oracle-ee - requires: - - be-tests-ee - before-steps: - - fetch-jdbc-driver: - source: ORACLE_JDBC_JAR - dest: ojdbc8.jar - driver: oracle - extra-env: >- - MB_ORACLE_SSL_TEST_SSL=true - MB_ORACLE_SSL_TEST_PORT=2484 - JVM_OPTS="-Djavax.net.ssl.trustStore=/home/circleci/metabase/metabase/resources/certificates/cacerts_with_RDS_root_ca.jks - -Djavax.net.ssl.trustStoreType=JKS - -Djavax.net.ssl.trustStorePassword=metabase $JAVA_OPTS" - - - test-driver: - name: be-tests-postgres-ee - description: "(9.6)" - requires: - - be-tests-ee - e: postgres-9-6 - driver: postgres - - - test-driver: - name: be-tests-postgres-latest-ee - description: "(Latest)" - requires: - - be-tests-ee - e: postgres-latest - driver: postgres - - - test-driver: - name: be-tests-presto-ee - requires: - - be-tests-ee - e: presto - before-steps: - - wait-for-port: - port: 8080 - driver: presto - - - test-driver: - name: be-tests-redshift-ee - requires: - - be-tests-ee - driver: redshift - timeout: 15m - - - test-driver: - name: be-tests-snowflake-ee - requires: - - be-tests-ee - driver: snowflake - timeout: 115m - - - test-driver: - name: be-tests-sparksql-ee - requires: - - be-tests-ee - e: sparksql - before-steps: - - wait-for-port: - port: 10000 - driver: sparksql - - - test-driver: - name: be-tests-sqlite-ee - requires: - - be-tests-ee - driver: sqlite - - - test-driver: - name: be-tests-sqlserver-ee - requires: - - be-tests-ee - e: sqlserver - driver: sqlserver - - - test-driver: 
- name: be-tests-vertica-ee - requires: - - be-tests-ee - e: vertica - before-steps: - - fetch-jdbc-driver: - source: VERTICA_JDBC_JAR - dest: vertica-jdbc-7.1.2-0.jar - driver: vertica - - - test-build-scripts: - requires: - - be-deps - - - build-uberjar-drivers: - name: build-uberjar-drivers-<< matrix.edition >> - requires: - - be-deps - <<: *Matrix - - - build-uberjar-frontend: - name: build-uberjar-frontend-<< matrix.edition >> - requires: - - fe-deps - <<: *Matrix - - - build-uberjar: - name: build-uberjar-<< matrix.edition >> - requires: - - build-uberjar-drivers-<< matrix.edition >> - - build-uberjar-frontend-<< matrix.edition >> - <<: *Matrix - - - fe-deps: - requires: - - checkout - - shared-tests-cljs: - requires: - - fe-deps - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-smoketest-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "smoketest-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/smoketest - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-admin-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "admin-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/admin - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-collections-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "collections-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/collections - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-dashboard-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "dashboard-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/dashboard - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-dashboard-filters-<< matrix.edition 
>> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "dashboard-filters-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/dashboard-filters - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-onboarding-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "onboarding-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/onboarding - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-native-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "native-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/native - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-native-filters-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "native-filters-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/native-filters - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-question-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "question-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/question - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-binning-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "binning-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/binning - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-sharing-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "sharing-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/sharing - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-visualizations-<< matrix.edition >> 
- requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "visualizations-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/visualizations - - - fe-tests-cypress: - name: e2e-tests-mongo-4-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - e: fe-mongo-4 - cypress-group: "mongo" - source-folder: frontend/test/metabase-db/mongo - before-steps: - - wait-for-port: - port: 27017 - <<: *Matrix - - - fe-tests-cypress: - name: e2e-tests-postgres-12-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - e: fe-postgres-12 - cypress-group: "postgres" - source-folder: frontend/test/metabase-db/postgres - before-steps: - - wait-for-port: - port: 5432 - <<: *Matrix - - - fe-tests-cypress: - name: e2e-tests-mysql-8-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - e: fe-mysql-8 - cypress-group: "mysql" - source-folder: frontend/test/metabase-db/mysql - before-steps: - - wait-for-port: - port: 3306 - <<: *Matrix + - aws-ecr/build_and_push_image: + account_id: ${AWS_ACCOUNT_ID} + auth: + - aws-cli/setup + context: + - org-global + repo: metabase-k8s + extra_build_args: '--compress' + platform: linux/amd64 + push_image: true + region: ${AWS_DEFAULT_REGION} + skip_when_tags_exist: true + tag: latest,${CIRCLE_SHA1} + # tag: latest,${CIRCLE_SHA1},${CIRCLE_TAG} + # filters: + # tags: + # only: /^v0.4*/ diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index a205a5272cdc..2c379de819a4 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -2,7 +2,10 @@ FROM mcr.microsoft.com/vscode/devcontainers/java:11 RUN apt-key adv --refresh-keys --keyserver keyserver.ubuntu.com\ && apt-get update && export DEBIAN_FRONTEND=noninteractive \ - && apt-get -y install --no-install-recommends leiningen yarn + && apt-get -y install --no-install-recommends yarn RUN curl -fsSL https://deb.nodesource.com/setup_14.x | bash -RUN apt-get update && apt-get -y install 
--no-install-recommends nodejs \ No newline at end of file +RUN apt-get update && apt-get -y install --no-install-recommends nodejs + +RUN curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh \ + && bash ./linux-install-1.10.3.933.sh diff --git a/.dir-locals.el b/.dir-locals.el index 72cadc8d07d1..0ea88960076c 100644 --- a/.dir-locals.el +++ b/.dir-locals.el @@ -1,49 +1,78 @@ -((nil . ((indent-tabs-mode . nil) ; always use spaces for tabs - (require-final-newline . t))) ; add final newline on save - (js2-mode . ((js2-mode-show-parse-errors . nil) ; these settings will let flycheck do everything through eslint, - (js2-mode-show-strict-warnings . nil))) ; because js2-mode can't handle flowtype - (clojure-mode . ((eval . (progn - ;; Specify which arg is the docstring for certain macros - ;; (Add more as needed) - (put 'defendpoint 'clojure-doc-string-elt 3) - (put 'defendpoint-async 'clojure-doc-string-elt 3) - (put 'api/defendpoint 'clojure-doc-string-elt 3) - (put 'api/defendpoint-async 'clojure-doc-string-elt 3) - (put 'defsetting 'clojure-doc-string-elt 2) - (put 'setting/defsetting 'clojure-doc-string-elt 2) - (put 's/defn 'clojure-doc-string-elt 2) - (put 'p.types/defprotocol+ 'clojure-doc-string-elt 2) +((nil + ;; always use spaces for tabs + (indent-tabs-mode . nil) + ;; add final newline on save + (require-final-newline . t) + ;; prefer keeping source width about ~118, GitHub seems to cut off stuff at either 119 or 120 and it's nicer + ;; to look at code in GH when you don't have to scroll back and forth + (fill-column . 118) + ;; tell find-things-fast to always use this directory as project root regardless of presence of other + ;; deps.edn files + (ftf-project-finders . (ftf-get-top-git-dir))) - ;; Define custom indentation for functions inside metabase. - ;; This list isn't complete; add more forms as we come across them. - (define-clojure-indent - (db/insert-many! 
1) - (let-404) - (macros/case 0) - (match 1) - (mbql.match/match 1) - (mt/test-drivers 1) - (mt/query 1) - (mbql.match/match-one 1) - (mbql.match/replace 1) - (mbql.match/replace-in 2) - (impl/test-migrations 2) - (l/matche '(1 (:defn))) - (l/matcha '(1 (:defn))) - (p/defprotocol+ '(1 (:defn))) - (p.types/defprotocol+ '(1 (:defn))) - (p.types/def-abstract-type '(1 (:defn))) - (p.types/deftype+ '(2 nil nil (:defn))) - (p/def-map-type '(2 nil nil (:defn))) - (p.types/defrecord+ '(2 nil nil (:defn))) - (qp.streaming/streaming-response 1) - (prop/for-all 1) - (tools.macro/macrolet '(1 (:defn)))))) - (clojure-indent-style . always-align) - ;; if you're using clj-refactor (highly recommended!) - (cljr-favor-prefix-notation . nil) - ;; prefer keeping source width about ~118, GitHub seems to cut off stuff at either 119 or 120 and - ;; it's nicer to look at code in GH when you don't have to scroll back and forth - (fill-column . 118) - (clojure-docstring-fill-column . 118) - (cider-preferred-build-tool . lein)))) + (js2-mode + ;; these settings will let flycheck do everything through eslint, + (js2-mode-show-parse-errors . nil) + ;; because js2-mode can't handle flowtype + (js2-mode-show-strict-warnings . nil)) + + (clojure-mode + ;; Specify which arg is the docstring for certain macros + ;; (Add more as needed) + (eval . (put 'defendpoint 'clojure-doc-string-elt 3)) + (eval . (put 'defendpoint-async 'clojure-doc-string-elt 3)) + (eval . (put 'define-premium-feature 'clojure-doc-string-elt 2)) + (eval . (put 'api/defendpoint 'clojure-doc-string-elt 3)) + (eval . (put 'api/defendpoint-async 'clojure-doc-string-elt 3)) + (eval . (put 'defsetting 'clojure-doc-string-elt 2)) + (eval . (put 'setting/defsetting 'clojure-doc-string-elt 2)) + (eval . (put 's/defn 'clojure-doc-string-elt 2)) + (eval . (put 'p.types/defprotocol+ 'clojure-doc-string-elt 2)) + ;; Define custom indentation for functions inside metabase. 
+ ;; This list isn't complete; add more forms as we come across them. + ;; + ;; `put-clojure-indent' is a safe-local-eval-function, so use a bunch of calls to that + ;; instead of one call to `define-clojure-indent' + (eval . (put-clojure-indent 'c/step 1)) + (eval . (put-clojure-indent 'db/insert-many! 1)) + (eval . (put-clojure-indent 'impl/test-migrations 2)) + (eval . (put-clojure-indent 'let-404 0)) + (eval . (put-clojure-indent 'macros/case 0)) + (eval . (put-clojure-indent 'match 1)) + (eval . (put-clojure-indent 'mbql.match/match 1)) + (eval . (put-clojure-indent 'mbql.match/match-one 1)) + (eval . (put-clojure-indent 'mbql.match/replace 1)) + (eval . (put-clojure-indent 'mbql.match/replace-in 2)) + (eval . (put-clojure-indent 'mt/dataset 1)) + (eval . (put-clojure-indent 'mt/query 1)) + (eval . (put-clojure-indent 'mt/test-drivers 1)) + (eval . (put-clojure-indent 'prop/for-all 1)) + (eval . (put-clojure-indent 'qp.streaming/streaming-response 1)) + ;; these ones have to be done with `define-clojure-indent' for now because of upstream bug + ;; https://github.com/clojure-emacs/clojure-mode/issues/600 once that's resolved we should use `put-clojure-indent' + ;; instead. Please don't add new entries unless they don't work with `put-clojure-indent' + (eval . (define-clojure-indent + (l/matcha '(1 (:defn))) + (l/matche '(1 (:defn))) + (p.types/def-abstract-type '(1 (:defn))) + (p.types/defprotocol+ '(1 (:defn))) + (p.types/defrecord+ '(2 nil nil (:defn))) + (p.types/deftype+ '(2 nil nil (:defn))) + (p/def-map-type '(2 nil nil (:defn))) + (p/defprotocol+ '(1 (:defn))) + (tools.macro/macrolet '(1 ((:defn)) :form)))) + (cider-clojure-cli-aliases . "dev:drivers:drivers-dev:ee:ee-dev:user") + (clojure-indent-style . always-align) + (cljr-favor-prefix-notation . nil) + (clojure-docstring-fill-column . 118) + (cider-preferred-build-tool . clojure-cli)) + + ("shared" + (clojure-mode + (cider-default-cljs-repl . shadow-select) + (cider-shadow-default-options . 
"node-repl") + (cider-preferred-build-tool . shadow-cljs))) + + ("bin" + (clojure-mode + (cider-clojure-cli-aliases . "dev")))) diff --git a/.dockerignore b/.dockerignore index 6891f6c8b2d6..67486beb4266 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,12 +1,25 @@ -.babel_cache/* +.babel_cache docs/* OSX/* target/* - -**node_modules +.circleci +.cpcache +.devcontainer +.github +.husky +.lsp +.shadow-cljs +.github +.vscode +hooks/* +test/* +test_config/* +test_modules/* +test_resources/* +node_modules **metabase.jar *.db -.dockerignore Dockerfile +.dockerignore \ No newline at end of file diff --git a/.eslintrc b/.eslintrc index 633392d26eaa..d43f54802017 100644 --- a/.eslintrc +++ b/.eslintrc @@ -21,13 +21,13 @@ "react/no-unescaped-entities": 2, "react/jsx-no-target-blank": 2, "react/jsx-key": 2, + "react/forbid-component-props": [2, { "forbid": ["w", "h"] }], "prefer-const": [1, { "destructuring": "all" }], "no-useless-escape": 0, "no-only-tests/no-only-tests": "error", "complexity": ["error", { "max": 54 }] }, "globals": { - "pending": false, "before": true, "cy": true, "Cypress": true diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 5e1d2d0bad58..ab16313bc1dc 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -5,7 +5,6 @@ ### Tests - [ ] Run the frontend and Cypress end-to-end tests with `yarn lint && yarn test`) -- [ ] If there are changes to the backend codebase, run the backend tests with `lein test && lein lint && ./bin/reflection-linter` - +- [ ] If there are changes to the backend codebase, run the backend tests with `clojure -X:dev:test` - [ ] Sign the [Contributor License Agreement](https://docs.google.com/a/metabase.com/forms/d/1oV38o7b9ONFSwuzwmERRMi9SYrhYeOrkbmNaq9pOJ_E/viewform) (unless it's a tiny documentation change). 
diff --git a/.github/scripts/README.md b/.github/scripts/README.md new file mode 100644 index 000000000000..faaf8e036f03 --- /dev/null +++ b/.github/scripts/README.md @@ -0,0 +1,3 @@ +# GitHub Action Test Scripts + +Scripts related to running integration tests (ex: through GitHub actions) diff --git a/.github/scripts/run-presto-kerberos-integration-test.sh b/.github/scripts/run-presto-kerberos-integration-test.sh new file mode 100755 index 000000000000..7e2aac1e7c3e --- /dev/null +++ b/.github/scripts/run-presto-kerberos-integration-test.sh @@ -0,0 +1,68 @@ +#! /usr/bin/env bash +# runs one or more Metabase test(s) against a Kerberized Presto instance +set -eo pipefail + +# Need Java commands on $PATH, which apparently is not yet the case +export PATH="$PATH:$JAVA_HOME/bin" + +# ensure java command is available +which java + +# install clojure version needed for Metabase +curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh +chmod +x linux-install-1.10.3.933.sh +./linux-install-1.10.3.933.sh + +RESOURCES_DIR=/app/source/resources + +# ensure the expected files are in place, in the resources dir +if [ ! -f "$RESOURCES_DIR/ssl_keystore.jks" ]; then + echo "$RESOURCES_DIR/ssl_keystore.jks does not exist; cannot run test" >&2 + exit 11 +fi + +if [ ! -f "$RESOURCES_DIR/krb5.conf" ]; then + echo "$RESOURCES_DIR/krb5.conf does not exist; cannot run test" >&2 + exit 12 +fi + +if [ ! 
-f "$RESOURCES_DIR/client.keytab" ]; then + echo "$RESOURCES_DIR/client.keytab does not exist; cannot run test" >&2 + exit 13 +fi + +# Copy the JDK cacerts file to our resources +cp $JAVA_HOME/lib/security/cacerts $RESOURCES_DIR/cacerts-with-presto-ca.jks + +# Capture the Presto server self signed CA in PEM format +openssl s_client -showcerts -connect presto-kerberos:7778 $RESOURCES_DIR/presto-ssl-root-ca.pem + +# Convert the Presto server self signed CA to DER format +openssl x509 -outform der -in $RESOURCES_DIR/presto-ssl-root-ca.pem -out $RESOURCES_DIR/presto-ssl-root-ca.der + +# Add Presto's self signed CA to the truststore +keytool -noprompt -import -alias presto-kerberos -keystore $RESOURCES_DIR/cacerts-with-presto-ca.jks \ + -storepass changeit -file $RESOURCES_DIR/presto-ssl-root-ca.der -trustcacerts + +ADDITIONAL_OPTS="SSLKeyStorePath=$RESOURCES_DIR/ssl_keystore.jks&SSLKeyStorePassword=presto\ +&SSLTrustStorePath=$RESOURCES_DIR/cacerts-with-presto-ca.jks&SSLTrustStorePassword=changeit" + +# Prepare dependencies +source "./bin/prep.sh" +prep_deps + +# Set up the environment variables pointing to all of this, and run some tests +DRIVERS=presto-jdbc \ +MB_ENABLE_PRESTO_JDBC_DRIVER=true \ +MB_PRESTO_JDBC_TEST_HOST=presto-kerberos \ +MB_PRESTO_JDBC_TEST_PORT=7778 \ +MB_PRESTO_JDBC_TEST_SSL=true \ +MB_PRESTO_JDBC_TEST_KERBEROS=true \ +MB_PRESTO_JDBC_TEST_USER=bob@EXAMPLE.COM \ +MB_PRESTO_JDBC_TEST_KERBEROS_PRINCIPAL=bob@EXAMPLE.COM \ +MB_PRESTO_JDBC_TEST_KERBEROS_REMOTE_SERVICE_NAME=HTTP \ +MB_PRESTO_JDBC_TEST_KERBEROS_KEYTAB_PATH=$RESOURCES_DIR/client.keytab \ +MB_PRESTO_JDBC_TEST_KERBEROS_CONFIG_PATH=$RESOURCES_DIR/krb5.conf \ +MB_PRESTO_JDBC_TEST_ADDITIONAL_OPTIONS=$ADDITIONAL_OPTS \ +clojure -X:dev:test:drivers:drivers-dev :only metabase.driver.presto-jdbc-test diff --git a/.github/workflows/auto-backport.yml b/.github/workflows/auto-backport.yml new file mode 100644 index 000000000000..21eb7454345b --- /dev/null +++ b/.github/workflows/auto-backport.yml @@ 
-0,0 +1,131 @@ +# Creates a pull request with the latest release branch as a target with a cherry-picked commit if an associated pull request has `backport` label +name: AutoBackport + +on: + push: + branches: + - master + +jobs: + pr_info: + name: Check if the commit should be backported + runs-on: ubuntu-latest + outputs: + title: ${{ fromJson(steps.collect_pr_info.outputs.result).title }} + number: ${{ fromJson(steps.collect_pr_info.outputs.result).pullRequestNumber }} + author: ${{ fromJson(steps.collect_pr_info.outputs.result).author }} + should_backport: ${{ fromJson(steps.collect_pr_info.outputs.result).hasBackportLabel }} + steps: + - uses: actions/github-script@v4 + id: collect_pr_info + with: + script: | + const commitMessage = context.payload.commits[0].message; + const pullRequestNumbers = Array.from(commitMessage.matchAll(/\(#(.*?)\)/g)) + + if (pullRequestNumbers.length === 0) { + return; + } + + if (pullRequestNumbers.length > 1) { + throw "Multiple PRs are associated with this commit"; + } + + const pullRequestNumber = pullRequestNumbers[0][1]; + + const { data } = await github.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: pullRequestNumber + }); + + const hasBackportLabel = data.labels.some((label) => label.name === 'backport'); + const { title, user } = data + + console.log(`PR #${pullRequestNumber}: "${title}" hasBackportLabel=${hasBackportLabel}`) + + return { + author: user.login, + pullRequestNumber, + title: data.title, + hasBackportLabel + } + + get_latest_release_branch: + name: Get latest release branch + runs-on: ubuntu-latest + outputs: + branch_name: ${{ steps.get_branch_name.outputs.result }} + steps: + - uses: actions/github-script@v4 + id: get_branch_name + with: + result-encoding: string + script: | + const releaseBranches = await github.git.listMatchingRefs({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: "heads/release-x.", + }); + + const getVersionFromBranch = branch => { + const 
match = branch.match(/release-x\.(.*?)\.x/); + return match && parseInt(match[1]) + }; + const latestReleaseBranch = releaseBranches.data + .filter(branch => getVersionFromBranch(branch.ref) !== null) + .reduce((prev, current) => getVersionFromBranch(prev.ref) > getVersionFromBranch(current.ref) ? prev : current); + const latestReleaseBranchName = latestReleaseBranch.ref.replace(/^refs\/heads\//, ""); + + console.log(`Latest release branch: ${latestReleaseBranchName}`) + + return latestReleaseBranchName; + + create_backport_pull_request: + runs-on: ubuntu-latest + name: Create a backport PR with the commit + needs: [pr_info, get_latest_release_branch] + if: ${{ needs.pr_info.outputs.should_backport == 'true' }} + env: + TARGET_BRANCH: ${{ needs.get_latest_release_branch.outputs.branch_name }} + ORIGINAL_PULL_REQUEST_NUMBER: ${{ needs.pr_info.outputs.number }} + ORIGINAL_TITLE: ${{ needs.pr_info.outputs.title }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v2 + name: Checkout + with: + fetch-depth: 0 + - run: | + git config --global user.email "metabase-github-automation@metabase.com" + git config --global user.name "$GITHUB_ACTOR" + + BACKPORT_BRANCH="backport-$GITHUB_SHA" + + git fetch --all + git checkout -b "${BACKPORT_BRANCH}" origin/"${TARGET_BRANCH}" + git cherry-pick "${GITHUB_SHA}" + git push -u origin "${BACKPORT_BRANCH}" + + hub pull-request -b "${TARGET_BRANCH}" -h "${BACKPORT_BRANCH}" -l "auto-backported" -a "${GITHUB_ACTOR}" -F- <<<"🤖 backported \"${ORIGINAL_TITLE}\" + + #${ORIGINAL_PULL_REQUEST_NUMBER}" + + notify_when_failed: + runs-on: ubuntu-latest + name: Notify about failure + needs: [pr_info, create_backport_pull_request] + if: ${{ failure() }} + steps: + - uses: actions/github-script@v4 + with: + script: | + const { GITHUB_SERVER_URL, GITHUB_REPOSITORY, GITHUB_RUN_ID } = process.env; + const runUrl = `${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}` + + github.issues.createComment({ + 
issue_number: ${{ needs.pr_info.outputs.number }}, + owner: context.repo.owner, + repo: context.repo.repo, + body: `@${{ needs.pr_info.outputs.author }} could not automatically create a backport PR 😩 [[Logs]](${runUrl})` + }) diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 9660e0511426..0b050a73ea88 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -4,13 +4,9 @@ on: pull_request: push: branches: - - master - - 'release**' - - 'feature**' - tags: - '**' paths: - - '**.clj' + - '**.clj*' - '**.edn' - '**.java' - '**/metabase-plugin.yaml' @@ -26,53 +22,31 @@ jobs: - name: Run clj-kondo run: docker run -v $PWD:/work --rm cljkondo/clj-kondo clj-kondo --config /work/lint-config.edn --lint /work/src /work/enterprise/backend/src /work/backend/mbql/src /work/shared/src - be-linter-bikeshed: - runs-on: ubuntu-20.04 - timeout-minutes: 10 - steps: - - uses: actions/checkout@v2 - - name: Prepare JDK 11 - uses: actions/setup-java@v1 - with: - java-version: 11 - - name: Get M2 cache - uses: actions/cache@v2 - with: - path: ~/.m2 - key: ${{ runner.os }}-bikeshed-${{ hashFiles('**/project.clj') }} - - run: lein with-profile +ci bikeshed - be-linter-eastwood: runs-on: ubuntu-20.04 - timeout-minutes: 10 + timeout-minutes: 20 steps: - uses: actions/checkout@v2 - name: Prepare JDK 11 uses: actions/setup-java@v1 with: java-version: 11 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh - name: Get M2 cache uses: actions/cache@v2 with: - path: ~/.m2 - key: ${{ runner.os }}-eastwood-${{ hashFiles('**/project.clj') }} - - run: lein with-profile +ci eastwood - - be-linter-docstring-checker: - runs-on: ubuntu-20.04 - timeout-minutes: 10 - steps: - - uses: actions/checkout@v2 - - name: Prepare JDK 11 - uses: actions/setup-java@v1 - with: - java-version: 11 - - name: Get M2 cache - uses: actions/cache@v2 - with: - path: 
~/.m2 - key: ${{ runner.os }}-docstring-checker-${{ hashFiles('**/project.clj') }} - - run: lein with-profile +ci docstring-checker + path: | + ~/.m2 + ~/.gitlibs + key: ${{ runner.os }}-eastwood-${{ hashFiles('**/deps.edn') }} + - name: Compile Java & AOT Sources + run: | + source ./bin/prep.sh && prep_deps + - run: clojure -X:dev:ee:ee-dev:drivers:drivers-dev:eastwood + name: Run Eastwood linter be-linter-namespace-decls: runs-on: ubuntu-20.04 @@ -83,26 +57,19 @@ jobs: uses: actions/setup-java@v1 with: java-version: 11 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh - name: Get M2 cache uses: actions/cache@v2 with: - path: ~/.m2 - key: ${{ runner.os }}-namespace-decls-${{ hashFiles('**/project.clj') }} - - run: lein with-profile +ci check-namespace-decls - - be-linter-reflection-warnings: - runs-on: ubuntu-20.04 - timeout-minutes: 10 - steps: - - uses: actions/checkout@v2 - - name: Prepare JDK 11 - uses: actions/setup-java@v1 - with: - java-version: 11 - - name: Get M2 cache - uses: actions/cache@v2 - with: - path: ~/.m2 - key: ${{ runner.os }}-reflection-warnings-${{ hashFiles('**/project.clj') }} - - run: ./bin/reflection-linter - name: Run reflection warnings checker + path: | + ~/.m2 + ~/.gitlibs + key: ${{ runner.os }}-namespace-decls-${{ hashFiles('**/deps.edn') }} + - name: Compile Java & AOT Sources + run: | + source ./bin/prep.sh && prep_deps + - run: clojure -X:dev:ee:ee-dev:drivers:drivers-dev:namespace-checker + name: Check ns forms diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml new file mode 100644 index 000000000000..d0c6466bd219 --- /dev/null +++ b/.github/workflows/backport.yml @@ -0,0 +1,131 @@ +# Cherry-picks commits from current branch to a specified one in a command "@metabase-bot backport release-x.40.x" +name: Backport + +on: + issue_comment: + types: [created] + +jobs: + create_pull_request: + name: 
Creates a pull request + if: contains(github.event.comment.body, '@metabase-bot backport') + runs-on: ubuntu-latest + steps: + - uses: actions/github-script@v4 + id: branch_info + with: + script: | + // Example: @metabase-bot backport release-x.40.x + const [_botName, _command, targetBranch] = context.payload.comment.body.split(" "); + console.log(`Target branch is ${targetBranch}`); + + const { data: originalPullRequest } = await github.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.issue.number, + }); + + const { data: commits } = await github.pulls.listCommits({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.issue.number, + }); + + const targetRef = await github.git.getRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: `heads/${targetBranch}`, + }); + + const backportBranch = `backport-${originalPullRequest.head.ref}` + + try { + await github.git.getRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: `heads/${backportBranch}`, + }); + } catch(e) { + if (e.status === 404) { + await github.git.createRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: `refs/heads/${backportBranch}`, + sha: targetRef.data.object.sha, + }); + } + } + + return { + backportBranch, + targetBranch, + originalPullRequest, + startSha: commits[0].sha, + endSha: commits[commits.length - 1].sha + } + - uses: actions/checkout@v2 + name: Cherry-pick commits and create PR + with: + fetch-depth: 0 + - run: | + git config --global user.email "metabase-github-automation@metabase.com" + git config --global user.name "$GITHUB_ACTOR" + + git fetch --all + + git checkout "${BACKPORT_BRANCH}" + git reset --hard origin/${TARGET_BRANCH} + + if [[ -z $(git ls-remote --heads origin ${ORIGINAL_HEAD_REF}) ]]; then + echo "PR has been merged, searching for a squashed commit in the base branch" + echo "searching for a commit in a ${ORIGINAL_BASE_REF} that contains pull 
request number ${ORIGINAL_PULL_REQUEST_NUMBER}" + SQUASHED_COMMIT=$(env -i git log ${ORIGINAL_BASE_REF} --grep="(#${ORIGINAL_PULL_REQUEST_NUMBER})" --format="%H") + echo "found commit ${SQUASHED_COMMIT}" + git cherry-pick ${SQUASHED_COMMIT} + else + echo "PR has not been merged, copying all commits" + git cherry-pick ${ORIGINAL_BASE_SHA}..${ORIGINAL_HEAD_SHA} + fi + + git push origin "${BACKPORT_BRANCH}" --force-with-lease + + if [[ $(hub pr list -b "${TARGET_BRANCH}" -h "${BACKPORT_BRANCH}" -s "open") ]]; then + echo "PR already exists" + else + hub pull-request -b "${TARGET_BRANCH}" -h "${BACKPORT_BRANCH}" -l "auto-backported" -a "${GITHUB_ACTOR}" -F- <<<"🤖 backported \"${ORIGINAL_TITLE}\" + + #${ORIGINAL_PULL_REQUEST_NUMBER}" + echo "New PR has been created" + fi + env: + ORIGINAL_TITLE: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.title }} + ORIGINAL_BASE_REF: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.base.ref }} + ORIGINAL_BASE_SHA: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.base.sha }} + ORIGINAL_HEAD_REF: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.head.ref }} + ORIGINAL_HEAD_SHA: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.head.sha }} + ORIGINAL_PULL_REQUEST_NUMBER: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.number }} + TARGET_BRANCH: ${{ fromJson(steps.branch_info.outputs.result).targetBranch }} + BACKPORT_BRANCH: ${{ fromJson(steps.branch_info.outputs.result).backportBranch }} + START_SHA: ${{ fromJson(steps.branch_info.outputs.result).startSha }} + END_SHA: ${{ fromJson(steps.branch_info.outputs.result).endSha }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + notify_when_failed: + runs-on: ubuntu-latest + name: Notify about failure + needs: create_pull_request + if: ${{ failure() }} + steps: + - uses: actions/github-script@v4 + with: + script: | + const { GITHUB_SERVER_URL, GITHUB_REPOSITORY, GITHUB_RUN_ID} = 
process.env; + const runUrl = `${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}` + const author = context.payload.comment.user.login; + + github.issues.createComment({ + issue_number: context.payload.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: `@${author} could not automatically a backport PR 😩 [[Logs]](${runUrl})` + }) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index c2f730b1f135..7191522823b5 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -23,4 +23,4 @@ jobs: run: while ! curl -s localhost:3000/api/health; do sleep 1; done timeout-minutes: 1 - name: Check API health - run: curl -s localhost:3000/api/health \ No newline at end of file + run: curl -s localhost:3000/api/health diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml index 4ebee9f2ac2f..aa9c701958ea 100644 --- a/.github/workflows/frontend.yml +++ b/.github/workflows/frontend.yml @@ -4,12 +4,6 @@ on: pull_request: push: branches: - - master - - 'release**' - - 'feature**' - - 'fix**' - - 'ci**' - tags: - '**' paths: - 'frontend/**' @@ -18,6 +12,7 @@ on: - 'docs/**' - '**/package.json' - '**/yarn.lock' + - '**/.eslintrc' - '.github/workflows/**' jobs: @@ -49,6 +44,11 @@ jobs: uses: actions/setup-node@v1 with: node-version: 14.x + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-cljs-${{ hashFiles('**/shadow-cljs.edn') }} - name: Get yarn cache uses: actions/cache@v2 with: @@ -78,21 +78,31 @@ jobs: fe-tests-unit: runs-on: ubuntu-20.04 - timeout-minutes: 10 + timeout-minutes: 12 steps: - uses: actions/checkout@v2 - name: Prepare Node.js uses: actions/setup-node@v1 with: node-version: 14.x + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-cljs-${{ hashFiles('**/shadow-cljs.edn') }} - name: Get yarn cache uses: actions/cache@v2 with: path: ~/.cache/yarn key: ${{ runner.os }}-yarn-${{ 
hashFiles('**/yarn.lock') }} - run: yarn install --frozen-lockfile --prefer-offline - - run: yarn run test-unit + - run: yarn run test-unit --coverage --silent name: Run frontend unit tests + - name: Upload coverage to codecov.io + uses: codecov/codecov-action@v2 + with: + files: ./coverage/lcov.info + flags: front-end fe-tests-timezones: runs-on: ubuntu-20.04 @@ -103,6 +113,11 @@ jobs: uses: actions/setup-node@v1 with: node-version: 14.x + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-cljs-${{ hashFiles('**/shadow-cljs.edn') }} - name: Get yarn cache uses: actions/cache@v2 with: diff --git a/.github/workflows/i18n.yml b/.github/workflows/i18n.yml index 04606c99a52d..2f4ee2e8374f 100644 --- a/.github/workflows/i18n.yml +++ b/.github/workflows/i18n.yml @@ -4,13 +4,9 @@ on: pull_request: push: branches: - - master - - 'release**' - - 'feature**' - tags: - '**' paths: - - '**.clj' + - '**.clj*' - '**.js' - '**.jsx' - '.github/workflows/**' @@ -38,8 +34,8 @@ jobs: - name: Install Clojure CLI run: | - curl -O https://download.clojure.org/install/linux-install-1.10.1.708.sh && - sudo bash ./linux-install-1.10.1.708.sh + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh - run: ./bin/i18n/update-translation-template name: Check i18n tags/make sure template can be built diff --git a/.github/workflows/percy-issue-comment.yml b/.github/workflows/percy-issue-comment.yml new file mode 100644 index 000000000000..13e376bfec26 --- /dev/null +++ b/.github/workflows/percy-issue-comment.yml @@ -0,0 +1,144 @@ +# Triggers Percy job by "@metabase-bot run visual tests" comment in a PR +name: PercyIssueComment + +on: + issue_comment: + types: [created] + +jobs: + pr_info: + if: github.event.comment.body == '@metabase-bot run visual tests' + runs-on: ubuntu-20.04 + outputs: + pull_request_number: ${{ fromJson(steps.fetch_pr.outputs.data).head.number }} + branch_name: ${{ 
fromJson(steps.fetch_pr.outputs.data).head.ref }} + commit_sha: ${{ fromJson(steps.fetch_pr.outputs.data).head.sha }} + steps: + - name: Fetch issue + uses: octokit/request-action@v2.x + id: fetch_issue + with: + route: GET ${{ github.event.issue.url }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Fetch PR + uses: octokit/request-action@v2.x + id: fetch_pr + with: + route: GET ${{ fromJson(steps.fetch_issue.outputs.data).pull_request.url }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + build: + runs-on: ubuntu-20.04 + needs: pr_info + timeout-minutes: 60 + strategy: + matrix: + edition: [oss] + env: + MB_EDITION: ${{ matrix.edition }} + INTERACTIVE: false + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ needs.pr_info.outputs.branch_name }} + token: ${{ secrets.GITHUB_TOKEN }} + - name: Prepare Node.js + uses: actions/setup-node@v1 + with: + node-version: 14.x + - name: Prepare JDK 8 + uses: actions/setup-java@v1 + with: + java-version: 8 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh + - name: Check versions + run: | + echo "Node.js `node --version`" + echo "yarn `yarn --version`" + java -version + + - name: Get yarn cache + uses: actions/cache@v2 + with: + path: ~/.cache/yarn + key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/deps.edn') }} + + - run: yarn install --frozen-lockfile --prefer-offline + - run: ./bin/build + + - name: Mark with the commit hash + run: git rev-parse --short HEAD > COMMIT-ID + - name: Calculate SHA256 checksum + run: sha256sum ./target/uberjar/metabase.jar > SHA256.sum + - name: Upload JARs as artifact + uses: actions/upload-artifact@v2 + with: + name: metabase-${{ matrix.edition }}-uberjar + path: | + ./target/uberjar/metabase.jar + ./COMMIT-ID + ./SHA256.sum + + percy: 
+ timeout-minutes: 30 + needs: [build, pr_info] + runs-on: ubuntu-20.04 + env: + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ needs.pr_info.outputs.branch_name }} + token: ${{ secrets.GITHUB_TOKEN }} + - name: Prepare Node.js + uses: actions/setup-node@v1 + with: + node-version: 14.x + - name: Prepare JDK 8 + uses: actions/setup-java@v1 + with: + java-version: 8 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh + - name: Check versions + run: | + echo "Node.js `node --version`" + echo "yarn `yarn --version`" + java -version + - name: Get yarn cache + uses: actions/cache@v2 + with: + path: ~/.cache/yarn + key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} + - run: yarn install --frozen-lockfile --prefer-offline + + - uses: actions/download-artifact@v2 + name: Retrieve uberjar artifact + with: + name: metabase-oss-uberjar + - name: Get the version info + run: | + jar xf target/uberjar/metabase.jar version.properties + mv version.properties resources/ + + - name: Percy Test + run: yarn run test-visual-no-build + env: + PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }} + PERCY_COMMIT: ${{ needs.pr_info.outputs.commit_sha }} + PERCY_BRANCH: ${{ needs.pr_info.outputs.branch_name }} + PERCY_PULL_REQUEST: ${{ needs.pr_info.outputs.pull_request_number }} diff --git a/.github/workflows/percy.yml b/.github/workflows/percy.yml new file mode 100644 index 000000000000..33f82ef0f12c --- /dev/null +++ b/.github/workflows/percy.yml @@ -0,0 +1,115 @@ +# Triggers Percy job on push to master and release branches to create baseline screenshots +name: Percy + +on: + push: + branches: + - master + - "release-**" + paths-ignore: + - "docs/**" + - "**.md" + - "**unit.spec.js" + - "frontend/test/**" + - "!frontend/test/metabase-visual/**" + +jobs: + build: + runs-on: ubuntu-20.04 + 
timeout-minutes: 60 + strategy: + matrix: + edition: [oss] + env: + MB_EDITION: ${{ matrix.edition }} + INTERACTIVE: false + steps: + - uses: actions/checkout@v2 + - name: Prepare Node.js + uses: actions/setup-node@v1 + with: + node-version: 14.x + - name: Prepare JDK 8 + uses: actions/setup-java@v1 + with: + java-version: 8 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh + - name: Check versions + run: | + echo "Node.js `node --version`" + echo "yarn `yarn --version`" + java -version + + - name: Get yarn cache + uses: actions/cache@v2 + with: + path: ~/.cache/yarn + key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/deps.edn') }} + + - run: yarn install --frozen-lockfile --prefer-offline + - run: ./bin/build + + - name: Mark with the commit hash + run: git rev-parse --short HEAD > COMMIT-ID + - name: Calculate SHA256 checksum + run: sha256sum ./target/uberjar/metabase.jar > SHA256.sum + - name: Upload JARs as artifact + uses: actions/upload-artifact@v2 + with: + name: metabase-${{ matrix.edition }}-uberjar + path: | + ./target/uberjar/metabase.jar + ./COMMIT-ID + ./SHA256.sum + + percy: + runs-on: ubuntu-20.04 + timeout-minutes: 30 + needs: build + steps: + - uses: actions/checkout@v2 + - name: Prepare Node.js + uses: actions/setup-node@v1 + with: + node-version: 14.x + - name: Prepare JDK 8 + uses: actions/setup-java@v1 + with: + java-version: 8 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh + - name: Check versions + run: | + echo "Node.js `node --version`" + echo "yarn `yarn --version`" + java -version + - name: Get yarn cache + uses: actions/cache@v2 + with: + path: ~/.cache/yarn + key: ${{ runner.os }}-yarn-${{ 
hashFiles('**/yarn.lock') }} + - run: yarn install --frozen-lockfile --prefer-offline + + - uses: actions/download-artifact@v2 + name: Retrieve uberjar artifact + with: + name: metabase-oss-uberjar + - name: Get the version info + run: | + jar xf target/uberjar/metabase.jar version.properties + mv version.properties resources/ + + - name: Percy Test + run: yarn run test-visual-no-build + env: + PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }} diff --git a/.github/workflows/presto-kerberos-integration-test.yml b/.github/workflows/presto-kerberos-integration-test.yml new file mode 100644 index 000000000000..bbbbd6ebf754 --- /dev/null +++ b/.github/workflows/presto-kerberos-integration-test.yml @@ -0,0 +1,70 @@ +name: Kerberized Presto Integration Test + +on: + pull_request: + push: + branches: + - master + - 'release**' + - 'feature**' + tags: + - '**' + paths: + - '**/presto_jdbc/**' + - '**/presto_jdbc.clj' + +jobs: + run-presto-kerberos-test: + runs-on: ubuntu-20.04 + timeout-minutes: 40 + steps: + - name: Install babashka + run: > + mkdir -p /tmp/babashka-install \ + && cd /tmp/babashka-install \ + && curl -sLO https://raw.githubusercontent.com/babashka/babashka/master/install \ + && chmod +x install \ + && sudo ./install \ + && cd - + - name: Checkout Metabase repository + uses: actions/checkout@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + - name: Check out Presto Kerberos Docker Compose + uses: actions/checkout@v2 + with: + repository: metabase/presto-kerberos-docker + ref: add-test_data-catalog + token: ${{ secrets.GITHUB_TOKEN }} + path: presto-kerberos-docker + - name: Bring up Presto+Kerberos cluster + run: cd presto-kerberos-docker && docker-compose up -d && cd .. + - name: Run Presto test query from command line (sanity check) + run: cd presto-kerberos-docker && ./test.sh && cd .. 
+ # Since we are managing the Docker containers from the GitHub action container, we need to copy all the + # relevant resources now, into the resources dir for later consumption by the app + - name: Copy Presto SSL keystore to resources + run: docker cp presto-kerberos:/tmp/ssl_keystore.jks resources + - name: Copy krb5.conf file to resources + run: docker cp presto-kerberos:/etc/krb5.conf resources + - name: Copy client.keytab file to resources + run: docker cp presto-kerberos:/home/presto/client.keytab resources + - name: Checkout mba + uses: actions/checkout@v2 + with: + repository: metabase/mba + ref: master + token: ${{ secrets.GITHUB_TOKEN }} + path: mba-src + - name: ls mba + run: ls -latr mba-src + - name: Symlink mba + run: cd mba-src && sudo ln -s $(pwd)/src/main.clj /usr/local/bin/mba && chmod +x /usr/local/bin/mba && cd .. + - name: Ensure mba + run: which mba + - name: Run Metabase via MBA + run: /home/runner/work/metabase/metabase/mba-src/src/main.clj --mb . --data-db postgres-data -n example.com up + - name: Run test script in MBA instance + run: > + mba --mb . 
--data-db postgres-data -n example.com \ + run .github/scripts/run-presto-kerberos-integration-test.sh diff --git a/.github/workflows/uberjar.yml b/.github/workflows/uberjar.yml index a4183f2dea14..173ef8584bd4 100644 --- a/.github/workflows/uberjar.yml +++ b/.github/workflows/uberjar.yml @@ -3,9 +3,6 @@ name: Uberjar on: push: branches: - - master - - 'release-**' - tags: - '**' paths-ignore: - 'docs/**' @@ -35,15 +32,13 @@ jobs: java-version: 8 - name: Install Clojure CLI run: | - curl -O https://download.clojure.org/install/linux-install-1.10.1.708.sh && - sudo bash ./linux-install-1.10.1.708.sh + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh - name: Check versions run: | echo "Node.js `node --version`" echo "yarn `yarn --version`" java -version - echo "Clojure `clojure -e "(println (clojure-version))"`" - lein --version - name: Get yarn cache uses: actions/cache@v2 @@ -53,12 +48,13 @@ jobs: - name: Get M2 cache uses: actions/cache@v2 with: - path: ~/.m2 - key: ${{ runner.os }}-m2-${{ hashFiles('**/project.clj') }}-${{ hashFiles('**/deps.edn') }} + path: | + ~/.m2 + ~/.gitlibs + key: ${{ runner.os }}-m2-${{ hashFiles('**/deps.edn') }} - run: yarn install --frozen-lockfile --prefer-offline - - run: lein with-profile +include-all-drivers,+cloverage,+junit,+${{ matrix.edition }} deps - - run: ./bin/build + - run: MB_EDITION=${{ matrix.edition }} ./bin/build - name: Mark with the commit hash run: git rev-parse --short HEAD > COMMIT-ID @@ -81,7 +77,7 @@ jobs: strategy: matrix: edition: [ee, oss] - java-version: [8, 11, 16] + java-version: [8, 11, 17] steps: - name: Prepare JRE (Java Run-time Environment) uses: actions/setup-java@v1 @@ -96,9 +92,9 @@ jobs: name: metabase-${{ matrix.edition }}-uberjar - name: Launch uberjar - run: | - java -jar ./target/uberjar/metabase.jar & - sleep 180 + run: java -jar ./target/uberjar/metabase.jar & + - name: Wait for Metabase to start + run: while ! 
curl -s localhost:3000/api/health; do sleep 1; done timeout-minutes: 5 - name: Check API health diff --git a/.github/workflows/whitespace.yml b/.github/workflows/whitespace.yml new file mode 100644 index 000000000000..6189ceb11a27 --- /dev/null +++ b/.github/workflows/whitespace.yml @@ -0,0 +1,41 @@ +name: Whitespace + +on: + pull_request: + push: + branches: + - '**' + paths: + - '**.yaml' + - '**.yml' + - '**.clj' + - '**.edn' + - '**.el' + - '**.html' + - '**.json' + - '**.js*' + - '**.sh' + +jobs: + whitespace-linter: + runs-on: ubuntu-20.04 + timeout-minutes: 5 + steps: + - uses: actions/checkout@v2 + - name: Prepare JDK 11 + uses: actions/setup-java@v1 + with: + java-version: 11 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: | + ~/.m2 + ~/.gitlibs + key: ${{ runner.os }}-whitespace-linter-${{ hashFiles('**/deps.edn') }} + - run: clojure -T:whitespace-linter lint + name: Run Whitespace Linter diff --git a/.github/workflows/yaml.yml b/.github/workflows/yaml.yml index 630d5c7ab2bf..3f5f4a3b15a5 100644 --- a/.github/workflows/yaml.yml +++ b/.github/workflows/yaml.yml @@ -4,10 +4,6 @@ on: pull_request: push: branches: - - master - - 'release**' - - 'feature**' - tags: - '**' paths: - '**.yml' diff --git a/.gitignore b/.gitignore index 1676473ceb07..b41ae1537ac3 100644 --- a/.gitignore +++ b/.gitignore @@ -50,6 +50,8 @@ /resources/frontend_client/index.html /resources/frontend_client/public.html /resources/i18n/*.edn +/resources/license-backend-third-party.txt +/resources/license-frontend-third-party.txt /resources/namespaces.edn /resources/sample-dataset.db.trace.db /resources/version.properties @@ -78,6 +80,7 @@ dev/src/dev/nocommit/ **/cypress_sample_dataset.json /frontend/src/cljs .shadow-cljs +.clj-kondo/cache/ # lsp: ignore all but the config file .lsp/* diff --git a/.lein-classpath 
b/.lein-classpath deleted file mode 100644 index a38ac2ef8384..000000000000 --- a/.lein-classpath +++ /dev/null @@ -1 +0,0 @@ -lein_tasks diff --git a/.lsp/config.edn b/.lsp/config.edn index 0018f36a72b2..117bb4b453b8 100644 --- a/.lsp/config.edn +++ b/.lsp/config.edn @@ -1,4 +1,4 @@ {:keep-require-at-start? true :show-docs-arity-on-same-line? true - :project-specs [{:project-path "project.clj" - :classpath-cmd ["lein" "with-profile" "+ee" "classpath"]}]} + :project-specs [{:project-path "deps.edn" + :classpath-cmd ["clojure" "-A:dev:ee:ee-dev:drivers:drivers-dev" "-Spath"]}]} diff --git a/.percy.yml b/.percy.yml new file mode 100644 index 000000000000..b09920de5a83 --- /dev/null +++ b/.percy.yml @@ -0,0 +1,7 @@ +version: 2 +snapshot: + widths: + - 1280 + min-height: 800 +discovery: + disable-cache: true diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 000000000000..679f283d3f2e --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,22 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Debug with Firefox", + "request": "launch", + "type": "firefox", + "url": "http://localhost:3000", + "webRoot": "${workspaceFolder}" + }, + { + "name": "Debug with Chrome", + "request": "launch", + "type": "pwa-chrome", + "url": "http://localhost:3000", + "webRoot": "${workspaceFolder}" + }, + ] +} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 4f1b6a517454..5c35bb10de4c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,72 +2,36 @@ # STAGE 1.1: builder frontend ################### -FROM metabase/ci:java-11-lein-2.9.6-clj-1.10.3.822-04-22-2021 as frontend +FROM metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers as frontend ARG ENV=production ARG MB_EDITION=oss -WORKDIR /app/source - -COPY . . 
- -RUN apk add --no-cache patch +WORKDIR /home/circleci RUN if [ "$ENV" = "staging" ] ; then patch -p1 < staging.patch; fi -RUN NODE_ENV=production MB_EDITION=$MB_EDITION yarn --frozen-lockfile && yarn build && bin/i18n/build-translation-resources - -################### -# STAGE 1.2: backend deps -################### - -FROM metabase/ci:java-11-lein-2.9.6-clj-1.10.3.822-04-22-2021 as backend - -WORKDIR /app/source - -# backend dependencies -COPY project.clj . -RUN lein deps :tree - -################### -# STAGE 1.3: drivers -################### - -FROM metabase/ci:java-11-lein-2.9.6-clj-1.10.3.822-04-22-2021 as drivers - -ARG MB_EDITION=oss - -WORKDIR /app/source - -COPY --from=backend /root/.m2/repository/. /root/.m2/repository/. - -# add the rest of the source -COPY . . - -# build the app -RUN INTERACTIVE=false MB_EDITION=$MB_EDITION sh bin/build-drivers.sh +COPY --chown=circleci . . +RUN NODE_ENV=production MB_EDITION=$MB_EDITION yarn --frozen-lockfile && \ + yarn build && yarn build-static-viz && bin/i18n/build-translation-resources ################### # STAGE 1.4: main builder ################### -FROM metabase/ci:java-11-lein-2.9.6-clj-1.10.3.822-04-22-2021 as builder +FROM metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers as builder ARG MB_EDITION=oss -WORKDIR /app/source +WORKDIR /home/circleci # try to reuse caching as much as possible -COPY --from=frontend /root/.m2/repository/. /root/.m2/repository/. -COPY --from=frontend /app/source/. . -COPY --from=backend /root/.m2/repository/. /root/.m2/repository/. -COPY --from=backend /app/source/. . -COPY --from=drivers /root/.m2/repository/. /root/.m2/repository/. -COPY --from=drivers /app/source/. . +COPY --from=frontend /home/circleci/.m2/repository/. /home/circleci/.m2/repository/. +COPY --from=frontend /home/circleci/. . 
# build the app -RUN INTERACTIVE=false MB_EDITION=$MB_EDITION bin/build version uberjar +RUN INTERACTIVE=false MB_EDITION=$MB_EDITION bin/build version drivers uberjar # ################### # # STAGE 2: runner @@ -90,7 +54,7 @@ RUN apk upgrade && apk add --update-cache --no-cache bash ttf-dejavu fontconfig mkdir -p /plugins && chmod a+rwx /plugins # add Metabase script and uberjar -COPY --from=builder /app/source/target/uberjar/metabase.jar /app/ +COPY --from=builder /home/circleci/target/uberjar/metabase.jar /app/ COPY bin/docker/run_metabase.sh /app/ # expose our default runtime port diff --git a/OSX/Metabase/Backend/ResetPasswordTask.m b/OSX/Metabase/Backend/ResetPasswordTask.m index 1260ff631316..b675fd1e06b0 100644 --- a/OSX/Metabase/Backend/ResetPasswordTask.m +++ b/OSX/Metabase/Backend/ResetPasswordTask.m @@ -23,58 +23,47 @@ - (void)resetPasswordForEmailAddress:(NSString *)emailAddress success:(void (^)( // first, we need to stop the main Metabase task so we can access the DB NSLog(@"Stopping Metabase task in order to reset password..."); [[AppDelegate instance] stopMetabaseTask]; - + self.task = [[NSTask alloc] init]; - - // time travelers from the future: this is hardcoded since I'm the only one who works on this. 
I give you permission to fix it - Cam - #define DEBUG_RUN_LEIN_TASK 0 - - #if DEBUG_RUN_LEIN_TASK - self.task.environment = @{@"MB_DB_FILE": DBPath()}; - self.task.currentDirectoryPath = @"/Users/cam/metabase"; - self.task.launchPath = @"/usr/local/bin/lein"; - self.task.arguments = @[@"run", @"reset-password", emailAddress]; - NSLog(@"Launching ResetPasswordTask\nMB_DB_FILE='%@' lein run reset-password %@", DBPath(), emailAddress); - #else - self.task.environment = @{@"MB_DB_FILE": DBPath()}; - self.task.launchPath = JREPath(); - self.task.arguments = @[@"-Djava.awt.headless=true", // this prevents the extra java icon from popping up in the dock when running - @"-Xverify:none", // disable bytecode verification for faster launch speed, not really needed here since JAR is packaged as part of signed .app - @"-jar", UberjarPath(), - @"reset-password", emailAddress]; - NSLog(@"Launching ResetPasswordTask\nMB_DB_FILE='%@' %@ -jar %@ reset-password %@", DBPath(), JREPath(), UberjarPath(), emailAddress); - #endif - + + self.task.environment = @{@"MB_DB_FILE": DBPath()}; + self.task.launchPath = JREPath(); + self.task.arguments = @[@"-Djava.awt.headless=true", // this prevents the extra java icon from popping up in the dock when running + @"-Xverify:none", // disable bytecode verification for faster launch speed, not really needed here since JAR is packaged as part of signed .app + @"-jar", UberjarPath(), + @"reset-password", emailAddress]; + NSLog(@"Launching ResetPasswordTask\nMB_DB_FILE='%@' %@ -jar %@ reset-password %@", DBPath(), JREPath(), UberjarPath(), emailAddress); + __weak ResetPasswordTask *weakSelf = self; self.task.terminationHandler = ^(NSTask *task) { NSLog(@"ResetPasswordTask terminated with status: %d", task.terminationStatus); [weakSelf terminate]; - + dispatch_async(dispatch_get_main_queue(), ^{ if (!task.terminationStatus && weakSelf.output.length >= 38) { // should be of format _<36-char-uuid>, e.g. 
"1_b20466b9-1f5b-488d-8ab6-5039107482f8" successBlock(weakSelf.output); } else { errorBlock(weakSelf.output.length ? weakSelf.output : @"An unknown error has occured."); } - + // now restart the main Metabase task NSLog(@"Reset password complete, restarting Metabase task..."); [[AppDelegate instance] startMetabaseTask]; }); }; - + [self.task launch]; }); } - (void)readHandleDidRead:(NSString *)message { NSLog(@"[PasswordResetTask] %@", message); - + /// output comes back like "STATUS [[[message]]]" NSRegularExpression *regex = [NSRegularExpression regularExpressionWithPattern:@"^(?:(?:OK)||(?:FAIL))\\s+\\[\\[\\[(.+)\\]\\]\\]\\s*$" options:NSRegularExpressionAnchorsMatchLines|NSRegularExpressionAllowCommentsAndWhitespace error:NULL]; if (![regex numberOfMatchesInString:message options:0 range:NSMakeRange(0, message.length)]) return; - + NSString *result = [regex stringByReplacingMatchesInString:message options:0 range:NSMakeRange(0, message.length) withTemplate:@"$1"]; if (result) { self.output = result; diff --git a/OSX/src/macos_release.clj b/OSX/src/macos_release.clj index 6ed28f0e9989..f08d37d6d855 100644 --- a/OSX/src/macos_release.clj +++ b/OSX/src/macos_release.clj @@ -22,7 +22,7 @@ :upload upload/upload!)) (defn- do-step! [step-name] - (let [thunk (or (get steps* (keyword step-name)) + (let [thunk (or (get steps* (u/parse-as-keyword step-name)) (throw (ex-info (format "Invalid step name: %s" step-name) {:found (set (keys steps*))})))] (println (colorize/magenta (format "Running step %s..." step-name))) diff --git a/backend/junit/test/metabase/junit.clj b/backend/junit/test/metabase/junit.clj deleted file mode 100644 index 49bb3aec0fc9..000000000000 --- a/backend/junit/test/metabase/junit.clj +++ /dev/null @@ -1,73 +0,0 @@ -(ns metabase.junit - "Formatter for JUnit test output for CI." 
- (:require [clojure.pprint :as pp] - [clojure.string :as str] - [medley.core :as m] - [metabase.util :as u] - [pjstadig.print :as p] - [test-report-junit-xml.core :as junit-xml] - [test_report_junit_xml.shaded.clojure.data.xml :as xml])) - -(defn- escape-unprintable-characters - [s] - (str/join (for [c s] - (if (and (Character/isISOControl c) - (not (Character/isWhitespace c))) - (format "&#%d;" (int c)) - c)))) - -(defn- decolorize-and-escape - "Remove ANSI color escape sequences, then encode things as character entities as needed" - [s] - (-> s u/decolorize escape-unprintable-characters)) - -(defn- event-description [{:keys [file line context message]}] - (str - (format "%s:%d" file line) - (when (seq context) - (str "\n" (str/join " " (reverse context)))) - (when message - (str "\n" message)))) - -(defn- print-expected [expected actual] - (p/rprint "expected: ") - (pp/pprint expected) - (p/rprint " actual: ") - (pp/pprint actual) - (p/clear)) - -(defn- result-output [{:keys [expected actual diffs message], :as event}] - (let [s (with-out-str - (println (event-description event)) - ;; this code is adapted from `pjstadig.util` - (p/with-pretty-writer - (fn [] - (if (seq diffs) - (doseq [[actual [a b]] diffs] - (print-expected expected actual) - (p/rprint " diff:") - (if a - (do (p/rprint " - ") - (pp/pprint a) - (p/rprint " + ")) - (p/rprint " + ")) - (when b - (pp/pprint b)) - (p/clear)) - (print-expected expected actual)))))] - (decolorize-and-escape s))) - -(defmulti format-result - {:arglists '([event])} - :type) - -(defmethod format-result :default - [event] - (-> (#'junit-xml/format-result event) - (m/update-existing-in [:attrs :message] decolorize-and-escape) - (m/update-existing :content (comp xml/cdata decolorize-and-escape)))) - -(defmethod format-result :fail - [event] - {:tag :failure - :content (xml/cdata (result-output event))}) diff --git a/bin/build b/bin/build index 7833eef8487e..d360de021445 100755 --- a/bin/build +++ b/bin/build @@ -2,8 +2,18 
@@ set -euo pipefail +# switch to project root directory if we're not already there +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/.." + source "./bin/check-clojure-cli.sh" check_clojure_cli +source "./bin/clear-outdated-cpcaches.sh" +clear_outdated_cpcaches + +source "./bin/prep.sh" +prep_deps + cd bin/build-mb clojure -M -m build $@ diff --git a/bin/build-driver.sh b/bin/build-driver.sh index ed438085bb68..8380131c246a 100755 --- a/bin/build-driver.sh +++ b/bin/build-driver.sh @@ -5,12 +5,22 @@ set -eo pipefail driver="$1" if [ ! "$driver" ]; then - echo "Usage: ./bin/build-driver.sh [driver]" + echo "Usage: ./bin/build-driver.sh [edition]" exit -1 fi +# switch to project root directory if we're not already there +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/.." + source "./bin/check-clojure-cli.sh" check_clojure_cli +source "./bin/clear-outdated-cpcaches.sh" +clear_outdated_cpcaches + +source "./bin/prep.sh" +prep_deps + cd bin/build-drivers -clojure -M -m build-driver "$driver" +clojure -M -m build-driver $@ diff --git a/bin/build-drivers.sh b/bin/build-drivers.sh index f50aac9d22c2..bea160d25b78 100755 --- a/bin/build-drivers.sh +++ b/bin/build-drivers.sh @@ -2,8 +2,18 @@ set -euo pipefail +# switch to project root directory if we're not already there +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/.." + source "./bin/check-clojure-cli.sh" check_clojure_cli +source "./bin/clear-outdated-cpcaches.sh" +clear_outdated_cpcaches + +source "./bin/prep.sh" +prep_deps + cd bin/build-drivers clojure -M -m build-drivers $@ diff --git a/bin/build-drivers/README.md b/bin/build-drivers/README.md index 0033455aba5d..8862c9502df3 100644 --- a/bin/build-drivers/README.md +++ b/bin/build-drivers/README.md @@ -7,7 +7,7 @@ There are three main entrypoints. Shell script wrappers are provided for conveni ### `build-drivers` -Builds *all* drivers as needed. 
If drivers were recently built and no relevant source code changed, skips rebuild. +Builds *all* drivers as needed. ``` cd bin/build-drivers diff --git a/bin/build-drivers/deps.edn b/bin/build-drivers/deps.edn index 0231cb4d6284..5c89854601cd 100644 --- a/bin/build-drivers/deps.edn +++ b/bin/build-drivers/deps.edn @@ -1,17 +1,29 @@ {:paths ["src"] :deps - {common/common {:local/root "../common"} - cheshire/cheshire {:mvn/version "5.8.1"} - commons-codec/commons-codec {:mvn/version "1.14"} - hiccup/hiccup {:mvn/version "1.0.5"} - io.forward/yaml {:mvn/version "1.0.9"} ; don't upgrade to 1.0.10 -- doesn't work on Java 8 (!) - leiningen/leiningen {:mvn/version "2.9.5"} ; for parsing Leiningen projects - org.flatland/ordered {:mvn/version "1.5.9"} ; used by io.forward/yaml -- need the newer version - stencil/stencil {:mvn/version "0.5.0"}} + {common/common {:local/root "../common"} + com.github.seancorfield/depstar {:mvn/version "2.1.278"} + cheshire/cheshire {:mvn/version "5.8.1"} + commons-codec/commons-codec {:mvn/version "1.14"} + hiccup/hiccup {:mvn/version "1.0.5"} + io.forward/yaml {:mvn/version "1.0.9"} ; Don't upgrade yet, new version doesn't support Java 8 (see https://github.com/owainlewis/yaml/issues/37) + io.github.clojure/tools.build {:git/tag "v0.1.6", :git/sha "5636e61"} + org.clojure/tools.deps.alpha {:mvn/version "0.12.985"} + org.flatland/ordered {:mvn/version "1.5.9"} ; used by io.forward/yaml -- need the newer version + stencil/stencil {:mvn/version "0.5.0"} + ;; local source + metabase/metabase-core {:local/root "../.."} + metabase/driver-modules {:local/root "../../modules/drivers"}} + + :jvm-opts + ["-XX:-OmitStackTraceInFastThrow"] :aliases - {:test {:extra-paths ["test"] - :extra-deps {com.cognitect/test-runner {:git/url "https://github.com/cognitect-labs/test-runner.git" - :sha "209b64504cb3bd3b99ecfec7937b358a879f55c1"}} - :main-opts ["-m" "cognitect.test-runner"]}}} + {:dev + {:extra-paths ["test"]} + + :test + {:extra-paths ["test"] + 
:extra-deps {com.cognitect/test-runner {:git/url "https://github.com/cognitect-labs/test-runner.git" + :sha "209b64504cb3bd3b99ecfec7937b358a879f55c1"}} + :main-opts ["-m" "cognitect.test-runner"]}}} diff --git a/bin/build-drivers/src/build_driver.clj b/bin/build-drivers/src/build_driver.clj index 1c792623eafb..5824f5880be4 100644 --- a/bin/build-drivers/src/build_driver.clj +++ b/bin/build-drivers/src/build_driver.clj @@ -7,4 +7,4 @@ (u/exit-when-finished-nonzero-on-exception (when-not (seq driver) (throw (ex-info "Usage: clojure -m build-driver [edition]" {}))) - (build-driver/build-driver! (keyword driver) (or (keyword edition) :oss)))) + (build-driver/build-driver! (u/parse-as-keyword driver) (or (u/parse-as-keyword edition) :oss)))) diff --git a/bin/build-drivers/src/build_drivers.clj b/bin/build-drivers/src/build_drivers.clj index 2dd1ba2d3b46..55057b37b1de 100644 --- a/bin/build-drivers/src/build_drivers.clj +++ b/bin/build-drivers/src/build_drivers.clj @@ -7,12 +7,24 @@ (defn- all-drivers [] (->> (.listFiles (io/file (u/filename u/project-root-directory "modules" "drivers"))) +<<<<<<< HEAD (filter (fn [^File d] (and (.isDirectory d) ;; watch for errant DS_Store files on os_x ;; only consider a directory to be a driver if it contains a lein or deps build file (some true? (map (fn [f] (.exists (io/file d f))) ["project.clj" "deps.edn"]))))) (map (comp keyword #(.getName %))))) +======= + (filter (fn [^File d] ; + (and + ;; watch for errant DS_Store files on os_x + (.isDirectory d) + ;; ignore stuff like .cpcache + (not (.isHidden d)) + ;; only consider a directory to be a driver if it contains a lein or deps build file + (.exists (io/file d "deps.edn"))))) + (map (comp keyword #(.getName ^File %))))) +>>>>>>> tags/v0.41.0 (defn build-drivers! [edition] (let [edition (or edition :oss)] @@ -24,4 +36,4 @@ (defn -main [& [edition]] (u/exit-when-finished-nonzero-on-exception - (build-drivers! (keyword edition)))) + (build-drivers! 
(u/parse-as-keyword edition)))) diff --git a/bin/build-drivers/src/build_drivers/build_driver.clj b/bin/build-drivers/src/build_drivers/build_driver.clj index 498e51f175ac..f05b81b57064 100644 --- a/bin/build-drivers/src/build_drivers/build_driver.clj +++ b/bin/build-drivers/src/build_drivers/build_driver.clj @@ -1,168 +1,35 @@ (ns build-drivers.build-driver - "Logic for building a single driver." - (:require [build-drivers.checksum :as checksum] - [build-drivers.common :as c] - [build-drivers.install-driver-locally :as install-locally] - [build-drivers.metabase :as metabase] - [build-drivers.plugin-manifest :as manifest] - [build-drivers.strip-and-compress :as strip-and-compress] + (:require [build-drivers.common :as c] + [build-drivers.compile-source-files :as compile-source-files] + [build-drivers.copy-source-files :as copy-source-files] + [build-drivers.create-uberjar :as create-uberjar] [build-drivers.verify :as verify] - [clojure.string :as str] - [colorize.core :as colorize] - [environ.core :as env] [metabuild-common.core :as u])) -(defn- copy-driver! - "Copy the driver JAR from its `target/` directory to `resources/modules`/." - [driver] - (u/step (format "Copy %s driver uberjar from %s -> %s" - driver - (u/assert-file-exists (c/driver-jar-build-path driver)) - (c/driver-jar-destination-path driver)) - (u/delete-file-if-exists! (c/driver-jar-destination-path driver)) - (u/create-directory-unless-exists! c/driver-jar-destination-directory) - (u/copy-file! (c/driver-jar-build-path driver) - (c/driver-jar-destination-path driver)))) - -(defn- clean-driver-artifacts! - "Delete built JARs of `driver`." - [driver] - (u/step (format "Delete %s driver artifacts" driver) - (u/delete-file-if-exists! (c/driver-target-directory driver)) +(defn clean! [driver] + (u/step "Clean" + (u/delete-file-if-exists! (c/compiled-source-target-dir driver)) (u/delete-file-if-exists! (c/driver-jar-destination-path driver)))) -(defn- clean-parents! 
- "Delete built JARs and local Maven installations of the parent drivers of `driver`." - [driver] - (u/step (format "Clean %s parent driver artifacts" driver) - (doseq [parent (manifest/parent-drivers driver)] - (clean-driver-artifacts! parent) - (install-locally/clean! parent) - (clean-parents! parent)))) - -(defn- clean-all! - "Delete all artifacts relating to building `driver`, including the driver JAR itself and installed - `metabase-core`/Metabase uberjar and any parent driver artifacts." - [driver] - (u/step "Clean all" - (clean-driver-artifacts! driver) - (clean-parents! driver) - (metabase/clean-metabase!))) - -(declare build-driver!) - -(defn- build-parents! - "Build and install to the local Maven repo any parent drivers of `driver` (e.g. `:google` is a parent of `:bigquery`). - The driver must be built as an uberjar so we can remove duplicate classes during the `strip-and-compress` stage; it - must be installed as a library so we can use it as a `:provided` dependency when building the child driver." - [driver edition] - (u/step (format "Build %s parent drivers" driver) - (when-let [parents (not-empty (manifest/parent-drivers driver))] - (doseq [parent parents] - (build-parents! parent edition) - (install-locally/install-locally! parent edition) - (build-driver! parent edition)) - (u/announce "%s parents built successfully." driver)))) - -(defn- build-uberjar! [driver edition] - (u/step (format "Build %s uberjar (%s edition)" driver edition) - (u/delete-file-if-exists! (c/driver-target-directory driver)) - (u/sh {:dir (c/driver-project-dir driver)} "lein" "clean") - (u/sh {:dir (c/driver-project-dir driver) - :env {"LEIN_SNAPSHOTS_IN_RELEASE" "true" - "HOME" (env/env :user-home) - "PATH" (env/env :path) - "JAVA_HOME" (env/env :java-home)}} - "lein" "with-profile" (format "+%s" (name edition)) "uberjar") - (strip-and-compress/strip-and-compress-uberjar! driver) - (u/announce "%s uberjar (%s edition) built successfully." 
driver edition))) - -(defn- build-and-verify! - "Build `driver` and verify the built JAR. This function ignores any existing artifacts and will always rebuild." - [driver edition] - {:pre [(#{:oss :ee} edition)]} - (u/step (str/join " " [(colorize/green "Build") - (colorize/yellow driver) - (colorize/green "driver") - (colorize/yellow (format "(%s edition)" edition))]) - (clean-driver-artifacts! driver) - (u/step (format "Build %s driver (%s edition) prerequisites if needed" driver edition) - (metabase/build-metabase!) - (build-parents! driver edition)) - (build-uberjar! driver edition) - (copy-driver! driver) - (verify/verify-driver driver) - (u/step (format "Save checksum for %s driver (%s edition) to %s" - driver edition (c/driver-checksum-filename driver)) - (let [filename (c/driver-checksum-filename driver) - checksum (checksum/driver-checksum driver edition)] - (spit filename checksum) - (u/announce "Wrote checksum %s to file %s" (pr-str checksum) filename))))) - -(defn- driver-checksum-matches? - "Check whether the saved checksum for the driver from the last build is the same as the current one. If so, we don't - need to build again. This checksum is based on driver sources as well as the checksums for Metabase sources and - parent drivers." - [driver edition] - (u/step (format "Determine whether %s driver (%s edition) source files have changed since last build" driver edition) - (let [existing-checksum (checksum/existing-driver-checksum driver)] - (cond - (not existing-checksum) - (do - (u/announce "No previous checksum. Need to rebuild driver") - false) - - (= existing-checksum (checksum/driver-checksum driver edition)) - (do - (u/announce "Checksum is the same. Do not need to rebuild driver.") - true) - - :else - (do - (u/announce "Checksum is different. Need to rebuild driver.") - false))))) - (defn build-driver! - "Build `driver`, if needed." 
- [driver edition] - {:pre [(#{:oss :ee nil} edition)]} - (let [edition (or edition :oss)] - (u/step (str/join " " [(colorize/green "Build") - (colorize/yellow driver) - (colorize/green "driver") - (colorize/yellow (format "(%s edition)" edition)) - (colorize/green "if needed")]) - ;; When we build a driver, we save a checksum of driver source code + metabase source code + parent drivers - ;; alongside the built driver JAR. The next time this script is called, we recalculate that checksum -- if the - ;; current checksum matches the saved one associated with the built driver JAR, we do not need to rebuild the - ;; driver. If anything relevant has changed, we have to rebuild the driver. - (if (driver-checksum-matches? driver edition) - ;; even if we're not rebuilding the driver, copy the artifact from `modules/drivers//target/uberjar/` - ;; to `resources/modules` so we can be sure we have the most up-to-date version there. - (try - (copy-driver! driver) - (verify/verify-driver driver) - ;; if verification fails, delete all the existing artifacts and just rebuild the driver from scratch. - (catch Throwable e - (u/error "Error verifying existing driver:\n%s" (pr-str e)) - (u/announce "Deleting existing driver artifacts and rebuilding.") - (clean-driver-artifacts! driver) - (build-driver! driver edition))) - ;; if checksum does not match, build and verify the driver - (try - (build-and-verify! driver edition) - ;; if building fails, clean everything, including metabase-core, the metabase uberjar, and parent - ;; dependencies, *then* retry. - (catch Throwable e - (u/announce "Cleaning ALL and retrying...") - (clean-all! driver) - (try - (build-and-verify! driver edition) - ;; if building the driver failed again, even after cleaning, delete anything that was built and then - ;; give up. - (catch Throwable e - (u/safe-println (colorize/red (format "Failed to build %s driver." driver))) - (clean-driver-artifacts! 
driver) - (throw e)))))) - ;; if we make it this far, we've built the driver successfully. - (u/announce "Success.")))) + ;; 1-arity that takes just a map is mean for use directly with clojure -X + ([{:keys [driver edition], :as options}] + (build-driver! driver edition (dissoc options :driver :edition))) + + ([driver edition] + (build-driver! driver edition nil)) + + ([driver edition {:keys [project-dir target-dir], :as options}] + (let [edition (or edition :oss) + start-time-ms (System/currentTimeMillis)] + (binding [c/*driver-project-dir* (or project-dir + c/*driver-project-dir*) + c/*target-directory* (or target-dir + c/*target-directory*)] + (u/step (format "Build driver %s (edition = %s, options = %s)" driver edition (pr-str options)) + (clean! driver) + (copy-source-files/copy-source-files! driver edition) + (compile-source-files/compile-clojure-source-files! driver edition) + (create-uberjar/create-uberjar! driver edition) + (u/announce "Built %s driver in %d ms." driver (- (System/currentTimeMillis) start-time-ms)) + (verify/verify-driver driver)))))) diff --git a/bin/build-drivers/src/build_drivers/checksum.clj b/bin/build-drivers/src/build_drivers/checksum.clj deleted file mode 100644 index a46e91e4151d..000000000000 --- a/bin/build-drivers/src/build_drivers/checksum.clj +++ /dev/null @@ -1,82 +0,0 @@ -(ns build-drivers.checksum - "Shared code for calculating and reading hex-encoded MD5 checksums for relevant files." - (:require [build-drivers.common :as c] - [build-drivers.plugin-manifest :as manifest] - [clojure.java.io :as io] - [clojure.string :as str] - [colorize.core :as colorize] - [metabuild-common.core :as u]) - (:import org.apache.commons.codec.digest.DigestUtils)) - -(defn checksum-from-file - "Read a saved MD5 hash checksum from a file." 
- [filename] - (u/step (format "Read saved checksum from %s" filename) - (let [file (io/file filename)] - (if-not (.exists file) - (u/announce "%s does not exist" filename) - (or (when-let [[checksum-line] (not-empty (str/split-lines (slurp file)))] - (when-let [[_ checksum-hex] (re-matches #"(^(?:\w+-)?[0-9a-f]{32}).*$" checksum-line)] - (u/safe-println (format "Saved checksum is %s" (colorize/cyan checksum-hex))) - checksum-hex)) - (u/error (format "Checksum file %s exists, but does not contain a valid checksum" filename))))))) - -;;; -------------------------------------------- Metabase source checksum -------------------------------------------- - -(defn- metabase-source-paths [] - (sort - (cons - (u/filename u/project-root-directory "project.clj") - (mapcat (fn [dir] - (try - (u/find-files dir (fn [s] - (or (str/ends-with? s ".clj") - (str/ends-with? s ".cljc")))) - (catch Throwable _ - []))) - [(u/filename u/project-root-directory "src") - (u/filename u/project-root-directory "enterprise" "backend" "src") - (u/filename u/project-root-directory "shared" "src")])))) - -(defn metabase-source-checksum - "Checksum of Metabase backend source files and `project.clj`." - ^String [] - (let [paths (metabase-source-paths)] - (u/step (format "Calculate checksum for %d Metabase source files" (count paths)) - (let [checksum (DigestUtils/md5Hex (str/join (map slurp paths)))] - (u/safe-println (format "Current checksum of Metabase files is %s" (colorize/cyan checksum))) - checksum)))) - - -;;; ---------------------------------------------- Driver source files ----------------------------------------------- - -(defn existing-driver-checksum - "Checksum from the relevant sources from last time we built `driver`." - [driver] - (checksum-from-file (c/driver-checksum-filename driver))) - -(defn- driver-source-paths - "Returns sequence of the source filenames for `driver`." - [driver] - (u/find-files (c/driver-project-dir driver) - (fn [path] - (or (and (str/ends-with? 
path ".clj") - (not (str/starts-with? path (u/filename (c/driver-project-dir driver) "test")))) - (str/ends-with? path ".yaml"))))) - -(defn driver-checksum - "The driver checksum is based on a checksum of all the driver source files (`.clj` files and the plugin manifest YAML - file) combined with the checksums for `metabase-core` *and* the parent drivers. After building a driver, we save - this checksum. Next time the script is ran, we recalculate the checksum to determine whether anything relevant has - changed -- if it has, and the current checksum doesn't match the saved one, we need to rebuild the driver." - ^String [driver edition] - (let [source-paths (driver-source-paths driver)] - (u/step (format "Calculate checksum for %d files: %s ..." (count source-paths) (first source-paths)) - (let [checksum (str - (c/edition-checksum-prefix driver edition) - (DigestUtils/md5Hex (str/join (concat [(metabase-source-checksum)] - (map #(driver-checksum % edition) - (manifest/parent-drivers driver)) - (map slurp (driver-source-paths driver))))))] - (u/safe-println (format "Current checksum of %s driver (%s edition) is %s" driver edition (colorize/cyan checksum))) - checksum)))) diff --git a/bin/build-drivers/src/build_drivers/common.clj b/bin/build-drivers/src/build_drivers/common.clj index 36de81a78989..ec3607b81c4e 100644 --- a/bin/build-drivers/src/build_drivers/common.clj +++ b/bin/build-drivers/src/build_drivers/common.clj @@ -1,77 +1,46 @@ (ns build-drivers.common - "Shared constants and functions related to source and artifact paths used throughout this code." 
- (:require [environ.core :as env] - [leiningen.core.project :as lein.project] + (:require [clojure.java.io :as io] + [clojure.tools.deps.alpha :as deps] [metabuild-common.core :as u])) -(def ^String maven-repository-path - (u/filename (env/env :user-home) ".m2" "repository")) +(def ^:dynamic *driver-project-dir* nil) -;;; -------------------------------------------------- Driver Paths -------------------------------------------------- +(def ^:dynamic *target-directory* nil) (defn driver-project-dir "e.g. \"/home/cam/metabase/modules/drivers/redshift\"" - [driver] - (u/filename u/project-root-directory "modules" "drivers" (name driver))) + ^String [driver] + (or *driver-project-dir* + (u/filename u/project-root-directory "modules" "drivers" (name driver)))) (defn driver-jar-name "e.g. \"redshift.metabase-driver.jar\"" - [driver] + ^String [driver] (format "%s.metabase-driver.jar" (name driver))) -(defn driver-target-directory - [driver] - (u/filename (driver-project-dir driver) "target")) - -(defn driver-jar-build-path - "e.g. \"/home/cam/metabase/modules/drivers/redshift/target/uberjar/redshift.metabase-driver.jar\"" - [driver] - (u/filename (driver-target-directory driver) "uberjar" (driver-jar-name driver))) - -(def ^String driver-jar-destination-directory - (u/filename u/project-root-directory "resources" "modules")) +(defn driver-jar-destination-directory ^String [] + (or *target-directory* + (u/filename u/project-root-directory "resources" "modules"))) (defn driver-jar-destination-path "e.g. \"/home/cam/metabase/resources/modules/redshift.metabase-driver.jar\"" ^String [driver] - (u/filename driver-jar-destination-directory (driver-jar-name driver))) - -(defn- lein-project-map - "Read the `project.clj` file for `driver` and return it as a map." - [driver & profiles] - (let [project-filename (u/assert-file-exists (u/filename (driver-project-dir driver) "project.clj"))] - (lein.project/read project-filename profiles))) - -(defn has-edition-profile? 
- "Whether `driver` has a separate profile for `edition`, e.g. `:ee`. This means this version of the driver is different - from other versions of the driver (e.g. :ee Oracle ships with the non-free Oracle JDBC driver, :oss does not)." - [driver edition] - (let [has-profile? (boolean - (contains? (:profiles (lein-project-map driver)) edition))] - (u/safe-println (format "%s %s have a separate %s profile" driver (if has-profile? "DOES" "DOES NOT") edition)) - has-profile?)) - -(defn edition-checksum-prefix - "Prefix to add to checksums of driver for `edition` -- normally this is `nil`, but if the driver has a specific - profile for `edition` (e.g. Oracle has a different profile for `:ee` builds) this is a prefix to make the checksum - different from the normal one." - [driver edition] - (when (has-edition-profile? driver edition) - (format "%s-" (name edition)))) - -(defn driver-checksum-filename - "e.g. \"/home/cam/metabase/modules/drivers/redshift/target/checksum.md5\"" - [driver] - (u/filename (driver-project-dir driver) "target" "checksum.md5")) + (u/filename (driver-jar-destination-directory) (driver-jar-name driver))) -(defn driver-plugin-manifest-filename - "e.g. \"/home/cam/metabase/modules/drivers/bigquery/resources/plugin-manifest.yaml\"" - [driver] - (u/filename (driver-project-dir driver) "resources" "metabase-plugin.yaml")) +(defn compiled-source-target-dir [driver] + (u/filename (driver-project-dir driver) "target" "jar")) +(defn driver-edn-filename [driver] + (u/filename (driver-project-dir driver) "deps.edn")) -;;; ------------------------------------------ Metabase Local Install Paths ------------------------------------------ +(defn- ->absolute [driver path] + (if (u/absolute? path) + path + (u/filename (driver-project-dir driver) path))) -(def ^String metabase-uberjar-path - "e.g. 
\"home/cam/metabase/target/uberjar/metabase.jar\"" - (u/filename u/project-root-directory "target" "uberjar" "metabase.jar")) +(defn driver-edn [driver edition] + (let [edn (deps/merge-edns ((juxt :root-edn :project-edn) (deps/find-edn-maps (driver-edn-filename driver)))) + combined (deps/combine-aliases edn #{edition})] + (-> (deps/tool edn combined) + ;; make sure :paths are absolute + (update :paths (partial mapv (partial ->absolute driver)))))) diff --git a/bin/build-drivers/src/build_drivers/compile_source_files.clj b/bin/build-drivers/src/build_drivers/compile_source_files.clj new file mode 100644 index 000000000000..cfe2277cfdb5 --- /dev/null +++ b/bin/build-drivers/src/build_drivers/compile_source_files.clj @@ -0,0 +1,54 @@ +(ns build-drivers.compile-source-files + (:require [build-drivers.common :as c] + [clojure.java.io :as io] + [clojure.tools.namespace.dependency :as ns.deps] + [clojure.tools.namespace.find :as ns.find] + [clojure.tools.namespace.parse :as ns.parse] + [metabuild-common.core :as u])) + +(defn driver-source-paths [driver edition] + (let [dirs (:paths (c/driver-edn driver edition))] + (assert (every? u/absolute? dirs) + (format "All dirs should be absolute, got: %s" (pr-str dirs))) + dirs)) + +(defn- dependencies-graph + "Return a `clojure.tools.namespace` dependency graph of namespaces named by `ns-symbol`." + [ns-decls] + (reduce + (fn [graph ns-decl] + (let [ns-symbol (ns.parse/name-from-ns-decl ns-decl)] + (reduce + (fn [graph dep] + (ns.deps/depend graph ns-symbol dep)) + graph + (ns.parse/deps-from-ns-decl ns-decl)))) + (ns.deps/graph) + ns-decls)) + +;; topologically sort the namespaces so we don't end up with weird compilation issues. 
+(defn source-path-namespaces [source-paths] + (let [ns-decls (mapcat + (comp ns.find/find-ns-decls-in-dir io/file) + source-paths) + ns-symbols (set (map ns.parse/name-from-ns-decl ns-decls))] + (->> (dependencies-graph ns-decls) + ns.deps/topo-sort + (filterv ns-symbols)))) + +(defn compile-clojure-source-files! [driver edition] + (u/step "Compile clojure source files" + (let [start-time-ms (System/currentTimeMillis) + source-paths (driver-source-paths driver edition) + target-dir (c/compiled-source-target-dir driver) + namespaces (source-path-namespaces source-paths)] + (u/announce "Compiling Clojure source files in %s to %s" (pr-str source-paths) target-dir) + (u/create-directory-unless-exists! target-dir) + (u/announce "Compiling namespaces %s" (pr-str namespaces)) + (binding [*compile-path* target-dir] + (doseq [a-namespace namespaces] + (#'clojure.core/serialized-require a-namespace) + (compile a-namespace))) + (u/announce "Compiled %d namespace(s) in %d ms." + (count namespaces) + (- (System/currentTimeMillis) start-time-ms))))) diff --git a/bin/build-drivers/src/build_drivers/copy_source_files.clj b/bin/build-drivers/src/build_drivers/copy_source_files.clj new file mode 100644 index 000000000000..f9b87805841b --- /dev/null +++ b/bin/build-drivers/src/build_drivers/copy_source_files.clj @@ -0,0 +1,18 @@ +(ns build-drivers.copy-source-files + (:require [build-drivers.common :as c] + [clojure.tools.build.api :as build] + [metabuild-common.core :as u])) + +(defn copy-source-files! [driver edition] + (u/step (format "Copy %s source files" driver) + (let [start-time-ms (System/currentTimeMillis) + dirs (:paths (c/driver-edn driver edition))] + (assert (every? u/absolute? dirs) + (format "All dirs should be absolute, got: %s" (pr-str dirs))) + (u/announce "Copying files in %s" (pr-str dirs)) + (build/copy-dir + {:src-dirs dirs + :target-dir (c/compiled-source-target-dir driver)}) + (u/announce "Copied files in %d directories in %d ms." 
+ (count dirs) + (- (System/currentTimeMillis) start-time-ms))))) diff --git a/bin/build-drivers/src/build_drivers/create_uberjar.clj b/bin/build-drivers/src/build_drivers/create_uberjar.clj new file mode 100644 index 000000000000..d27c584375d1 --- /dev/null +++ b/bin/build-drivers/src/build_drivers/create_uberjar.clj @@ -0,0 +1,79 @@ +(ns build-drivers.create-uberjar + (:require [build-drivers.common :as c] + [clojure.java.io :as io] + [clojure.tools.deps.alpha :as deps] + [clojure.tools.deps.alpha.util.dir :as deps.dir] + [colorize.core :as colorize] + [hf.depstar.api :as depstar] + [metabuild-common.core :as u])) + +(defn driver-basis [driver edition] + (let [edn (c/driver-edn driver edition)] + (binding [deps.dir/*the-dir* (io/file (c/driver-project-dir driver))] + (deps/calc-basis edn)))) + +(defonce metabase-core-edn + (deps/merge-edns + ((juxt :root-edn :project-edn) + (deps/find-edn-maps (u/filename u/project-root-directory "deps.edn"))))) + +(defonce metabase-core-basis + (binding [deps.dir/*the-dir* (io/file u/project-root-directory)] + (deps/calc-basis metabase-core-edn))) + +(defonce metabase-core-provided-libs + (set (keys (:libs metabase-core-basis)))) + +(defn- driver-parents [driver edition] + (when-let [parents (not-empty (:metabase.driver/parents (c/driver-edn driver edition)))] + (u/announce "Driver has parent drivers %s" (pr-str parents)) + parents)) + +(defn- parent-provided-libs [driver edition] + (into {} (for [parent (driver-parents driver edition) + lib (keys (:libs (driver-basis parent edition)))] + [lib parent]))) + +(defn- provided-libs + "Return a map of lib -> provider, where lib is a symbol like `com.h2database/h2` and provider is either + `metabase-core` or the parent driver that provided that lib." 
+ [driver edition] + (into (parent-provided-libs driver edition) + (map (fn [lib] + [lib 'metabase-core])) + metabase-core-provided-libs)) + +(defn remove-provided-libs [basis driver edition] + (let [provided-lib->provider (into {} + (filter (fn [[lib]] + (get-in basis [:libs lib]))) + (provided-libs driver edition))] + ;; log which libs we're including and excluding. + (doseq [lib (sort (keys (:libs basis)))] + (u/announce (if-let [provider (get provided-lib->provider lib)] + (format "SKIP %%45s (provided by %s)" provider) + "INCLUDE %s") + (colorize/yellow lib))) + ;; now remove the provide libs from `:classpath`, `:classpath-roots`, and `:libs` + (let [provided-libs-set (into #{} (keys provided-lib->provider)) + provided-paths-set (into #{} (mapcat #(get-in basis [:libs % :paths])) provided-libs-set)] + (-> basis + (update :classpath-roots #(vec (remove provided-paths-set %))) + (update :libs #(into {} (remove (fn [[lib]] (provided-libs-set lib))) %)) + (update :classpath #(into {} (remove (fn [[path]] (provided-paths-set path))) %)))))) + +(defn- uberjar-basis [driver edition] + (u/step "Determine which dependencies to include" + (-> (driver-basis driver edition) + (remove-provided-libs driver edition) + ;; remove unneeded keys so Depstar doesn't try to do anything clever and resolve them + (dissoc :deps :aliases :mvn/repos)))) + +(defn create-uberjar! [driver edition] + (u/step (format "Write %s %s uberjar -> %s" driver edition (c/driver-jar-destination-path driver)) + (let [start-time-ms (System/currentTimeMillis)] + (depstar/uber + {:class-dir (c/compiled-source-target-dir driver) + :uber-file (c/driver-jar-destination-path driver) + :basis (uberjar-basis driver edition)}) + (u/announce "Created uberjar in %d ms." 
(- (System/currentTimeMillis) start-time-ms))))) diff --git a/bin/build-drivers/src/build_drivers/install_driver_locally.clj b/bin/build-drivers/src/build_drivers/install_driver_locally.clj deleted file mode 100644 index 7189c74bd913..000000000000 --- a/bin/build-drivers/src/build_drivers/install_driver_locally.clj +++ /dev/null @@ -1,49 +0,0 @@ -(ns build-drivers.install-driver-locally - "Logic related to installing a driver as a library in the local Maven repository so it can be used as a dependency - when building descandant drivers. Right now this is only used for `:google`, which is used by `:bigquery` and - `:googleanalytics`." - (:require [build-drivers.checksum :as checksum] - [build-drivers.common :as c] - [colorize.core :as colorize] - [metabuild-common.core :as u])) - -(defn- local-install-path [driver] - (u/filename c/maven-repository-path "metabase" (format "%s-driver" (name driver)))) - -(defn- local-install-checksum-filename [driver edition] - (u/filename (local-install-path driver) (str (c/edition-checksum-prefix driver edition) "checksum.md5"))) - -(defn clean! - "Delete local Maven installation of the library version of `driver`." - [driver] - (u/step (format "Deleting existing Maven installation of %s driver" driver) - (u/delete-file-if-exists! (local-install-path driver)))) - -(defn- local-install-checksum-matches? - "After installing the library version of `driver`, we save a checksum based on its sources; next time we call - `install-locally!`, we can recalculate the checksum; if the saved one matches the current one, we do not need to - reinstall." - [driver edition] - (u/step "Determine whether %s driver source files have changed since last local install" - (let [existing-checksum (checksum/checksum-from-file (local-install-checksum-filename driver edition)) - current-checksum (checksum/driver-checksum driver edition) - same? (= existing-checksum current-checksum)] - (u/announce (if same? - "Checksum is the same. 
Do not need to rebuild driver." - "Checksum is different. Need to rebuild driver.")) - same?))) - -(defn install-locally! - "Install `driver` as a library in the local Maven repository IF NEEDED so descendant drivers can use it as a - `:provided` dependency when building. E.g. before building `:bigquery` we need to install `:google` as a library - locally." - [driver edition] - {:pre [(keyword? driver)]} - (u/step (str (colorize/green "Install ") (colorize/yellow driver) (colorize/green " driver to local Maven repo if needed")) - (if (local-install-checksum-matches? driver edition) - (u/announce "Already installed locally.") - (u/step (str (colorize/green "Install ") (colorize/yellow driver) (colorize/green " driver to local Maven repo")) - (u/sh {:dir (c/driver-project-dir driver)} "lein" "clean") - (u/sh {:dir (c/driver-project-dir driver)} "lein" "install-for-building-drivers") - (u/step (format "Save checksum to %s" driver (local-install-checksum-filename driver edition)) - (spit (local-install-checksum-filename driver edition) (checksum/driver-checksum driver edition))))))) diff --git a/bin/build-drivers/src/build_drivers/metabase.clj b/bin/build-drivers/src/build_drivers/metabase.clj deleted file mode 100644 index e94b29798ef6..000000000000 --- a/bin/build-drivers/src/build_drivers/metabase.clj +++ /dev/null @@ -1,88 +0,0 @@ -(ns build-drivers.metabase - "Code for installing the main Metabase project as a library (`metabase-core`) in the local Maven repository, and for - building a Metabase uberjar. Both are needed when building drivers." 
- (:require [build-drivers - [checksum :as checksum] - [common :as c]] - [metabuild-common.core :as u])) - -(def ^String ^:private uberjar-checksum-path - (str c/metabase-uberjar-path ".md5")) - -(def ^String ^:private metabase-core-install-path - (u/filename c/maven-repository-path "metabase-core")) - -(def ^String ^:private metabase-core-checksum-path - (u/filename metabase-core-install-path "checksum.md5")) - -(defn metabase-core-checksum-matches? [] - (u/step "Determine whether Metabase source files checksum has changed since last install of metabase-core" - (let [existing-checksum (checksum/checksum-from-file metabase-core-checksum-path) - current-checksum (checksum/metabase-source-checksum) - same? (= existing-checksum current-checksum)] - (u/announce (if same? - "Checksum is the same. Do not need to reinstall metabase-core locally." - "Checksum is different. Need to reinstall metabase-core locally.")) - same?))) - -(defn- delete-metabase-core-install! [] - (u/step "Delete local installation of metabase-core" - (u/delete-file-if-exists! metabase-core-install-path))) - -(defn- install-metabase-core! [] - (u/step "Install metabase-core locally if needed" - (if (metabase-core-checksum-matches?) - (u/announce "Up-to-date metabase-core already installed to local Maven repo") - (do - (delete-metabase-core-install!) - (u/sh {:dir u/project-root-directory} "lein" "clean") - (u/sh {:dir u/project-root-directory} "lein" "install-for-building-drivers") - (u/step "Save checksum for local installation of metabase-core" - (spit metabase-core-checksum-path (checksum/metabase-source-checksum))) - (u/announce "metabase-core dep installed to local Maven repo successfully."))))) - -(defn uberjar-checksum-matches? - "After installing/building Metabase we save a MD5 hex checksum of Metabase backend source files (including - `project.clj`). The next time we run `build-metabase!`, if the checksums have changed we know we need to - rebuild/reinstall." 
- [] - (u/step "Determine whether Metabase source files checksum has changed since last build of uberjar" - (let [existing-checksum (checksum/checksum-from-file uberjar-checksum-path) - current-checksum (checksum/metabase-source-checksum) - same? (= existing-checksum current-checksum)] - (u/announce (if same? - "Checksum is the same. Do not need to rebuild Metabase uberjar." - "Checksum is different. Need to rebuild Metabase uberjar.")) - same?))) - -(defn- delete-metabase-uberjar! [] - (u/step "Delete exist metabase uberjar" - (u/delete-file-if-exists! (u/filename u/project-root-directory "target")))) - -(defn- build-metabase-uberjar! [] - (u/step "Build Metabase uberjar if needed" - (if (uberjar-checksum-matches?) - (u/announce "Update-to-date Metabase uberjar already built") - (do - (delete-metabase-uberjar!) - (u/sh {:dir u/project-root-directory} "lein" "clean") - (u/sh {:dir u/project-root-directory} "lein" "uberjar") - (u/step "Save checksum for Metabase uberar" - (spit uberjar-checksum-path (checksum/metabase-source-checksum))) - (u/announce "Metabase uberjar built successfully"))))) - -(defn clean-metabase! - "Delete local Maven repository installation of the `metabase-core` library and delete the built Metabase uberjar." - [] - (u/step "Clean local Metabase deps" - (delete-metabase-core-install!) - (delete-metabase-uberjar!))) - -(defn build-metabase! - "Install `metabase-core` as a library in the local Maven repo, and build the Metabase uberjar IF NEEDED. We need to do - both because `metabase-core` is used as a dependency for drivers, and the Metabase uberjar is checked to make sure - we don't ship duplicate classes in the driver JAR (as part of the `strip-and-compress` stage.)" - [] - (u/step "Build metabase-core and install locally" - (install-metabase-core!) 
- (build-metabase-uberjar!))) diff --git a/bin/build-drivers/src/build_drivers/plugin_manifest.clj b/bin/build-drivers/src/build_drivers/plugin_manifest.clj deleted file mode 100644 index 163c6096475d..000000000000 --- a/bin/build-drivers/src/build_drivers/plugin_manifest.clj +++ /dev/null @@ -1,55 +0,0 @@ -(ns build-drivers.plugin-manifest - "Code for reading the YAML plugin manifest for a driver. " - (:require [build-drivers.common :as c] - [metabuild-common.core :as u] - [yaml.core :as yaml])) - -(defn- plugin-manifest - "Read `driver` plugin manifest and return a map." - [driver] - {:post [(map? %)]} - (yaml/from-file (u/assert-file-exists (c/driver-plugin-manifest-filename driver)))) - -(defn- driver-declarations [manifest] - ;; driver plugin manifest can have a single `:driver`, or multiple drivers, e.g. Spark SQL which also has the - ;; `:hive-like` abstract driver - (let [{driver-declaration :driver} manifest] - (if (map? driver-declaration) - [driver-declaration] - driver-declaration))) - -(defn- declared-drivers - "Sequence of all drivers declared in a plugin `manifest`. Usually only one driver, except for Spark SQL which declares - both `:hive-like` and `:sparksql`." - [manifest] - (map (comp keyword :name) (driver-declarations manifest))) - -(def ^:private metabase-core-drivers - "Drivers that ship as part of the core Metabase project (as opposed to a plugin) and thus do not need to be built." - #{:sql - :sql-jdbc - :mysql - :h2 - :postgres}) - -(defn parent-drivers - "Get the parent drivers of a driver for purposes of building a driver. Excludes drivers that ship as part of - `metabase-core`, since we don't need to worry about building those. - - e.g. - - (parent-drivers :googleanalytics) ;-> (:google)" - [driver] - (let [manifest (plugin-manifest driver) - declared (declared-drivers manifest)] - (or (not-empty - (for [{parent-declaration :parent} (driver-declarations manifest) - :let [parents (if (string? 
parent-declaration) - [parent-declaration] - parent-declaration)] - parent parents - :let [parent (keyword parent)] - :when (and (not (contains? (set declared) parent)) - (not (contains? metabase-core-drivers parent)))] - parent)) - (u/announce "%s does not have any parents" driver)))) diff --git a/bin/build-drivers/src/build_drivers/strip_and_compress.clj b/bin/build-drivers/src/build_drivers/strip_and_compress.clj deleted file mode 100644 index b2d594ea8d5c..000000000000 --- a/bin/build-drivers/src/build_drivers/strip_and_compress.clj +++ /dev/null @@ -1,65 +0,0 @@ -(ns build-drivers.strip-and-compress - (:require [build-drivers.common :as c] - [build-drivers.plugin-manifest :as manifest] - [metabuild-common.core :as u]) - (:import java.io.FileOutputStream - [java.util.zip ZipEntry ZipFile ZipOutputStream] - org.apache.commons.io.IOUtils)) - -(def ^:private files-to-always-include - "Files to always include regardless of whether they are present in blacklist JAR." - #{"metabase-plugin.yaml"}) - -(defn- jar-contents - "Get a set of all files in a JAR that we should strip out from the driver JAR -- either the Metabase uberjar itself or - a parent driver JAR." - [^String jar-path] - (with-open [zip-file (ZipFile. jar-path)] - (set - (for [^ZipEntry zip-entry (enumeration-seq (.entries zip-file)) - :let [filename (str zip-entry)] - :when (not (files-to-always-include filename))] - filename)))) - -(defn- strip-classes! [^String driver-jar-path ^String blacklist-jar-path] - (u/step (format "Remove classes from %s that are present in %s and recompress" driver-jar-path blacklist-jar-path) - (let [jar-contents (jar-contents blacklist-jar-path) - temp-driver-jar-path "/tmp/driver.jar" - wrote (atom 0) - skipped (atom 0)] - (u/delete-file-if-exists! temp-driver-jar-path) - (with-open [source-zip (ZipFile. (u/assert-file-exists driver-jar-path)) - os (doto (ZipOutputStream. (FileOutputStream. 
temp-driver-jar-path)) - (.setMethod ZipOutputStream/DEFLATED) - (.setLevel 9))] - (doseq [^ZipEntry entry (enumeration-seq (.entries source-zip))] - (if (jar-contents (str entry)) - (swap! skipped inc) - (with-open [is (.getInputStream source-zip entry)] - (.putNextEntry os (ZipEntry. (.getName entry))) - (IOUtils/copy is os) - (.closeEntry os) - (swap! wrote inc))))) - (u/announce (format "Done. wrote: %d skipped: %d" @wrote @skipped)) - (u/safe-println (format "Original size: %s" (u/format-bytes (u/file-size driver-jar-path)))) - (u/safe-println (format "Stripped/extra-compressed size: %s" (u/format-bytes (u/file-size temp-driver-jar-path)))) - (u/step "replace the original source JAR with the stripped one" - (u/delete-file-if-exists! driver-jar-path) - (u/copy-file! temp-driver-jar-path driver-jar-path))))) - -(defn strip-and-compress-uberjar! - "Remove any classes in compiled `driver` that are also present in the Metabase uberjar or parent drivers. The classes - will be available at runtime, and we don't want to make things unpredictable by including them more than once in - different drivers. - - This is only needed because `lein uberjar` does not seem to reliably exclude classes from `:provided` Clojure - dependencies like `metabase-core` and the parent drivers." - [driver] - (u/step (str (format "Strip out any classes in %s driver JAR found in core Metabase uberjar or parent JARs" driver) - " and recompress with higher compression ratio") - (let [driver-jar-path (u/assert-file-exists (c/driver-jar-build-path driver))] - (u/step "strip out any classes also found in the core Metabase uberjar" - (strip-classes! driver-jar-path (u/assert-file-exists c/metabase-uberjar-path))) - (u/step "remove any classes also found in any of the parent JARs" - (doseq [parent (manifest/parent-drivers driver)] - (strip-classes! 
driver-jar-path (u/assert-file-exists (c/driver-jar-build-path parent)))))))) diff --git a/bin/build-drivers/src/build_drivers/verify.clj b/bin/build-drivers/src/build_drivers/verify.clj index 6386f2538afa..5f09d75b712b 100644 --- a/bin/build-drivers/src/build_drivers/verify.clj +++ b/bin/build-drivers/src/build_drivers/verify.clj @@ -19,6 +19,16 @@ (u/announce "Driver init class file found.") (throw (ex-info (format "Driver verification failed: init class file %s not found" driver-init-class-filename) {})))))) +(defn- verify-does-not-have-clojure-core [driver] + (let [jar-filename (c/driver-jar-destination-path driver)] + (u/step (format "Check %s does not contain Clojure core classes" jar-filename) + (doseq [file ["clojure/spec/alpha__init.class" + "clojure/core__init.class" + "clojure/core.clj"]] + (when (jar-contains-file? jar-filename file) + (throw (ex-info (format "Driver verification failed: driver contains compiled Clojure core file %s" file) + {:file file}))))))) + (defn- verify-has-plugin-manifest [driver] (let [jar-filename (c/driver-jar-destination-path driver)] (u/step (format "Check %s contains metabase-plugin.yaml" jar-filename) @@ -34,4 +44,5 @@ (u/assert-file-exists (c/driver-jar-destination-path driver)) (verify-has-init-class driver) (verify-has-plugin-manifest driver) + (verify-does-not-have-clojure-core driver) (u/announce (format "%s driver verification successful." 
driver)))) diff --git a/bin/build-drivers/src/verify_driver.clj b/bin/build-drivers/src/verify_driver.clj index 5964ba99c0e1..4e96397e8f87 100644 --- a/bin/build-drivers/src/verify_driver.clj +++ b/bin/build-drivers/src/verify_driver.clj @@ -7,4 +7,4 @@ (u/exit-when-finished-nonzero-on-exception (when-not (seq driver) (throw (ex-info "Usage: clojure -m verify-driver " {}))) - (verify/verify-driver (keyword driver)))) + (verify/verify-driver (u/parse-as-keyword driver)))) diff --git a/bin/build-drivers/test/build_drivers/build_driver_test.clj b/bin/build-drivers/test/build_drivers/build_driver_test.clj index 8eab16884f24..20afc365fb8e 100644 --- a/bin/build-drivers/test/build_drivers/build_driver_test.clj +++ b/bin/build-drivers/test/build_drivers/build_driver_test.clj @@ -15,19 +15,11 @@ (build-driver/build-driver! :oracle :oss) (is (.exists (java.io.File. (jar-path)))) (testing "JAR should not contain the JDBC driver classes" - (is (not (jar-contains-jdbc-classes?)))) - (testing "Wouldn't need to rebuild :oss version of the driver" - (is (#'build-driver/driver-checksum-matches? :oracle :oss))) - (testing "WOULD need to build :ee version of the driver" - (is (not (#'build-driver/driver-checksum-matches? :oracle :ee)))))) + (is (not (jar-contains-jdbc-classes?)))))) (deftest build-ee-driver-test (testing "We should be able to build an EE driver" (build-driver/build-driver! :oracle :ee) (is (.exists (java.io.File. (jar-path)))) (testing "JAR *should* contain the JDBC driver classes" - (is (jar-contains-jdbc-classes?))) - (testing "Wouldn't need to rebuild :ee version of the driver" - (is (#'build-driver/driver-checksum-matches? :oracle :ee))) - (testing "WOULD need to build :oss version of the driver" - (is (not (#'build-driver/driver-checksum-matches? 
:oracle :oss)))))) + (is (jar-contains-jdbc-classes?))))) diff --git a/bin/build-drivers/test/build_drivers/checksum_test.clj b/bin/build-drivers/test/build_drivers/checksum_test.clj deleted file mode 100644 index 5c1bf4964900..000000000000 --- a/bin/build-drivers/test/build_drivers/checksum_test.clj +++ /dev/null @@ -1,11 +0,0 @@ -(ns build-drivers.checksum-test - (:require [build-drivers.checksum :as checksum] - [clojure.test :refer :all])) - -(deftest driver-checksum-test - (testing "OSS/EE checksums should be the same for drivers that don't have different oss/ee profiles" - (is (= (checksum/driver-checksum :sqlite :oss) - (checksum/driver-checksum :sqlite :ee)))) - (testing "OSS/EE checksums should be different for drivers that have different oss/ee profiles" - (is (not= (checksum/driver-checksum :oracle :oss) - (checksum/driver-checksum :oracle :ee))))) diff --git a/bin/build-drivers/test/build_drivers/common_test.clj b/bin/build-drivers/test/build_drivers/common_test.clj deleted file mode 100644 index 8aa517fc3b4c..000000000000 --- a/bin/build-drivers/test/build_drivers/common_test.clj +++ /dev/null @@ -1,15 +0,0 @@ -(ns build-drivers.common-test - (:require [build-drivers.common :as c] - [clojure.test :refer :all])) - -(deftest has-edition-profile?-test - (testing :ee - (is (= true - (c/has-edition-profile? :oracle :ee))) - (is (= false - (c/has-edition-profile? :sqlite :ee)))) - (testing :oss - (is (= false - (c/has-edition-profile? :oracle :oss))) - (is (= false - (c/has-edition-profile? 
:sqlite :oss))))) diff --git a/bin/build-drivers/test/build_drivers/install_driver_locally_test.clj b/bin/build-drivers/test/build_drivers/install_driver_locally_test.clj deleted file mode 100644 index 3a3dd0e58e17..000000000000 --- a/bin/build-drivers/test/build_drivers/install_driver_locally_test.clj +++ /dev/null @@ -1,16 +0,0 @@ -(ns build-drivers.install-driver-locally-test - (:require [build-drivers.install-driver-locally :as install-driver-locally] - [clojure.string :as str] - [clojure.test :refer :all])) - -(deftest local-install-checksum-filename-test - (is (str/ends-with? - (#'install-driver-locally/local-install-checksum-filename :oracle :ee) - ".m2/repository/metabase/oracle-driver/ee-checksum.md5")) - (is (str/ends-with? - (#'install-driver-locally/local-install-checksum-filename :oracle :oss) - ".m2/repository/metabase/oracle-driver/checksum.md5")) - (doseq [edition [:oss :ee]] - (is (str/ends-with? - (#'install-driver-locally/local-install-checksum-filename :sqlite edition) - ".m2/repository/metabase/sqlite-driver/checksum.md5")))) diff --git a/bin/build-for-test b/bin/build-for-test index d3a1cbfc3dd4..4013d23ff480 100755 --- a/bin/build-for-test +++ b/bin/build-for-test @@ -7,7 +7,7 @@ VERSION_PROPERTY_NAME="src_hash" source-hash() { # hash all the files that might change a backend-only uberjar build (for integration tests) ( - find src project.clj resources/sample-dataset.db.mv.db -type f -print0 | xargs -0 shasum ; + find src deps.edn resources/sample-dataset.db.mv.db -type f -print0 | xargs -0 shasum ; find resources -type f \( -iname \*.clj -o -iname \*.edn -o -iname \*.yaml -o -iname \*.properties -o -iname \*.html \) -not -name "version.properties" -print0 | xargs -0 shasum ; ) | shasum | awk '{ print $1 }' } diff --git a/bin/build-mb/README.md b/bin/build-mb/README.md new file mode 100644 index 000000000000..4d9aafe3fb96 --- /dev/null +++ b/bin/build-mb/README.md @@ -0,0 +1,28 @@ +## Build Metabase Tooling + +This project is to build the 
Metabase jar. It can be called standalone and is also called from the release project when creating releases. + +## License Information + +We create license information for all of our dependencies, both frontend and backend, and package them in our jar. + +Tests will run in CI that we have license information for all dependencies. If you see these failing, an easy way to get a report of dependencies without license information can be obtained by running + +```shell +build-mb % clojure -X build/list-without-license +$ "lein" "with-profile" "-dev,+ee,+include-all-drivers" "classpath" +All dependencies have licenses +``` + +If there are dependencies with missing license information you will see output like + +```shell +build-mb % clojure -X build/list-without-license +$ "lein" "with-profile" "-dev,+ee,+include-all-drivers" "classpath" +Missing License: /Users/dan/.m2/repository/org/eclipse/jetty/jetty-webapp/9.3.19.v20170502/jetty-webapp-9.3.19.v20170502.jar +Missing License: /Users/dan/.m2/repository/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar +Missing License: /Users/dan/.m2/repository/org/opensaml/opensaml-security-impl/3.4.5/opensaml-security-impl-3.4.5.jar +Missing License: /Users/dan/.m2/repository/colorize/colorize/0.1.1/colorize-0.1.1.jar +``` + +You can check the overrides file (resources/overrides.edn) and add the license information there, or perhaps improve the license discovery mechanism in the code. 
diff --git a/bin/build-mb/resources/overrides.edn b/bin/build-mb/resources/overrides.edn index a52997818069..716a08df4966 100644 --- a/bin/build-mb/resources/overrides.edn +++ b/bin/build-mb/resources/overrides.edn @@ -5,10 +5,17 @@ "com.google.guava" {:resource "apache2_0.txt"}, "com.fasterxml.jackson.dataformat" {:resource "apache2_0.txt"}, "com.onelogin" {:resource "MIT.txt"}, + "com.vladsch.flexmark" {:resource "BSD.txt"}, "xalan" {:resource "apache2_0.txt"} "org.apache.hadoop" {:resource "apache2_0.txt"} "org.ow2.asm" {:resource "BSD.txt"} - "org.eclipse.jetty" {:resource "apache2_0.txt"}} + "org.eclipse.jetty" {:resource "apache2_0.txt"} + ;; see if we can remove these if/when https://github.com/googleapis/java-core/issues/488 is done + "com.google.cloud" {:resource "apache2_0.txt"} + "com.google.auth" {:resource "apache2_0.txt"} + "com.google.code.gson" {:resource "apache2_0.txt"} + "com.google.protobuf" {:resource "apache2_0.txt"} + "com.google.http-client" {:resource "apache2_0.txt"}} "com.google.http-client" {"google-http-client" {:resource "apache2_0.txt"} "google-http-client-jackson2" {:resource "apache2_0.txt"}} @@ -34,6 +41,7 @@ "org.slf4j" {"slf4j-api" {:resource "MIT.txt"}}, "amalloy" {"ring-gzip-middleware" {:resource "MIT.txt"}}, "jakarta.activation" {"jakarta.activation-api" {:resource "EDL.txt"}}, + "com.sun.activation" {"jakarta.activation" {:resource "EDL.txt"}} "net.jcip" {"jcip-annotations" {:resource "CC_2_5.txt"}}, "hiccup" {"hiccup" {:resource "EPL.txt"}}, "jakarta.xml.bind" {"jakarta.xml.bind-api" {:resource "EDL.txt"}}, @@ -41,7 +49,8 @@ {"java-support" {:resource "apache2_0.txt"}}, "io.dropwizard.metrics" {"metrics-core" {:resource "apache2_0.txt"}}, "stencil" {"stencil" {:resource "EPL.txt"}}, - "org.antlr" {"antlr-runtime" {:resource "BSD.txt"}}, + "org.antlr" {"antlr-runtime" {:resource "BSD.txt"} + "antlr4-runtime" {:resource "BSD.txt"}}, "de.rototor.pdfbox" {"graphics2d" {:resource "apache2_0.txt"}}, "colorize" {"colorize" 
{:resource "EPL.txt"}}, "org.liquibase" {"liquibase-core" {:resource "apache2_0.txt"}}, diff --git a/bin/build-mb/src/build.clj b/bin/build-mb/src/build.clj index e7b63c7889d9..03f1f2c974cf 100644 --- a/bin/build-mb/src/build.clj +++ b/bin/build-mb/src/build.clj @@ -44,50 +44,57 @@ "NODE_ENV" "production" "MB_EDITION" mb-edition}} "./node_modules/.bin/webpack" "--bail")) + ;; related to the above TODO -- not sure why `yarn build-static-viz` fails here + (u/step "Build static viz" + (u/sh {:dir u/project-root-directory + :env {"PATH" (env/env :path) + "HOME" (env/env :user-home) + "NODE_ENV" "production" + "MB_EDITION" mb-edition}} + "./node_modules/.bin/webpack" "--bail" "--config" "webpack.static-viz.config.js")) (u/announce "Frontend built successfully.")))) +(defn- build-licenses! + [edition] + {:pre [(#{:oss :ee} edition)]} + (u/step "Generate backend license information from jar files" + (let [[classpath] (u/sh {:dir u/project-root-directory + :quiet? true} + "clojure" (str "-A" edition) "-Spath") + output-filename (u/filename u/project-root-directory + "resources" + "license-backend-third-party.txt") + {:keys [without-license]} (license/generate {:classpath classpath + :backfill (edn/read-string + (slurp (io/resource "overrides.edn"))) + :output-filename output-filename + :report? false})] + (when (seq without-license) + (run! (comp (partial u/error "Missing License: %s") first) + without-license)) + (u/announce "License information generated at %s" output-filename))) + + (u/step "Run `yarn licenses generate-disclaimer`" + (let [license-text (str/join \newline + (u/sh {:dir u/project-root-directory + :quiet? true} + "yarn" "licenses" "generate-disclaimer"))] + (spit (u/filename u/project-root-directory + "resources" + "license-frontend-third-party.txt") license-text)))) + (def uberjar-filename (u/filename u/project-root-directory "target" "uberjar" "metabase.jar")) (defn- build-uberjar! [edition] {:pre [(#{:oss :ee} edition)]} (u/delete-file-if-exists! 
uberjar-filename) (u/step (format "Build uberjar with profile %s" edition) - (u/sh {:dir u/project-root-directory} "lein" "clean") - (u/sh {:dir u/project-root-directory} "lein" "with-profile" (str \+ (name edition)) "uberjar") + ;; TODO -- we (probably) don't need to shell out in order to do this anymore, we should be able to do all this + ;; stuff directly in Clojure land by including this other `build` namespace directly (once we dedupe the names) + (u/sh {:dir u/project-root-directory} "clojure" "-T:build" "uberjar" :edition edition) (u/assert-file-exists uberjar-filename) (u/announce "Uberjar built successfully."))) -(defn- build-backend-licenses-file! [edition] - {:pre [(#{:oss :ee} edition)]} - (let [classpath-and-logs (u/sh {:dir u/project-root-directory - :quiet? true} - "lein" - "with-profile" (str \- "dev" - (str \, \+ (name edition)) - \,"+include-all-drivers") - "classpath") - classpath (last - classpath-and-logs) - output-filename (u/filename u/project-root-directory "license-backend-third-party") - {:keys [with-license - without-license]} (license/generate {:classpath classpath - :backfill (edn/read-string - (slurp (io/resource "overrides.edn"))) - :output-filename output-filename - :report? false})] - (when (seq without-license) - (run! (comp (partial u/error "Missing License: %s") first) - without-license)) - (u/announce "License information generated at %s" output-filename))) - -(defn- build-frontend-licenses-file! - [] - (let [license-text (str/join \newline - (u/sh {:dir u/project-root-directory - :quiet? true} - "yarn" "licenses" "generate-disclaimer"))] - (spit (u/filename u/project-root-directory "license-frontend-third-party") license-text))) - (def all-steps (ordered-map/ordered-map :version (fn [{:keys [edition version]}] @@ -96,12 +103,10 @@ (i18n/create-all-artifacts!)) :frontend (fn [{:keys [edition]}] (build-frontend! edition)) + :licenses (fn [{:keys [edition]}] + (build-licenses! 
edition)) :drivers (fn [{:keys [edition]}] (build-drivers/build-drivers! edition)) - :backend-licenses (fn [{:keys [edition]}] - (build-backend-licenses-file! edition)) - :frontend-licenses (fn [{:keys []}] - (build-frontend-licenses-file!)) :uberjar (fn [{:keys [edition]}] (build-uberjar! edition)))) @@ -121,7 +126,7 @@ version (str/join ", " (map name steps))) (doseq [step-name steps - :let [step-fn (or (get all-steps (keyword step-name)) + :let [step-fn (or (get all-steps (u/parse-as-keyword step-name)) (throw (ex-info (format "Invalid step: %s" step-name) {:step step-name :valid-steps (keys all-steps)})))]] @@ -133,3 +138,20 @@ (build! (merge {:edition (edition-from-env-var)} (when-let [steps (not-empty steps)] {:steps steps}))))) + +;; useful to call from command line `cd bin/build-mb && clojure -X build/list-without-license` +(defn list-without-license [{:keys []}] + (let [[classpath] (u/sh {:dir u/project-root-directory + :quiet? true} + "clojure" "-A:ee" "-Spath") + classpath-entries (license/jar-entries classpath) + {:keys [without-license]} (license/process* + {:classpath-entries classpath-entries + :backfill (edn/read-string + (slurp (io/resource "overrides.edn")))})] + (if (seq without-license) + (run! 
(comp (partial u/error "Missing License: %s") first) + without-license) + (u/announce "All dependencies have licenses")) + (shutdown-agents) + (System/exit (if (seq without-license) 1 0)))) diff --git a/bin/build-mb/src/build/licenses.clj b/bin/build-mb/src/build/licenses.clj index e5794f34d340..7acc5699a1e8 100644 --- a/bin/build-mb/src/build/licenses.clj +++ b/bin/build-mb/src/build/licenses.clj @@ -185,10 +185,17 @@ {:with-license (categorized true) :without-license (categorized false)})) +(defn jar-entries + "Returns a seq of jar entries on the classpath" + [classpath] + (->> (str/split classpath (re-pattern classpath-separator)) + (filter jar-file?))) + (defn generate - "Process a classpath, creating a file of all license information, writing to `:output-filename`. Backfill is a clojure - data structure or a filename of an edn file of a clojure datastructure providing for backfilling license information - if it is not discernable from the jar. Should be of the form (note keys are strings not symbols) + "Process a classpath, creating a file of all license information, writing to `:output-filename`. `classpath-entries` + should be a seq of classpath roots. Split a classpath on the classpath separator. Backfill is a clojure data + structure or a filename of an edn file of a clojure datastructure providing for backfilling license information if + it is not discernable from the jar. Should be of the form (note keys are strings not symbols) {\"group\" {\"artifact\" \"license text\"} \"group\" {\"artifact\" {:resource \"filename-of-license\"}} @@ -210,25 +217,22 @@ :without-license [ [jar-filename {:coords {:group :artifact :version} :error }] ... ]}" [{:keys [classpath backfill output-filename report?] :or {report? true}}] (let [backfill (if (string? 
backfill) - (edn/read-string (slurp backfill)) + (edn/read-string (slurp (io/resource backfill))) (or backfill {})) - entries (->> (str/split classpath (re-pattern classpath-separator)) - (filter jar-file?))] - (let [{:keys [with-license without-license] :as license-info} - (process* {:classpath-entries entries - :backfill backfill})] + entries (jar-entries classpath) + {:keys [with-license without-license] :as license-info} + (process* {:classpath-entries entries + :backfill backfill})] + (when (seq with-license) + (with-open [os (io/writer output-filename)] + (run! #(write-license os %) with-license))) + (when report? + (when (seq without-license) + (run! #(report-missing *err* %) without-license)) (when (seq with-license) - (with-open [os (io/writer output-filename)] - (run! #(write-license os %) with-license))) - (when report? - (when (seq without-license) - (run! #(report-missing *err* %) without-license)) - (when (seq with-license) - (println "License information for" (count with-license) "libraries written to " - output-filename) - ;; we call this from the build script. if we switch to the shell we can reenable this and figure out the - ;; best defaults. Want to make sure we never kill our build script - #_(System/exit (if (seq without-license) 1 0)))) - license-info))) - -;; clj -X build.licenses/generate :classpath \"$(cd ../.. && lein with-profile -dev,+ee,+include-all-drivers classpath | tail -n1)\" :backfill "\"resources/overrides.edn\"" :output-filename "\"backend-licenses-ee.txt\"" + (println "License information for" (count with-license) "libraries written to " + output-filename) + ;; we call this from the build script. if we switch to the shell we can reenable this and figure out the + ;; best defaults. 
Want to make sure we never kill our build script + #_(System/exit (if (seq without-license) 1 0)))) + license-info)) diff --git a/bin/build-mb/test/build/licenses_test.clj b/bin/build-mb/test/build/licenses_test.clj index c38f8b12fe56..6d4465586338 100644 --- a/bin/build-mb/test/build/licenses_test.clj +++ b/bin/build-mb/test/build/licenses_test.clj @@ -207,26 +207,26 @@ (deftest all-deps-have-licenses (testing "All deps on the classpath have licenses" - (loop-until-success #(u/sh {:dir u/project-root-directory} "lein" "with-profile" "+include-all-drivers,+oss,+ee" "deps") 3 "download deps") - (doseq [edition [:oss :ee]] - (let [classpath (u/sh {:dir u/project-root-directory - :quiet? true} - "lein" - "with-profile" (str \- "dev" - (str \, \+ (name edition)) - \,"+include-all-drivers") - "classpath") - classpath-entries (->> (str/split (last classpath) (re-pattern lic/classpath-separator)) - (filter lic/jar-file?))] - (let [results (lic/process* {:classpath-entries classpath-entries - :backfill (edn/read-string - (slurp (io/resource "overrides.edn")))})] - (is (nil? (:without-license results)) "Some deps don't have identifiable licenses") - (is (= (set classpath-entries) - (into #{} (->> results :with-license (map first)))))) - (is (some? (:without-license - (lic/process* {:classpath-entries classpath-entries - :backfill {}})))))))) + (loop-until-success #(u/sh {:dir u/project-root-directory} "clojure" "-A:ee" "-P") 3 "download deps") + (let [edition :ee + classpath (u/sh {:dir u/project-root-directory + :quiet? true} + "clojure" + "-A:ee" + "-Spath") + classpath-entries (->> (str/split (last classpath) (re-pattern lic/classpath-separator)) + (filter lic/jar-file?))] + (let [results (lic/process* {:classpath-entries classpath-entries + :backfill (edn/read-string + (slurp (io/resource "overrides.edn")))})] + (is (nil? 
(:without-license results)) + (str "Deps without license information:\n" + (str/join "\n" (map first (:without-license results))))) + (is (= (set classpath-entries) + (into #{} (->> results :with-license (map first)))))) + (is (some? (:without-license + (lic/process* {:classpath-entries classpath-entries + :backfill {}}))))))) (comment (run-tests) (binding [clojure.test/*test-out* *out*] (run-tests)) diff --git a/bin/check-clojure-cli.sh b/bin/check-clojure-cli.sh index 0fdb8cd5df44..d78b774713f6 100755 --- a/bin/check-clojure-cli.sh +++ b/bin/check-clojure-cli.sh @@ -1,9 +1,7 @@ #! /usr/bin/env bash -set -eou pipefail - you_need_to_upgrade() { - echo "Clojure CLI must be at least version 1.10.1.708. Your version is $version." + echo "Clojure CLI must be at least version 1.10.3.905. Your version is $version." echo "See https://www.clojure.org/guides/getting_started for upgrade instructions." exit -3 } @@ -24,8 +22,8 @@ check_clojure_cli() { elif [ "$minor_version" -eq "10" ]; then if [ "$patch_version" -lt "1" ]; then you_need_to_upgrade - elif [ "$patch_version" -eq "1" ]; then - if [ "$build_version" -lt "708" ]; then + elif [ "$patch_version" -eq "3" ]; then + if [ "$build_version" -lt "905" ]; then you_need_to_upgrade fi fi diff --git a/bin/clear-outdated-cpcaches.sh b/bin/clear-outdated-cpcaches.sh new file mode 100755 index 000000000000..89c2e43cd3d9 --- /dev/null +++ b/bin/clear-outdated-cpcaches.sh @@ -0,0 +1,51 @@ +#! /usr/bin/env bash + +set -euo pipefail + +script_directory=`dirname "${BASH_SOURCE[0]}"` + +# This function will clear all the .cpcache directories if any deps.edn file is newer than any of them. +clear_outdated_cpcaches() { + echo "Clearing outdated .cpcache directories if needed..." + + # switch to project root directory if we're not already there + cd "$script_directory/.." 
+ project_root=`pwd` + + cpcaches=`find bin java modules -type d -name .cpcache` + if [ -d .cpcache ]; then + cpcaches=".cpcache $cpcaches" + fi + if [ -z "$cpcaches" ]; then + echo "No .cpcache directories found; nothing to do" + return 0 + fi + + deps_edns="deps.edn $(find bin java modules -type f -name deps.edn)" + + # find the OLDEST cpcache and NEWEST deps.edn files. + oldest_cpcache="" + for cpcache in $cpcaches; do + if [ -z "$oldest_cpcache" ] || [ "$cpcache" -ot "$oldest_cpcache" ]; then + oldest_cpcache="$cpcache" + fi + done + + newest_deps_edn="" + for deps_edn in $deps_edns; do + if [ -z "$newest_deps_edn" ] || [ "$deps_edn" -nt "$newest_deps_edn" ]; then + newest_deps_edn="$deps_edn" + fi + done + + # if the newest deps.edn is newer than the *ANY* of the cpcaches, clear all the cpcaches. + if [ "$newest_deps_edn" -nt "$oldest_cpcache" ]; then + echo "$newest_deps_edn is newer than $oldest_cpcache; deleting all .cpcache directories" + for cpcache in $cpcaches; do + echo "rm -rf $cpcache" + rm -rf "$cpcache" + done + else + echo ".cpcache directories are up to date." + fi +} diff --git a/bin/common/src/metabuild_common/core.clj b/bin/common/src/metabuild_common/core.clj index 08b592c67b93..770d95efa219 100644 --- a/bin/common/src/metabuild_common/core.clj +++ b/bin/common/src/metabuild_common/core.clj @@ -35,6 +35,7 @@ env-or-throw] [files + absolute? assert-file-exists copy-file! create-directory-unless-exists! 
@@ -56,12 +57,12 @@ yes-or-no-prompt] [misc + parse-as-keyword varargs] [output announce error - format-bytes pretty-print-exception safe-println] diff --git a/bin/common/src/metabuild_common/files.clj b/bin/common/src/metabuild_common/files.clj index cff9f9407d90..318e43d6f049 100644 --- a/bin/common/src/metabuild_common/files.clj +++ b/bin/common/src/metabuild_common/files.clj @@ -1,6 +1,6 @@ (ns metabuild-common.files - (:require [clojure.string :as str] - [environ.core :as env] + (:require [clojure.java.io :as io] + [clojure.string :as str] [metabuild-common.misc :as misc] [metabuild-common.output :as out] [metabuild-common.shell :as sh] @@ -97,20 +97,16 @@ (str/join File/separatorChar path-components)) (def ^String project-root-directory - "Root directory of the Metabase repo, e.g. `/users/cam/metabase`. Determined by finding the directory that has - `project.clj` in it." - (loop [^File dir (File. ^String (env/env :user-dir))] - (cond - (file-exists? (filename (.getAbsolutePath dir) "project.clj")) - (.getAbsolutePath dir) - - (.getParentFile dir) - (recur (.getParentFile dir)) - - :else - (throw (ex-info (format "Can't find project root directory: no parent directory of %s has a project.clj file" - (env/env :user-dir)) - {:dir (env/env :user-dir)}))))) + "Root directory of the Metabase repo, e.g. `/users/cam/metabase`. Determined based on its location relative to this + source file." + (.. (Paths/get (.toURI (io/resource "metabuild_common/files.clj"))) + toFile + getParentFile ; /home/cam/metabase/bin/common/src/metabuild_common + getParentFile ; /home/cam/metabase/bin/common/src/ + getParentFile ; /home/cam/metabase/bin/common/ + getParentFile ; /home/cam/metabase/bin/ + getParentFile ; /home/cam/metabase/ + getCanonicalPath)) (defn download-file! "Download a file from `url` to `dest-path` using `wget`." @@ -139,3 +135,8 @@ (when delete-on-exit? (.deleteOnExit file)) file))) + +(defn absolute? + "Whether `file` is an absolute path." 
+ [file] + (.isAbsolute (io/file file))) diff --git a/bin/common/src/metabuild_common/misc.clj b/bin/common/src/metabuild_common/misc.clj index 02c1779c5fba..a71a1143817c 100644 --- a/bin/common/src/metabuild_common/misc.clj +++ b/bin/common/src/metabuild_common/misc.clj @@ -1,10 +1,23 @@ -(ns metabuild-common.misc) +(ns metabuild-common.misc + (:require [clojure.string :as str])) (defmacro varargs "Utility macro for passing varargs of a certain `klass` to a Java method. (Files/createTempFile \"driver\" \".jar\" (varargs FileAttribute))" - {:style/indent 1, :arglists '([klass] [klass xs])} + {:arglists '([klass] [klass xs])} [klass & [objects]] (vary-meta `(into-array ~klass ~objects) assoc :tag (format "[L%s;" (.getCanonicalName ^Class (ns-resolve *ns* klass))))) + +(defn parse-as-keyword + "Like [[clojure.core/keyword]], but with a couple of tweaks to make it better for parsing command-line args: + + * empty strings get parsed to `nil` instead of an empty keyword `:` + * strings starting with `:` e.g. `\":driver\"` get parsed to normal keywords e.g. `:driver` instead of `::driver` + (which is super confusing, because it's an _unnamespaced_ keyword whose the _name_ is `:driver`)" + [s] + (cond + (keyword? s) s + (not (str/blank? s)) (keyword (cond-> s + (str/starts-with? s ":") (.substring 1))))) diff --git a/bin/common/src/metabuild_common/output.clj b/bin/common/src/metabuild_common/output.clj index 3e70771d0343..2e49b694be06 100644 --- a/bin/common/src/metabuild_common/output.clj +++ b/bin/common/src/metabuild_common/output.clj @@ -40,12 +40,3 @@ (println (colorize/red (str "Step failed: " (.getMessage e)))) (binding [pprint/*print-right-margin* 120] (pprint/pprint e-map)))) - -(defn format-bytes - "Nicely format `num-bytes` in a human-readable way (e.g. 
KB/MB/etc.)" - [num-bytes] - (loop [n num-bytes [suffix & more] ["B" "KB" "MB" "GB"]] - (if (and (seq more) - (>= n 1024)) - (recur (/ n 1024.0) more) - (format "%.1f %s" n suffix)))) diff --git a/bin/common/test/metabuild_common/misc_test.clj b/bin/common/test/metabuild_common/misc_test.clj new file mode 100644 index 000000000000..9f3d004ebb4c --- /dev/null +++ b/bin/common/test/metabuild_common/misc_test.clj @@ -0,0 +1,12 @@ +(ns metabuild-common.misc-test + (:require [clojure.test :refer :all] + [metabuild-common.misc :as misc])) + +(deftest parse-as-keyword-test + (are [input expected] (= expected (misc/parse-as-keyword input)) + "abc" :abc + ":abc" :abc + "" nil + " " nil + :abc :abc + nil nil)) diff --git a/bin/compare-screenshots b/bin/compare-screenshots deleted file mode 100755 index 4f7e2535a482..000000000000 --- a/bin/compare-screenshots +++ /dev/null @@ -1,145 +0,0 @@ -#!/usr/bin/env babel-node - -import fetch from "isomorphic-fetch"; -import path from "path"; -import fs from "fs-promise" -import imageDiff_ from "image-diff"; -import https from "https"; -import os from "os"; - -import { WebClient } from "@slack/client"; - -const CIRCLECI_TOKEN = process.env["CIRCLECI_TOKEN"]; -const SLACK_TOKEN = process.env["SLACK_TOKEN"]; -const SLACK_CHANNEL = "#ci-screenshots"; - -const CIRCLE_PROJECT = "github/metabase/metabase"; -const CIRCLE_BRANCH = "master"; - -const CIRCLE_SCREENSHOT_PATH = "/home/ubuntu/metabase/screenshots/"; - -const slack = new WebClient(SLACK_TOKEN); - -async function circleci(path) { - const response = await fetch( - `https://circleci.com/api/v1.1/${path}?circle-token=${encodeURIComponent(CIRCLECI_TOKEN)}` - ); - return response.json(); -} - -function imageDiff(options) { - return new Promise((resolve, reject) => { - imageDiff_.getFullResult(options, (err, result) => - err ? 
reject(err) : resolve(result) - ); - }); -} - -function download(url, path) { - return new Promise((resolve, reject) => { - https.get(url, response => { - response.pipe(fs.createWriteStream(path)).on("finish", resolve); - }).on('error', reject); - }); -} - -async function getCircleArtifactScreenshots(buildPath) { - let artifacts = await circleci(`project/${buildPath}/artifacts`); - let results = {}; - for (const artifact of artifacts) { - if (artifact.pretty_path.startsWith(CIRCLE_SCREENSHOT_PATH)) { - const downloadPath = path.join(os.tmpdir(), path.basename(artifact.pretty_path)); - console.log("Downloading ", artifact.url, "to", downloadPath); - await download(artifact.url, downloadPath); - results[artifact.pretty_path.slice(CIRCLE_SCREENSHOT_PATH.length)] = downloadPath; - } - } - return results; -} - -async function getLocalScreenshots(directory) { - const filenames = await fs.readdir(directory); - let results = {}; - for (const filename of filenames) { - results[filename] = path.resolve(directory, filename); - } - return results; -} - -async function getScreenshots(target) { - if (target.circleProject && target.circleBranch) { - let builds = await circleci(`project/${target.circleProject}/tree/${target.circleBranch}`); - let ok = builds.filter(build => build.status === "success" || build.status === "fixed"); - let screenshots = await getCircleArtifactScreenshots(`${target.circleProject}/${ok[0].build_num}`); - return screenshots; - } else if (target.localDirectory) { - return await getLocalScreenshots(target.localDirectory); - } else { - throw "unknown target type"; - } -} - -async function run(expectedTarget, actualTarget) { - try { - const expectedScreenshots = await getScreenshots(expectedTarget); - console.log("Expected", Object.keys(expectedScreenshots)); - const actualScreenshots = await getScreenshots(actualTarget); - console.log("Actual", Object.keys(expectedScreenshots)); - let images = Object.keys({ ...expectedScreenshots, ...actualScreenshots }); - 
- for (const image of images) { - const expectedImage = expectedScreenshots[image]; - const actualImage = actualScreenshots[image]; - const diffImage = path.join(os.tmpdir(), "diff-"+image); - if (!actualImage) { - console.log("Added", image); - await slack.files.upload(image, { - title: "Added " + image, - channels: [SLACK_CHANNEL], - file: fs.createReadStream(actualImage) - }); - } else if (!expectedImage) { - console.log("Removed", image); - await slack.files.upload(image, { - title: "Removed " + image, - channels: [SLACK_CHANNEL], - file: fs.createReadStream(expectedImage) - }); - } else { - const result = await imageDiff({ - expectedImage, - actualImage, - diffImage, - shadow: true - }) - if (result.percentage === 0.0) { - console.log("No difference", image); - } else { - console.log("Changed", result.percentage.toFixed(2), image); - await slack.files.upload(image, { - title: "Diff (" + result.percentage.toFixed(2) + ") " + image, - channels: [SLACK_CHANNEL], - file: fs.createReadStream(diffImage) - }); - await slack.files.upload(image, { - title: "Expected " + image, - channels: [SLACK_CHANNEL], - file: fs.createReadStream(expectedImage) - }); - await slack.files.upload(image, { - title: "Actual " + image, - channels: [SLACK_CHANNEL], - file: fs.createReadStream(actualImage) - }); - } - } - } - } catch (e) { - console.error(e); - } -} - -run( - { circleProject: CIRCLE_PROJECT, circleBranch: CIRCLE_BRANCH }, - { localDirectory: "screenshots" } -); diff --git a/bin/i18n/src/i18n/create_artifacts.clj b/bin/i18n/src/i18n/create_artifacts.clj index 23cf7de8ad3b..26187f0aafba 100644 --- a/bin/i18n/src/i18n/create_artifacts.clj +++ b/bin/i18n/src/i18n/create_artifacts.clj @@ -25,6 +25,9 @@ (u/announce "Artifacts for locale %s created successfully." (pr-str locale)))) (defn- create-artifacts-for-all-locales! [] + ;; Empty directory in case some locales were removed + (u/delete-file-if-exists! backend/target-directory) + (u/delete-file-if-exists! 
frontend/target-directory) (doseq [locale (i18n/locales)] (create-artifacts-for-locale! locale))) diff --git a/bin/i18n/src/i18n/create_artifacts/frontend.clj b/bin/i18n/src/i18n/create_artifacts/frontend.clj index dd94a35e7a8f..3e326b754c09 100644 --- a/bin/i18n/src/i18n/create_artifacts/frontend.clj +++ b/bin/i18n/src/i18n/create_artifacts/frontend.clj @@ -43,7 +43,7 @@ (defn- i18n-map [locale] (->i18n-map (i18n/po-contents locale))) -(def ^:private target-directory +(def target-directory (u/filename u/project-root-directory "resources" "frontend_client" "app" "locales")) (defn- target-filename [locale] diff --git a/bin/i18n/update-translation-template b/bin/i18n/update-translation-template index 4a4b979dbe4a..e327cda37213 100755 --- a/bin/i18n/update-translation-template +++ b/bin/i18n/update-translation-template @@ -1,6 +1,6 @@ -#!/bin/sh +#! /usr/bin/env bash -set -eu +set -euo pipefail # gettext installed via homebrew is "keg-only", add it to the PATH if [ -d "/usr/local/opt/gettext/bin" ]; then @@ -13,6 +13,16 @@ if ! command -v xgettext > /dev/null; then exit 1 fi +# switch to project root directory if we're not already there +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/../.." 
+ +source "./bin/check-clojure-cli.sh" +check_clojure_cli + +source "./bin/prep.sh" +prep_deps + POT_NAME="locales/metabase.pot" POT_BACKEND_NAME="locales/metabase-backend.pot" # NOTE: hardcoded in .babelrc @@ -27,7 +37,7 @@ mkdir -p "locales" ####################### # NOTE: about twice as fast to call babel directly rather than a full webpack build -BABEL_ENV=extract ./node_modules/.bin/babel -q -x .js,.jsx -o /dev/null {enterprise/,}frontend/src +BABEL_ENV=extract ./node_modules/.bin/babel --quiet -x .js,.jsx -o /dev/null {enterprise/,}frontend/src # BABEL_ENV=extract BABEL_DISABLE_CACHE=1 yarn run build # NOTE: replace ttag's "${ 0 }" style references with xgettext "{0}" style references for consistency @@ -68,7 +78,7 @@ rm "$POT_BACKEND_NAME.bak" # update auto dash pot # ######################## -lein generate-automagic-dashboards-pot +clojure -M:generate-automagic-dashboards-pot ################## # merge all pots # diff --git a/bin/lint-migrations-file.sh b/bin/lint-migrations-file.sh index 419ea7540d28..e649dcdf9af1 100755 --- a/bin/lint-migrations-file.sh +++ b/bin/lint-migrations-file.sh @@ -9,5 +9,11 @@ cd "$script_directory/.." source "./bin/check-clojure-cli.sh" check_clojure_cli +source "./bin/clear-outdated-cpcaches.sh" +clear_outdated_cpcaches + +source "./bin/prep.sh" +prep_deps + cd bin/lint-migrations-file clojure -M -m lint-migrations-file $@ diff --git a/bin/lint-migrations-file/deps.edn b/bin/lint-migrations-file/deps.edn index eadce4b742bb..02d58a2d9182 100644 --- a/bin/lint-migrations-file/deps.edn +++ b/bin/lint-migrations-file/deps.edn @@ -1,7 +1,7 @@ {:paths ["src"] :deps - {io.forward/yaml {:mvn/version "1.0.9"} ; don't upgrade to 1.0.10 -- doesn't work on Java 8 (!) 
+ {io.forward/yaml {:mvn/version "1.0.9"} ; Don't upgrade yet, new version doesn't support Java 8 (see https://github.com/owainlewis/yaml/issues/37) org.flatland/ordered {:mvn/version "1.5.9"}} ; used by io.forward/yaml -- need the newer version :aliases diff --git a/bin/prep.sh b/bin/prep.sh new file mode 100755 index 000000000000..0962c281d8d0 --- /dev/null +++ b/bin/prep.sh @@ -0,0 +1,60 @@ +#! /usr/bin/env bash + +# functions for running prep steps to compile Java and AOT source files, needed before running other stuff. + +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/.." +project_root=`pwd` + +clear_cpcaches() { + cd "$project_root" + for file in `find . -type d -name .cpcache`; do + rm -rf "$file" + done +} + +compile_java_sources() { + cd "$project_root" + + echo "Compile Java source files in $project_root/java if needed..." + if [ ! -d "$project_root/java/target/classes" ]; then + echo 'Compile Java source files' + cd "$project_root" + clojure -Sforce -X:deps prep + else + echo 'Java source files are already compiled' + fi +} + +compile_spark_sql_aot_sources() { + cd "$project_root" + + echo "Compile Spark SQL AOT source files in $project_root/modules/drivers/sparksql if needed..." + if [ ! -d "$project_root/modules/drivers/sparksql/target/classes" ]; then + echo 'Compile Spark SQL AOT source files' + cd "$project_root/modules/drivers" + clojure -Sforce -X:deps prep + else + echo 'Spark SQL AOT source files are already compiled' + fi +} + +prep_deps() { + if compile_java_sources; then + echo "Java sources => OK" + else + echo 'Compilation failed (WHY?!); clearing classpath caches and trying again...' + clear_cpcaches + compile_java_sources + fi + + if compile_spark_sql_aot_sources; then + echo "Spark SQL AOT sources => OK" + else + echo 'Compilation failed (WHY?!); clearing classpath caches and trying again...' 
+ clear_cpcaches + compile_spark_sql_aot_sources + fi + + cd "$project_root" +} diff --git a/bin/reflection-linter b/bin/reflection-linter deleted file mode 100755 index 559267ebb08d..000000000000 --- a/bin/reflection-linter +++ /dev/null @@ -1,13 +0,0 @@ -#! /usr/bin/env bash - -printf "\e[1;34mChecking for reflection warnings. This may take a few minutes, so sit tight...\e[0m\n" - -warnings=`lein with-profile +ci,+ee check-reflection-warnings 2>&1 | grep Reflection | grep metabase | sort | uniq` - -if [ ! -z "$warnings" ]; then - printf "\e[1;31mYour code has introduced some reflection warnings.\e[0m 😞\n" - echo "$warnings"; - exit -1; -fi - -printf "\e[1;32mNo reflection warnings! Success.\e[0m\n" diff --git a/bin/release.sh b/bin/release.sh index 19dd88914e6f..b8fc3c68bb3c 100755 --- a/bin/release.sh +++ b/bin/release.sh @@ -2,8 +2,18 @@ set -euo pipefail +# switch to project root directory if we're not already there +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/.." 
+ source "./bin/check-clojure-cli.sh" check_clojure_cli +source "./bin/clear-outdated-cpcaches.sh" +clear_outdated_cpcaches + +source "./bin/prep.sh" +prep_deps + cd bin/release clojure -M -m release $@ diff --git a/bin/release/deps.edn b/bin/release/deps.edn index b9e0e5d2365f..cffa79de3fec 100644 --- a/bin/release/deps.edn +++ b/bin/release/deps.edn @@ -14,8 +14,4 @@ :extra-deps {com.cognitect/test-runner {:git/url "https://github.com/cognitect-labs/test-runner.git" :sha "209b64504cb3bd3b99ecfec7937b358a879f55c1"} org.clojure/data.json {:mvn/version "2.0.2"}} - :main-opts ["-m" "cognitect.test-runner"]} - :nREPL {:extra-paths ["test"] - :extra-deps {nrepl/nrepl {:mvn/version "0.8.3"} - org.clojure/data.json {:mvn/version "2.0.2"}} - :main-opts ["-m" "nrepl.cmdline" "-i"]}}} + :main-opts ["-m" "cognitect.test-runner"]}}} diff --git a/bin/release/src/release.clj b/bin/release/src/release.clj index 96ea1327a4fb..06acdd36afb6 100644 --- a/bin/release/src/release.clj +++ b/bin/release/src/release.clj @@ -53,6 +53,6 @@ (u/exit-when-finished-nonzero-on-exception (check-prereqs/check-prereqs) (set-build-options/prompt-and-set-build-options!) - (let [steps (or (seq (map keyword steps)) + (let [steps (or (seq (map u/parse-as-keyword steps)) (keys steps*))] (do-steps! 
steps)))) diff --git a/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/cloudwatch/config.json b/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/cloudwatch/config.json index a71cc0fbd496..a6ecec67597c 100644 --- a/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/cloudwatch/config.json +++ b/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/cloudwatch/config.json @@ -7,39 +7,39 @@ "metrics_collected": { "cpu": { "measurement": [ - "time_active", - "time_guest", - "time_guest_nice", - "time_idle", - "time_iowait", - "time_irq", - "time_nice", - "time_softirq", - "time_steal", - "time_system", - "time_user", - "usage_active", - "usage_guest", - "usage_guest_nice", - "usage_idle", - "usage_iowait", - "usage_irq", - "usage_nice", - "usage_softirq", - "usage_steal", - "usage_system", + "time_active", + "time_guest", + "time_guest_nice", + "time_idle", + "time_iowait", + "time_irq", + "time_nice", + "time_softirq", + "time_steal", + "time_system", + "time_user", + "usage_active", + "usage_guest", + "usage_guest_nice", + "usage_idle", + "usage_iowait", + "usage_irq", + "usage_nice", + "usage_softirq", + "usage_steal", + "usage_system", "usage_user" ], "totalcpu": false }, "disk": { "measurement": [ - "free", - "total", - "used", - "used_percent", - "inodes_free", - "inodes_used", + "free", + "total", + "used", + "used_percent", + "inodes_free", + "inodes_used", "inodes_total" ], "ignore_file_system_types": [ @@ -49,76 +49,76 @@ "measurement": [ "reads", "writes", - "read_bytes", - "write_bytes", - "read_time", - "write_time", - "io_time", + "read_bytes", + "write_bytes", + "read_time", + "write_time", + "io_time", "iops_in_progress" ] }, "swap": { "measurement": [ - "free", - "used", + "free", + "used", "used_percent" ] }, "mem": { "measurement": [ - "active", - "available", - "available_percent", - "buffered", - "cached", - "free", - "inactive", - "total", - "used", + "active", + 
"available", + "available_percent", + "buffered", + "cached", + "free", + "inactive", + "total", + "used", "used_percent" ] }, "net": { "measurement": [ - "bytes_sent", - "bytes_recv", - "drop_in", - "drop_out", - "err_in", - "err_out", - "packets_sent", + "bytes_sent", + "bytes_recv", + "drop_in", + "drop_out", + "err_in", + "err_out", + "packets_sent", "packets_recv" ] }, "netstat": { "measurement": [ - "tcp_close", - "tcp_close_wait", - "tcp_closing", - "tcp_established", - "tcp_fin_wait1", - "tcp_fin_wait2", - "tcp_last_ack", - "tcp_listen", - "tcp_none", - "tcp_syn_sent", - "tcp_syn_recv", - "tcp_time_wait", + "tcp_close", + "tcp_close_wait", + "tcp_closing", + "tcp_established", + "tcp_fin_wait1", + "tcp_fin_wait2", + "tcp_last_ack", + "tcp_listen", + "tcp_none", + "tcp_syn_sent", + "tcp_syn_recv", + "tcp_time_wait", "udp_socket" ] }, "processes": { "measurement": [ - "blocked", - "dead", - "idle", - "paging", - "running", - "sleeping", - "stopped", - "total", - "total_threads", - "wait", + "blocked", + "dead", + "idle", + "paging", + "running", + "sleeping", + "stopped", + "total", + "total_threads", + "wait", "zombies" ] } @@ -131,4 +131,4 @@ }, "aggregation_dimensions" : [["InstanceId"], ["InstanceType"], ["InstanceId","InstanceType"]] } -} \ No newline at end of file +} diff --git a/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/metabase-setup.sh b/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/metabase-setup.sh index fab03d46fbfa..71f610d37380 100755 --- a/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/metabase-setup.sh +++ b/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/metabase-setup.sh @@ -7,48 +7,48 @@ set_up_env_vars () { # /opt/elasticbeanstalk/bin/get-config environment | jq -r 'to_entries | .[] | "export \(.key)=\"\(.value)\""' > /etc/profile.d/sh.local - if grep -q "Amazon Linux 2" /etc/os-release; then - if [ ! 
-z "$RDS_HOSTNAME" ]; then + if grep -q "Amazon Linux 2" /etc/os-release; then + if [ ! -z "$RDS_HOSTNAME" ]; then # sed -i 's/RDS_HOSTNAME/MB_DB_HOST/' /etc/profile.d/sh.local sed -i 's/RDS_HOSTNAME/MB_DB_HOST/' /opt/elasticbeanstalk/deployment/env.list # sed -i 's/RDS_USERNAME/MB_DB_USER/' /etc/profile.d/sh.local sed -i 's/RDS_USERNAME/MB_DB_USER/' /opt/elasticbeanstalk/deployment/env.list # sed -i 's/RDS_PASSWORD/MB_DB_PASS/' /etc/profile.d/sh.local sed -i 's/RDS_PASSWORD/MB_DB_PASS/' /opt/elasticbeanstalk/deployment/env.list - # sed -i 's/RDS_PORT/MB_DB_PORT/' /etc/profile.d/sh.local + # sed -i 's/RDS_PORT/MB_DB_PORT/' /etc/profile.d/sh.local sed -i 's/RDS_PORT/MB_DB_PORT/' /opt/elasticbeanstalk/deployment/env.list - # sed -i 's/RDS_DB_NAME/MB_DB_DBNAME/' /etc/profile.d/sh.local + # sed -i 's/RDS_DB_NAME/MB_DB_DBNAME/' /etc/profile.d/sh.local sed -i 's/RDS_DB_NAME/MB_DB_DBNAME/' /opt/elasticbeanstalk/deployment/env.list - if [ "$RDS_PORT" == "3306" ]; then + if [ "$RDS_PORT" == "3306" ]; then # echo 'export MB_DB_TYPE="mysql"' >> /etc/profile.d/sh.local echo 'MB_DB_TYPE=mysql' >> /opt/elasticbeanstalk/deployment/env.list - else + else # echo 'export MB_DB_TYPE="postgres"' >> /etc/profile.d/sh.local echo 'MB_DB_TYPE=postgres' >> /opt/elasticbeanstalk/deployment/env.list - fi + fi fi else - if [ ! -z "$RDS_HOSTNAME" ]; then + if [ ! 
-z "$RDS_HOSTNAME" ]; then # sed -i 's/RDS_HOSTNAME/MB_DB_HOST/' /etc/profile.d/sh.local sed -i 's/RDS_HOSTNAME/MB_DB_HOST/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration # sed -i 's/RDS_USERNAME/MB_DB_USER/' /etc/profile.d/sh.local sed -i 's/RDS_USERNAME/MB_DB_USER/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration # sed -i 's/RDS_PASSWORD/MB_DB_PASS/' /etc/profile.d/sh.local sed -i 's/RDS_PASSWORD/MB_DB_PASS/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration - # sed -i 's/RDS_PORT/MB_DB_PORT/' /etc/profile.d/sh.local + # sed -i 's/RDS_PORT/MB_DB_PORT/' /etc/profile.d/sh.local sed -i 's/RDS_PORT/MB_DB_PORT/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration - # sed -i 's/RDS_DB_NAME/MB_DB_DBNAME/' /etc/profile.d/sh.local + # sed -i 's/RDS_DB_NAME/MB_DB_DBNAME/' /etc/profile.d/sh.local sed -i 's/RDS_DB_NAME/MB_DB_DBNAME/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration - if [ "$RDS_PORT" == "3306" ]; then + if [ "$RDS_PORT" == "3306" ]; then # echo 'export MB_DB_TYPE="mysql"' >> /etc/profile.d/sh.local sed -i 's/}}}}/,"MB_DB_TYPE":"mysql"}}}}/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration - else + else # echo 'export MB_DB_TYPE="postgres"' >> /etc/profile.d/sh.local sed -i 's/}}}}/,"MB_DB_TYPE":"postgres"}}}}/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration - fi + fi fi fi - + } # add files to papertrail @@ -101,4 +101,4 @@ set_up_env_vars) install_papertrail) install_papertrail ;; -esac \ No newline at end of file +esac diff --git a/bin/release/src/release/elastic_beanstalk/.platform/confighooks/postdeploy/config_nginx.sh b/bin/release/src/release/elastic_beanstalk/.platform/confighooks/postdeploy/config_nginx.sh index bf5565db1d5f..aa9bfe4cd3f2 100755 --- a/bin/release/src/release/elastic_beanstalk/.platform/confighooks/postdeploy/config_nginx.sh +++ 
b/bin/release/src/release/elastic_beanstalk/.platform/confighooks/postdeploy/config_nginx.sh @@ -3,4 +3,4 @@ if [[ "x$NGINX_FORCE_SSL" == "x1" ]]; then cp .platform/nginx/nginx-ssl.conf /etc/nginx/nginx.conf && nginx -t && /sbin/service nginx restart else cp .platform/nginx/nginx.conf /etc/nginx/nginx.conf && nginx -t && /sbin/service nginx restart -fi \ No newline at end of file +fi diff --git a/bin/release/src/release/elastic_beanstalk/.platform/hooks/postdeploy/config_nginx.sh b/bin/release/src/release/elastic_beanstalk/.platform/hooks/postdeploy/config_nginx.sh index bf5565db1d5f..aa9bfe4cd3f2 100755 --- a/bin/release/src/release/elastic_beanstalk/.platform/hooks/postdeploy/config_nginx.sh +++ b/bin/release/src/release/elastic_beanstalk/.platform/hooks/postdeploy/config_nginx.sh @@ -3,4 +3,4 @@ if [[ "x$NGINX_FORCE_SSL" == "x1" ]]; then cp .platform/nginx/nginx-ssl.conf /etc/nginx/nginx.conf && nginx -t && /sbin/service nginx restart else cp .platform/nginx/nginx.conf /etc/nginx/nginx.conf && nginx -t && /sbin/service nginx restart -fi \ No newline at end of file +fi diff --git a/bin/release/test/release/version_info_test.clj b/bin/release/test/release/version_info_test.clj index 8e8c6b783568..73ea4e0febc1 100644 --- a/bin/release/test/release/version_info_test.clj +++ b/bin/release/test/release/version_info_test.clj @@ -44,5 +44,3 @@ (json/read-json true)) expected (make-version-info edition test-versions)] (is (= expected actual))))))) - - diff --git a/build.clj b/build.clj new file mode 100644 index 000000000000..2b52bdf298ea --- /dev/null +++ b/build.clj @@ -0,0 +1,160 @@ +(ns build + (:require [clojure.java.io :as io] + [clojure.string :as str] + [clojure.tools.build.api :as b] + [clojure.tools.build.util.zip :as b.zip] + [clojure.tools.namespace.dependency :as ns.deps] + [clojure.tools.namespace.find :as ns.find] + [clojure.tools.namespace.parse :as ns.parse] + [hf.depstar.api :as d] + [metabuild-common.core :as c]) + (:import java.io.OutputStream + 
java.net.URI + [java.nio.file Files FileSystems OpenOption StandardOpenOption] + java.util.Collections + java.util.jar.Manifest)) + +(def class-dir "target/classes") +(def uberjar-filename "target/uberjar/metabase.jar") + +(defn do-with-duration-ms [thunk f] + (let [start-time-ms (System/currentTimeMillis) + result (thunk) + duration (- (System/currentTimeMillis) start-time-ms)] + (f duration) + result)) + +(defmacro with-duration-ms [[duration-ms-binding] & body] + (let [[butlast-forms last-form] ((juxt butlast last) body)] + `(do-with-duration-ms + (fn [] ~@butlast-forms) + (fn [~duration-ms-binding] + ~last-form)))) + +(defn create-basis [edition] + {:pre [(#{:ee :oss} edition)]} + (b/create-basis {:project "deps.edn", :aliases #{edition}})) + +(defn all-paths [basis] + (concat (:paths basis) + (get-in basis [:classpath-args :extra-paths]))) + +(defn clean! [] + (c/step "Clean" + (c/step (format "Delete %s" class-dir) + (b/delete {:path class-dir})) + (c/step (format "Delete %s" uberjar-filename) + (b/delete {:path uberjar-filename})))) + +;; this topo sort order stuff is required for stuff to work correctly... I copied it from my Cloverage PR +;; https://github.com/cloverage/cloverage/pull/303 +(defn- dependencies-graph + "Return a `clojure.tools.namespace` dependency graph of namespaces named by `ns-symbol`." + [ns-decls] + (reduce + (fn [graph ns-decl] + (let [ns-symbol (ns.parse/name-from-ns-decl ns-decl)] + (reduce + (fn [graph dep] + (ns.deps/depend graph ns-symbol dep)) + graph + (ns.parse/deps-from-ns-decl ns-decl)))) + (ns.deps/graph) + ns-decls)) + +(defn metabase-namespaces-in-topo-order [basis] + (let [ns-decls (mapcat + (comp ns.find/find-ns-decls-in-dir io/file) + (all-paths basis)) + ns-symbols (set (map ns.parse/name-from-ns-decl ns-decls))] + (->> (dependencies-graph ns-decls) + ns.deps/topo-sort + (filter ns-symbols)))) + +(defn compile-sources! 
[basis] + (c/step "Compile Clojure source files" + (let [paths (all-paths basis) + _ (c/announce "Compiling Clojure files in %s" (pr-str paths)) + ns-decls (c/step "Determine compilation order for Metabase files" + (metabase-namespaces-in-topo-order basis))] + (with-duration-ms [duration-ms] + (b/compile-clj {:basis basis + :src-dirs paths + :class-dir class-dir + :ns-compile ns-decls}) + (c/announce "Finished compilation in %.1f seconds." (/ duration-ms 1000.0)))))) + +(defn copy-resources! [edition basis] + (c/step "Copy resources" + ;; technically we don't NEED to copy the Clojure source files but it doesn't really hurt anything IMO. + (doseq [path (all-paths basis)] + (c/step (format "Copy %s" path) + (b/copy-dir {:target-dir class-dir, :src-dirs [path]}))))) + +(defn create-uberjar! [basis] + (c/step "Create uberjar" + (with-duration-ms [duration-ms] + (d/uber {:class-dir class-dir + :uber-file uberjar-filename + :basis basis}) + (c/announce "Created uberjar in %.1f seconds." (/ duration-ms 1000.0))))) + +(def manifest-entries + {"Manifest-Version" "1.0" + "Created-By" "Metabase build.clj" + "Build-Jdk-Spec" (System/getProperty "java.specification.version") + "Main-Class" "metabase.core" + "Liquibase-Package" (str/join "," + ["liquibase.change" + "liquibase.changelog" + "liquibase.database" + "liquibase.datatype" + "liquibase.diff" + "liquibase.executor" + "liquibase.ext" + "liquibase.lockservice" + "liquibase.logging" + "liquibase.parser" + "liquibase.precondition" + "liquibase.sdk" + "liquibase.serializer" + "liquibase.snapshot" + "liquibase.sqlgenerator" + "liquibase.structure" + "liquibase.structurecompare"])}) + +(defn manifest ^Manifest [] + (doto (Manifest.) + (b.zip/fill-manifest! manifest-entries))) + +(defn write-manifest! 
[^OutputStream os] + (.write (manifest) os) + (.flush os)) + +;; the customizations we need to make are not currently supported by tools.build -- see +;; https://ask.clojure.org/index.php/10827/ability-customize-manifest-created-clojure-tools-build-uber -- so we need +;; to do it by hand for the time being. +(defn update-manifest! [] + (c/step "Update META-INF/MANIFEST.MF" + (with-open [fs (FileSystems/newFileSystem (URI. (str "jar:file:" (.getAbsolutePath (io/file "target/uberjar/metabase.jar")))) + Collections/EMPTY_MAP)] + (let [manifest-path (.getPath fs "META-INF" (into-array String ["MANIFEST.MF"]))] + (with-open [os (Files/newOutputStream manifest-path (into-array OpenOption [StandardOpenOption/WRITE + StandardOpenOption/TRUNCATE_EXISTING]))] + (write-manifest! os)))))) + +;; clojure -T:build uberjar :edition +(defn uberjar [{:keys [edition], :or {edition :oss}}] + (c/step (format "Build %s uberjar" edition) + (with-duration-ms [duration-ms] + (clean!) + (let [basis (create-basis edition)] + (compile-sources! basis) + (copy-resources! edition basis) + (create-uberjar! basis) + (update-manifest!)) + (c/announce "Built target/uberjar/metabase.jar in %.1f seconds." + (/ duration-ms 1000.0))))) + +;; TODO -- add `jar` and `install` commands to install Metabase to the local Maven repo (?) could make it easier to +;; build 3rd-party drivers the old way diff --git a/codecov.yml b/codecov.yml index 82aaeaac126e..aa4def51848f 100644 --- a/codecov.yml +++ b/codecov.yml @@ -5,12 +5,32 @@ codecov: coverage: status: project: - default: + back-end: # Project must always have at least 78% coverage (by line) target: 78% # Whole-project test coverage is allowed to drop up to 5%. 
(For situtations where we delete code with full coverage) threshold: 5% - patch: - default: - # Changes must have at least 75% test coverage (by line) - target: 75% + flags: + - back-end + + front-end: + target: 35% + threshold: 5% + flags: + - front-end + + patch: off + +flags: + back-end: + paths: + - enterprise/backend + - shared/src + - src/metabase + carryforward: true + + front-end: + paths: + - enterprise/frontend + - frontend + carryforward: true diff --git a/deps.edn b/deps.edn new file mode 100644 index 000000000000..9c0fbad4dd10 --- /dev/null +++ b/deps.edn @@ -0,0 +1,430 @@ +;; -*- comment-column: 80; -*- +{:deps + ;; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + ;; !! PLEASE KEEP THESE ORGANIZED ALPHABETICALLY !! + ;; !! AND ADD A COMMENT EXPLAINING THEIR PURPOSE !! + ;; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + {aleph/aleph {:mvn/version "0.4.6" ; Async HTTP library; WebSockets + :exclusions [org.clojure/tools.logging]} + amalloy/ring-buffer {:mvn/version "1.3.1" ; fixed length queue implementation, used in log buffering + :exclusions [org.clojure/clojure + org.clojure/clojurescript]} + amalloy/ring-gzip-middleware {:mvn/version "0.1.4"} ; Ring middleware to GZIP responses if client can handle it + bigml/histogram {:mvn/version "4.1.3"} ; Histogram data structure + buddy/buddy-core {:mvn/version "1.10.1" ; various cryptograhpic functions + :exclusions [commons-codec/commons-codec + org.bouncycastle/bcpkix-jdk15on + org.bouncycastle/bcprov-jdk15on]} + buddy/buddy-sign {:mvn/version "3.4.1"} ; JSON Web Tokens; High-Level message signing library + cheshire/cheshire {:mvn/version "5.10.1"} ; fast JSON encoding (used by Ring JSON middleware) + clj-http/clj-http {:mvn/version "3.12.3" ; HTTP client + :exclusions [commons-codec/commons-codec + commons-io/commons-io + slingshot/slingshot]} + clojurewerkz/quartzite 
{:mvn/version "2.1.0" ; scheduling library + :exclusions [c3p0/c3p0]} + colorize/colorize {:mvn/version "0.1.1" ; string output with ANSI color codes (for logging) + :exclusions [org.clojure/clojure]} + com.cemerick/friend {:mvn/version "0.2.3" ; auth library + :exclusions [commons-codec/commons-codec + net.sourceforge.nekohtml/nekohtml + org.apache.httpcomponents/httpclient + ring/ring-core + slingshot/slingshot]} + com.clearspring.analytics/stream {:mvn/version "2.9.8" ; Various sketching algorithms + :exclusions [it.unimi.dsi/fastutil + org.slf4j/slf4j-api]} + com.draines/postal {:mvn/version "2.0.4"} ; SMTP library + com.google.guava/guava {:mvn/version "30.1.1-jre"} ; dep for BigQuery, Spark, and GA. Require here rather than letting different dep versions stomp on each other — see comments on #9697 + com.h2database/h2 {:mvn/version "1.4.197"} ; embedded SQL database + com.taoensso/nippy {:mvn/version "3.1.1"} ; Fast serialization (i.e., GZIP) library for Clojure + com.vladsch.flexmark/flexmark {:mvn/version "0.62.2"} ; Markdown parsing + commons-codec/commons-codec {:mvn/version "1.15"} ; Apache Commons -- useful codec util fns + commons-io/commons-io {:mvn/version "2.11.0"} ; Apache Commons -- useful IO util fns + commons-validator/commons-validator {:mvn/version "1.7" ; Apache Commons -- useful validation util fns + :exclusions [commons-beanutils/commons-beanutils + commons-digester/commons-digester + commons-logging/commons-logging]} + compojure/compojure {:mvn/version "1.6.2" ; HTTP Routing library built on Ring + :exclusions [ring/ring-codec]} + crypto-random/crypto-random {:mvn/version "1.2.1"} ; library for generating cryptographically secure random bytes and strings + dk.ative/docjure {:mvn/version "1.16.0" ; excel export + :exclusions [org.apache.poi/poi + org.apache.poi/poi-ooxml]} + environ/environ {:mvn/version "1.2.0"} ; env vars/Java properties abstraction + hiccup/hiccup {:mvn/version "1.0.5"} ; HTML templating + honeysql/honeysql {:mvn/version
"1.0.461" ; Transform Clojure data structures to SQL + :exclusions [org.clojure/clojurescript]} + instaparse/instaparse {:mvn/version "1.4.10"} ; Make your own parser + io.forward/yaml {:mvn/version "1.0.9" ; Clojure wrapper for YAML library SnakeYAML. Don't upgrade yet, new version doesn't support Java 8 (see https://github.com/owainlewis/yaml/issues/37) + :exclusions [org.clojure/clojure + org.flatland/ordered + org.yaml/snakeyaml]} + javax.xml.bind/jaxb-api {:mvn/version "2.4.0-b180830.0359"} ; add the `javax.xml.bind` classes which we're still using but were removed in Java 11 + joda-time/joda-time {:mvn/version "2.10.10"} + kixi/stats {:mvn/version "0.4.4" ; Various statistic measures implemented as transducers + :exclusions [org.clojure/data.avl]} + me.raynes/fs {:mvn/version "1.4.6" ; Filesystem tools + :exclusions [org.apache.commons/commons-compress]} + medley/medley {:mvn/version "1.3.0"} ; lightweight lib of useful functions + metabase/connection-pool {:mvn/version "1.1.1"} ; simple wrapper around C3P0. 
JDBC connection pools + metabase/saml20-clj {:mvn/version "2.0.0"} ; EE SAML integration + metabase/throttle {:mvn/version "1.0.2"} ; Tools for throttling access to API endpoints and other code pathways + net.cgrand/macrovich {:mvn/version "0.2.1"} ; utils for writing macros for both Clojure & ClojureScript + net.redhogs.cronparser/cron-parser-core {:mvn/version "3.5" ; describe Cron schedule in human-readable language + :exclusions [joda-time/joda-time ; exclude joda time 2.3 which has outdated timezone information + org.slf4j/slf4j-api]} + net.sf.cssbox/cssbox {:mvn/version "5.0.0" ; HTML / CSS rendering + :exclusions [org.slf4j/slf4j-api]} + org.apache.commons/commons-compress {:mvn/version "1.21"} ; compression utils + org.apache.commons/commons-lang3 {:mvn/version "3.12.0"} ; helper methods for working with java.lang stuff + org.apache.logging.log4j/log4j-1.2-api {:mvn/version "2.14.1"} ; apache logging framework + org.apache.logging.log4j/log4j-api {:mvn/version "2.14.1"} ; add compatibility with log4j 1.2 + org.apache.logging.log4j/log4j-core {:mvn/version "2.14.1"} ; apache logging framework + org.apache.logging.log4j/log4j-jcl {:mvn/version "2.14.1"} ; allows the commons-logging API to work with log4j 2 + org.apache.logging.log4j/log4j-liquibase {:mvn/version "2.14.1"} ; liquibase logging via log4j 2 + org.apache.logging.log4j/log4j-slf4j-impl {:mvn/version "2.14.1"} ; allows the slf4j API to work with log4j 2 + org.apache.poi/poi {:mvn/version "5.0.0"} ; Work with Office documents (e.g. 
Excel spreadsheets) -- newer version than one specified by Docjure + org.apache.poi/poi-ooxml {:mvn/version "5.0.0" + :exclusions [org.bouncycastle/bcpkix-jdk15on + org.bouncycastle/bcprov-jdk15on]} + org.apache.sshd/sshd-core {:mvn/version "2.7.0"} ; ssh tunneling and test server + org.apache.xmlgraphics/batik-all {:mvn/version "1.14"} ; SVG -> image + org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"} ; LDAP client + org.bouncycastle/bcpkix-jdk15on {:mvn/version "1.69"} ; Bouncy Castle crypto library -- explicit version of BC specified to resolve illegal reflective access errors + org.bouncycastle/bcprov-jdk15on {:mvn/version "1.69"} + org.clojure/clojure {:mvn/version "1.10.3"} + org.clojure/core.async {:mvn/version "1.3.618" + :exclusions [org.clojure/tools.reader]} + org.clojure/core.logic {:mvn/version "1.0.0"} ; optimized pattern matching library for Clojure + org.clojure/core.match {:mvn/version "1.0.0"} + org.clojure/core.memoize {:mvn/version "1.0.250"} ; useful FIFO, LRU, etc. 
caching mechanisms + org.clojure/data.csv {:mvn/version "1.0.0"} ; CSV parsing / generation + org.clojure/java.classpath {:mvn/version "1.0.0"} ; examine the Java classpath from Clojure programs + org.clojure/java.jdbc {:mvn/version "0.7.12"} ; basic JDBC access from Clojure + org.clojure/java.jmx {:mvn/version "1.0.0"} ; JMX bean library, for exporting diagnostic info + org.clojure/math.combinatorics {:mvn/version "0.1.6"} ; combinatorics functions + org.clojure/math.numeric-tower {:mvn/version "0.0.4"} ; math functions like `ceil` + org.clojure/tools.logging {:mvn/version "1.1.0"} ; logging framework + org.clojure/tools.namespace {:mvn/version "1.1.0"} + org.clojure/tools.reader {:mvn/version "1.3.6"} + org.clojure/tools.trace {:mvn/version "0.7.11"} ; function tracing + org.eclipse.jetty/jetty-server {:mvn/version "9.4.43.v20210629"} ; web server + org.flatland/ordered {:mvn/version "1.5.9"} ; ordered maps & sets + org.graalvm.js/js {:mvn/version "21.2.0"} ; JavaScript engine + org.liquibase/liquibase-core {:mvn/version "3.6.3" ; migration management (Java lib) + :exclusions [ch.qos.logback/logback-classic]} + org.mariadb.jdbc/mariadb-java-client {:mvn/version "2.6.2"} ; MySQL/MariaDB driver + org.postgresql/postgresql {:mvn/version "42.2.23"} ; Postgres driver + org.slf4j/slf4j-api {:mvn/version "1.7.32"} ; abstraction for logging frameworks -- allows end user to plug in desired logging framework at deployment time + org.tcrawley/dynapath {:mvn/version "1.1.0"} ; Dynamically add Jars (e.g. 
Oracle or Vertica) to classpath + org.threeten/threeten-extra {:mvn/version "1.7.0"} ; extra Java 8 java.time classes like DayOfMonth and Quarter + org.yaml/snakeyaml {:mvn/version "1.29"} ; YAML parser (required by liquibase) + potemkin/potemkin {:mvn/version "0.4.5" ; utility macros & fns + :exclusions [riddley/riddley]} + pretty/pretty {:mvn/version "1.0.5"} ; protocol for defining how custom types should be pretty printed + prismatic/schema {:mvn/version "1.1.12"} ; Data schema declaration and validation library + redux/redux {:mvn/version "0.1.4"} ; Utility functions for building and composing transducers + riddley/riddley {:mvn/version "0.2.0"} ; code walking lib -- used interally by Potemkin, manifold, etc. + ring/ring-core {:mvn/version "1.9.4"} ; web server (Jetty wrapper) + ring/ring-jetty-adapter {:mvn/version "1.9.4"} ; Ring adapter using Jetty webserver + ring/ring-json {:mvn/version "0.5.1"} ; Ring middleware for reading/writing JSON automatically + robdaemon/clojure.java-time {:mvn/version "0.3.3-SNAPSHOT"} ; Java 8 java.time wrapper. Fork to address #13102 - see upstream PR: https://github.com/dm3/clojure.java-time/pull/60 + slingshot/slingshot {:mvn/version "0.12.2"} ; enhanced throw/catch, used by other deps + stencil/stencil {:mvn/version "0.5.0"} ; Mustache templates for Clojure + toucan/toucan {:mvn/version "1.15.4" ; Model layer, hydration, and DB utilities + :exclusions [honeysql/honeysql + org.clojure/java.jdbc + org.clojure/tools.logging + org.clojure/tools.namespace]} + user-agent/user-agent {:mvn/version "0.1.0"} ; User-Agent string parser, for Login History page & elsewhere + weavejester/dependency {:mvn/version "0.2.1"} ; Dependency graphs and topological sorting + + ;; dummy dependency for the Java source file(s) + metabase/java-deps {:local/root "java"}} + ;; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + ;; !! 
PLEASE KEEP NEW DEPENDENCIES ABOVE ALPHABETICALLY ORGANIZED AND ADD COMMENTS EXPLAINING THEM. !! + ;; !! *PLEASE DO NOT* ADD NEW ONES TO THE BOTTOM OF THE LIST. !! + ;; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + + :paths + ["src" "shared/src" "resources" "java/target/classes"] + + :aliases + { +;;; Local Dev & test profiles + + ;; for local development: start a REPL with + ;; + ;; clojure -A:dev (basic dev REPL that includes test namespaces) + ;; clojure -A:dev:drivers:drivers-dev (dev REPL w/ drivers + tests) + ;; clojure -A:dev:ee:ee-dev (dev REPL w/ EE code including tests) + ;; + ;; You can start a web server from this REPL with + ;; + ;; (require 'dev) + ;; (dev/start!) + :dev + {:extra-deps + {clj-http-fake/clj-http-fake {:mvn/version "1.0.3" + :exclusions [slingshot/slingshot]} + cloverage/cloverage {:mvn/version "1.2.2"} + eftest/eftest {:mvn/version "0.5.9"} + jonase/eastwood {:mvn/version "0.9.6"} + lein-check-namespace-decls/lein-check-namespace-decls {:mvn/version "1.0.4"} ; misnomer since this works on Clojure CLI now too + pjstadig/humane-test-output {:mvn/version "0.11.0"} + reifyhealth/specmonstah {:mvn/version "2.0.0"} + ring/ring-mock {:mvn/version "0.4.0"} + talltale/talltale {:mvn/version "0.5.4"}} + + :extra-paths ["dev/src" "local/src" "test" "shared/test" "test_resources"] + :jvm-opts ["-Dmb.run.mode=dev" + "-Dmb.field.filter.operators.enabled=true" + "-Dmb.test.env.setting=ABCDEFG" + "-Duser.timezone=UTC" + "-Dfile.encoding=UTF-8" + "-Duser.language=en" + ;; Exceptions that get thrown repeatedly are created without stacktraces as a performance + ;; optimization in newer Java versions. This makes debugging pretty hard when working on stuff + ;; locally -- prefer debuggability over performance for local dev work. 
+ "-XX:-OmitStackTraceInFastThrow" + ;; prevent Java icon from randomly popping up in macOS dock + "-Djava.awt.headless=true"]} + + ;; includes test code as source paths. Run tests with clojure -X:dev:test + :test + {:extra-paths ["test_config"] + :exec-fn metabase.test-runner/run-tests + :jvm-opts ["-Dmb.run.mode=test" + "-Dmb.db.in.memory=true" + "-Dmb.jetty.join=false" + "-Dmb.field.filter.operators.enabled=true" + "-Dmb.api.key=test-api-key" + ;; Different port from normal `:dev` so you can run tests on a different server. + ;; TODO -- figure out how to do a random port like in the old project.clj? + "-Dmb.jetty.port=3001"]} + + ;; run the dev server with + ;; clojure -M:run + ;; clojure -M:run:drivers (include all drivers) + ;; clojure -M:run:ee (include EE code) + :run + {:main-opts ["-m" "metabase.core"] + :jvm-opts ["-Dmb.run.mode=dev" + "-Djava.awt.headless=true" ; prevent Java icon from randomly popping up in macOS dock + "-Dmb.jetty.port=3000"]} + + ;; alias for CI-specific options. + :ci + {:jvm-opts ["-Xmx2g" + ;; normally CircleCI sets `CI` as an env var, so this is mostly to replicate that locally. + "-Dci=TRUE"]} + + ;; include EE source code. + :ee + {:extra-paths ["enterprise/backend/src"]} + + ;; Include EE tests. + ;; for ee dev: :dev:ee:ee-dev + ;; for ee tests: clojure -X:dev:ee:ee-dev:test + :ee-dev + {:extra-paths ["enterprise/backend/test"]} + + ;; these aliases exist for symmetry with the ee aliases. Empty for now. + :oss + {} + + :oss-dev + {} + + ;; for local dev -- include the drivers locally with :dev:drivers + :drivers + {:extra-deps + {metabase/driver-modules {:local/root "modules/drivers"}}} + + ;; for local dev: include drivers as well as their tests. 
+ ;; + ;; clojure -X:dev:drivers:drivers-dev:test + ;; + ;; or + ;; + ;; clojure -X:dev:ee:ee-dev:drivers:drivers-dev:test (for EE) + :drivers-dev + {:extra-paths + ["modules/drivers/bigquery/test" + "modules/drivers/bigquery-cloud-sdk/test" + "modules/drivers/druid/test" + "modules/drivers/google/test" + "modules/drivers/googleanalytics/test" + "modules/drivers/mongo/test" + "modules/drivers/oracle/test" + "modules/drivers/presto/test" + "modules/drivers/presto-common/test" + "modules/drivers/presto-jdbc/test" + "modules/drivers/redshift/test" + "modules/drivers/snowflake/test" + "modules/drivers/sparksql/test" + "modules/drivers/sqlite/test" + "modules/drivers/sqlserver/test" + "modules/drivers/vertica/test"]} + +;;; Linters + + ;; clojure -X:dev:ee:ee-dev:drivers:drivers-dev:namespace-checker + :namespace-checker + {:exec-fn metabase.linters.namespace-checker/check-namespace-decls + :exec-args {:prefix-rewriting false}} + + ;; clojure -M:dev:ee:ee-dev:drivers:drivers-dev:check + :check + {:extra-deps {athos/clj-check {:git/url "https://github.com/athos/clj-check.git" + :sha "518d5a1cbfcd7c952f548e6dbfcb9a4a5faf9062"}} + :main-opts ["-m" "clj-check.check"]} + + ;; clojure -X:dev:ee:ee-dev:drivers:drivers-dev:eastwood + :eastwood + {:exec-fn metabase.linters.eastwood/eastwood + :exec-args {;; manually specify the source paths for the time being (exclude test paths) until we fix Eastwood + ;; errors in the test paths (once PR #17193 is merged) + :source-paths ["src" + "shared/src" + "enterprise/backend/src" + "modules/drivers/bigquery/src" + "modules/drivers/bigquery-cloud-sdk/src" + "modules/drivers/druid/src" + "modules/drivers/google/src" + "modules/drivers/googleanalytics/src" + "modules/drivers/mongo/src" + "modules/drivers/oracle/src" + "modules/drivers/presto/src" + "modules/drivers/presto-common/src" + "modules/drivers/presto-jdbc/src" + "modules/drivers/redshift/src" + "modules/drivers/snowflake/src" + "modules/drivers/sparksql/src" + 
"modules/drivers/sqlite/src" + "modules/drivers/sqlserver/src" + "modules/drivers/vertica/src"] + :add-linters [:unused-private-vars + ;; These linters are pretty useful but give a few false + ;; positives and can't be selectively disabled (yet) + ;; + ;; For example see https://github.com/jonase/eastwood/issues/193 + ;; + ;; It's still useful to re-enable them and run them every once + ;; in a while because they catch a lot of actual errors too. + ;; Keep an eye on the issue above and re-enable them if we can + ;; get them to work + #_:unused-fn-args + #_:unused-locals] + :exclude-linters [ ;; Turn this off temporarily until we finish removing + ;; self-deprecated functions & macros + :deprecations + ;; this has a fit in libs that use Potemkin `import-vars` such + ;; as `java-time` + :implicit-dependencies + ;; too many false positives for now + :unused-ret-vals]}} + + ;; clojure -T:whitespace-linter + :whitespace-linter + {:deps {com.github.camsaul/whitespace-linter {:sha "b25716745f5875194bc38364e498d0ddda51f4b0"}} + :ns-default whitespace-linter + :exec-args {:paths ["./.dir-locals.el" + "./deps.edn" + "./package.json" + "./shadow-cljs.edn" + ".circleci" + ".github" + "bin" + "enterprise" + "frontend" + "resources" + "shared" + "src" + "test"] + :include-patterns ["\\.clj.?$" + "\\.edn$" + "\\.el$" + "\\.html$" + "\\.json$" + "\\.jsx?$" + "\\.sh$" + "\\.yaml$" + "\\.yml$"] + :exclude-patterns ["resources/i18n/.*\\.edn$" + "resources/frontend_client" + "resources/frontend_shared" + "resources/html-entities.edn" + "frontend/src/cljs" + "frontend/test/metabase/lib/urls\\.unit\\.spec\\.js$" + "frontend/test/metabase/lib/formatting\\.unit\\.spec\\.js$" + "shared/src/metabase/shared/util/currency\\.cljc$"]}} + + ;; clojure -X:dev:ee:ee-dev:test:cloverage + :cloverage + {:exec-fn metabase.cloverage-runner/run-project + :exec-args {:fail-threshold 69 + :codecov? 
true + ;; don't instrument logging forms, since they won't get executed as part of tests anyway + ;; log calls expand to these + :exclude-call + [clojure.tools.logging/logf + clojure.tools.logging/logp] + + :src-ns-path + ["src" "enterprise/backend/src" "shared/src"] + + :test-ns-path + ["test" "enterprise/backend/test" "shared/test"] + + :ns-regex + ["^metabase.*" "^metabase-enterprise.*"] + + ;; don't instrument Postgres/MySQL driver namespaces, because we don't currently run tests for them + ;; as part of recording test coverage, which means they can give us false positives. + ;; + ;; regex literals aren't allowed in EDN. We parse them in `./test/cloverage.clj` + :ns-exclude-regex + ["metabase\\.driver\\.mysql" "metabase\\.driver\\.postgres"]} + ;; different port from `:test` so you can run it at the same time as `:test`. + :jvm-opts ["-Dmb.jetty.port=3002"]} + +;;; building Uberjar + + ;; clojure -T:build uberjar + ;; clojure -T:build uberjar :edition :ee + :build + {:deps {io.github.clojure/tools.build {:git/tag "v0.1.6", :git/sha "5636e61"} + com.github.seancorfield/depstar {:mvn/version "2.1.278"} + metabase/build.common {:local/root "bin/common"} + metabase/build-mb {:local/root "bin/build-mb"}} + :ns-default build} + +;;; Other misc convenience aliases + + ;; Profile Metabase start time with clojure -M:profile + :profile + {:main-opts ["-m" "metabase.core" "profile"] + :jvm-opts ["-XX:+CITime" ; print time spent in JIT compiler + "-XX:+PrintGC"]} + + ;; get the H2 shell with clojure -M:h2 + :h2 + {:main-opts ["-m" "org.h2.tools.Shell"]} + + ;; clojure -M:generate-automagic-dashboards-pot + :generate-automagic-dashboards-pot + {:main-opts ["-m" "metabase.automagic-dashboards.rules"]} + + ;; Start a Network REPL (nrepl) that you can connect your editor to. + ;; + ;; clojure -M:dev:nrepl (etc.)
+ :nrepl + {:extra-deps {nrepl/nrepl {:mvn/version "0.8.3"}} + :main-opts ["-m" "nrepl.cmdline"]} + + ;; TODO -- consider creating an alias that includes the `./bin` build-drivers & release code as well so we can work + ;; on them all from a single REPL process. + }} diff --git a/dev/src/dev/readme.md b/dev/src/dev/readme.md new file mode 100644 index 000000000000..ba90a1408e6d --- /dev/null +++ b/dev/src/dev/readme.md @@ -0,0 +1,54 @@ +## Render png + +Has some helper functions to help out with rendering debugging. + +In this namespace, you can run `(help)` to get a bit of help. Its principal usage right now is to render and debug the svg images. + +### NOTE! + +You must build the js bundle used to create the svgs with visx. Run `yarn build-static-viz` to ensure that this bundle is created and the file `resources/frontend_client/app/dist/lib-static-viz.bundle.js` exists + +Example usage below: + +```clojure +dev=> (require 'dev.render-png) +nil +dev=> (in-ns 'dev.render-png) +#object[clojure.lang.Namespace 0x14fef810 "dev.render-png"] +dev.render-png=> (help) + +To render some html, call the function `preview-html`. This takes one argument, a map. +The keys in the map are `:chart` and either `:html-file` or `:html-inline`. +(preview-html {:chart :donut :html-inline some-html-to-render}) +or +(preview-html {:chart :donut :html-file some-file-with-html}) + +This function will render the html and open an image. +Valid charts are `:donut`, `:line`, and `:bar`. + +You can use {{chart}} in your html to indicate where the image of the chart should be embedded. +It will be + +nil +dev.render-png=> (preview-html {:chart :donut :html-file "chart.html"}) +nil +dev.render-png=> +``` + +The steps were +1. require the namespace, so the code is loaded +2. `in-ns` to go "in" the namespace so we can easily call the functions +3. call the function we care about. It will open up an image preview. + +An example chart.html is + +```html
+

behold the donut

+ {{chart}} +
+

the donut has been beholden

+
+``` + +This file should be saved at the root of the repository for the call to `preview-html` to find it. diff --git a/dev/src/dev/render_png.clj b/dev/src/dev/render_png.clj index 5a8142ca6b96..fe291a0bf3b3 100644 --- a/dev/src/dev/render_png.clj +++ b/dev/src/dev/render_png.clj @@ -1,14 +1,20 @@ (ns dev.render-png - "Improve feedback loop for dealing with png rendering code" + "Improve feedback loop for dealing with png rendering code. Will create images using the rendering that underpins + pulses and subscriptions and open those images without needing to send them to slack or email." (:require [clojure.java.io :as io] [clojure.java.shell :as sh] + [clojure.string :as str] + [hiccup.core :as h] [metabase.models.card :as card] [metabase.models.user :as user] [metabase.pulse :as pulse] [metabase.pulse.render :as pulse-render] + [metabase.pulse.render.js-svg :as js-svg] + [metabase.pulse.render.png :as png] [metabase.query-processor :as qp] [metabase.query-processor.middleware.permissions :as qp.perms] - [toucan.db :as tdb])) + [toucan.db :as tdb]) + (:import org.fit.cssbox.misc.Base64Coder)) ;; taken from https://github.com/aysylu/loom/blob/master/src/loom/io.clj (defn- os @@ -46,15 +52,102 @@ user (tdb/select-one user/User) query-results (binding [qp.perms/*card-id* nil] (qp/process-query-and-save-execution! - (assoc dataset_query :async? false) + (-> dataset_query + (assoc :async? false) + (assoc-in [:middleware :process-viz-settings?] true)) {:executed-by (:id user) :context :pulse :card-id card-id})) png-bytes (pulse-render/render-pulse-card-to-png (pulse/defaulted-timezone card) card - query-results) + query-results + 1000) tmp-file (java.io.File/createTempFile "card-png" ".png")] (with-open [w (java.io.FileOutputStream. tmp-file)] (.write w ^bytes png-bytes)) (.deleteOnExit tmp-file) (open tmp-file))) + +(defn open-png-bytes [bytes] + (let [tmp-file (java.io.File/createTempFile "card-png" ".png")] + (with-open [w (java.io.FileOutputStream. 
tmp-file)] + (.write w ^bytes bytes)) + (.deleteOnExit tmp-file) + (open tmp-file))) + +(defn render-img-data-uri + "Takes a PNG byte array and returns a Base64 encoded URI" + [img-bytes] + (str "data:image/png;base64," (String. (Base64Coder/encode img-bytes)))) + +(defn svg-image [kind] + (let [line|bar-data [["2015-02-01T00:00:00-08:00" 443] + ["2015-03-01T00:00:00-08:00" 875] + ["2015-04-01T00:00:00-07:00" 483] + ["2015-05-01T00:00:00-07:00" 421]] + donut-data [["alpha" 32] + ["beta" 49] + ["gamma" 23] + ["delta" 67]] + donut-colors {"alpha" "red" + "beta" "green" + "gamma" "blue" + "delta" "yellow"}] + (case kind + :line (js-svg/timelineseries-line line|bar-data) + :bar (js-svg/timelineseries-bar line|bar-data) + :donut (js-svg/categorical-donut donut-data donut-colors) + (throw (ex-info (str "Invalid chart type: " kind "\n Valid choices are :line, :bar, :donut") + {}))))) + +(defn preview-html + "Chart type is one of :line, :bar, :donut. Html is a string with a placeholder {{chart}} which will be replaced with + the [:img {:src chart-placeholder}] and the resulting html will be opened." + [{:keys [chart html-file html-inline]}] + (let [chart-image (render-img-data-uri (svg-image chart)) + chart-html (h/html [:img {:src chart-image :style "display: block; width: 100%"}]) + html (cond html-file + (slurp html-file) + html-inline + (str "" + html-inline + "")) + html (h/html (str/replace html #"\{\{chart\}\}" chart-html))] + (with-open [os (java.io.ByteArrayOutputStream.)] + (let [image-bytes (do (#'png/render-to-png! html os 1000) + (.toByteArray os))] + (open-png-bytes image-bytes))))) + +(defn help [] + (println + " +To render some html, call the function `preview-html`. This takes one argument, a map. +The keys in the map are `:chart` and either `:html-file` or `:html-inline`. 
+(preview-html {:chart :donut :html-inline some-html-to-render}) +or +(preview-html {:chart :donut :html-file some-file-with-html}) + +This function will render the html and open an image. +Valid charts are `:donut`, `:line`, and `:bar`. + +You can use {{chart}} in your html to indicate where the image of the chart should be embedded. +It will be + +For instance +(preview-html {:chart :donut + :html-inline \"

behold the donut

{{chart}}
\"}) +")) + +(comment + (preview-html {:chart :donut :html-inline " +
+ {{chart}} + + + + + + + +
500600
MarchApril
+
"})) diff --git a/docs/README.md b/docs/README.md index ab659b9f7d0b..c222bdede88b 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,5 +1,6 @@ -## Getting Help +# Metabase documentation and resources +<<<<<<< HEAD ### [Troubleshooting Guide](troubleshooting-guide/index.md) Problems, their causes, how to detect them, and how to fix them. @@ -41,3 +42,106 @@ This guide lists existing community drivers, and shows how to get started with d ### [Anonymous Information Collection Reference](information-collection.md) This page describes the anonymous usage information we collect (only if you opt-in), why we collect it, and how we use it to improve Metabase. +======= +## Tutorials + +### [Learn Metabase][learn] + +Learn how to set Metabase up, build out your analytics, organize things and set permissions, and how to do data ops right. + +## Getting help + +### [Troubleshooting Guide][troubleshooting] + +Problems, their causes, how to detect them, and how to fix them. + +### [Metabase forum][forum] + +A place to get help on installation, setting up as well as sharing tips and tricks. + +### [FAQs][faq] + +Frequently asked questions about Metabase. + +## Metabase reference guides + +Documentation guides for the Metabase application. + +### [Users Guide][users-guide] + +How to ask questions, how to visualize answers, as well as how to share questions and create dashboards. + +### [Admin Guide][admin-guide] + +How to set up Metabase, configure common settings, manage accounts and permissions, and add databases. + +### [Operations Guide][operations-guide] + +Learn how to install Metabase for production use. The guide covers topics like SSL termination, deploying via Docker Containers vs. JAR files, as well as the tradeoffs involved. + +### [Enterprise Guide][enterprise] + +Here’s where to go for help using the features included in [Metabase Enterprise Edition][enterprise-landing]. 
+ +## Metabase for developers + +### [Developers Guide][developers] + +Learn how to contribute to the Metabase open source project. + +### [Driver Development][drivers] + +This guide lists existing community drivers, and shows how to get started with driver development. + +### [Embedding reference apps][embedding-ref-apps] + +Code examples for embedding Metabase in applications. + +## Metabase community + +Connect with others using Metabase and catch up on the latest news. + +### [Metabase forum][forum] + +A place to get help on installation, setting up as well as sharing tips and tricks. + +### [Data stories][data-stories] + +Real stories about teams working and learning with data. You can also share your own stories. + +### [Case studies][case-studies] + +See how other organizations, big and small, have leveled up using Metabase. + +### [Blog][blog] + +Stay up to date on the latest from Metabase. + +### [Source code repository on GitHub][source-code] + +Metabase is open source: come on over and check out the code. + +## Reference + +### [Anonymous Information Collection Reference][info-collection] + +This page describes the anonymous usage information we collect (only if you opt-in), why we collect it, and how we use it to improve Metabase. 
+ +[admin-guide]: administration-guide/start.md +[blog]: /blog +[case-studies]: https://www.metabase.com/case_studies/ +[embedding-ref-apps]: https://github.com/metabase/embedding-reference-apps +[enterprise]: enterprise-guide/start.md +[enterprise-landing]: /enterprise +[data-stories]: /community +[developers]: developers-guide/start.md +[drivers]: developers-guide-drivers.md +[faq]: faq/start.md +[forum]: https://discourse.metabase.com/ +[info-collection]: information-collection.md +[learn]: /learn +[operations-guide]: operations-guide/start.md +[source-code]: https://github.com/metabase/metabase +[troubleshooting]: troubleshooting-guide/index.md +[users-guide]: users-guide/start.md +>>>>>>> tags/v0.41.0 diff --git a/docs/administration-guide/04-managing-users.md b/docs/administration-guide/04-managing-users.md index 65f001ad1347..b727a604d164 100644 --- a/docs/administration-guide/04-managing-users.md +++ b/docs/administration-guide/04-managing-users.md @@ -36,6 +36,12 @@ Right now, the only special role someone can have is Admin. The only difference To make someone an admin, click on the Groups dropdown and click the check mark next to the Administrators group. +### Unsubscribe from all subscriptions / alerts + +This action will delete any dashboard subscriptions or alerts the person has created, and remove them as a recipient from any other subscriptions or alerts. + +This action doesn't affect email distribution lists that are managed outside of Metabase. + ### Adding people to Groups Adding people to groups allows you to assign [data access](05-setting-permissions.md) and [collection permissions](06-collections.md) to them. To add someone to one or more groups, just click the Groups dropdown and click the checkboxes next to the group(s) you want to add the person to. 
diff --git a/docs/administration-guide/05-setting-permissions.md b/docs/administration-guide/05-setting-permissions.md index 0951d2e1ebef..d4dccd99f505 100644 --- a/docs/administration-guide/05-setting-permissions.md +++ b/docs/administration-guide/05-setting-permissions.md @@ -1,89 +1,80 @@ -## Setting Data Access Permissions +# Permissions overview There are always going to be sensitive bits of information in your databases and tables, and thankfully Metabase provides a simple way to ensure that people on your team only see the data they’re supposed to. -### How permissions work in Metabase +## How permissions work in Metabase -Metabase uses a group-based approach to set permissions and restrictions on your databases and tables. At a high level, to set up permissions in your Metabase instance you’ll need to create one or more groups, add members to those groups, and then choose what level of database and SQL access those groups should have. +Metabase uses a group-based approach to set permissions. At a high-level, you can set permissions on two things: data and collections. **Data permissions** are about defining what raw data groups are allowed to use when creating new questions (i.e., self-service analytics). **Collection permissions** determine what existing dashboards and questions groups can see. On some plans, you can also sandbox data, which "filters" what data people can see when they view a particular question, such as limiting the rows or columns they can see. -A user can be a member of multiple groups, and if one of the groups they’re in has access to a particular database or table, but another group they’re a member of does not, then they **will** have access to that database. +You can set permissions on: -In addition to setting permissions on your databases and tables, you can also [set access permissions on the collections](06-collections.md) where your dashboards, questions, and pulses are saved.
Collection permissions can be set and edited from the collection itself, or the Admin Panel. +- [Databases connected to Metabase][data-permissions] +- [Tables and schemas in those databases][table-permissions] +- [Rows and columns of a table][data-sandboxing] (only on some plans) +- [Collections of questions and dashboards][collections] -### Groups +For plans that include [SQL Snippet Folders][sql-snippet-folders], you can also set permissions on those folders. -To view and manage your groups, go to the Admin Panel, click on the People section, and then click on Groups from the side menu. +To determine who has access to what, you’ll need to create one or more groups, choose which level of access that group has to different databases, collections, and so on, then add people to that group. -![Groups](images/groups.png) - -#### Special default groups - -You’ll notice that you already have two default groups: Administrators and All Users. These are special groups that can’t be removed. +### Key points regarding permissions -You’ll also see that you’re a member of the **Administrators** group — that’s why you were able to go to the Admin Panel in the first place. So, to make someone an admin of Metabase you just need to add them to this group. Metabase admins can log into the Admin Panel and make changes there, and they always have unrestricted access to all data that you have in your Metabase instance. So be careful who you add to the Administrator group! - -The **All Users** group is another special one. Every Metabase user is always a member of this group, though they can also be a member of as many other groups as you want. We recommend using the All Users group as a way to set default access levels for new Metabase users. If you have [Google single sign-on](10-single-sign-on.md) enabled, new users who join that way will be automatically added to the All Users group. 
+Some key things to keep in mind when thinking about permissions in Metabase: -#### An important note on the All Users group +- Permissions are granted to groups, not people. +- People can be in more than one group. +- If a person is in multiple groups, they will have the most permissive access granted to them across all of their groups. For example, if they are part of three groups, and two of those groups don't have permissions to a database, but the third group they're in can query that database, then that person will have access to that database. -As we mentioned above, a user is given the _most permissive_ setting she has for a given database/schema/table across _all_ groups she is in. Because of that, it is important that your All Users group should never have _greater_ access for an item than a group for which you're trying to restrict access — otherwise the more permissive setting will win out. This goes for both data access as well as [collection permission](06-collections.md) settings. +## Groups -If you’ve set up the [Slack integration](09-setting-up-slack.md) and enabled [Metabot](../users-guide/11-metabot.md), you’ll also see a special **Metabot** group, which will allow you to restrict which questions your users will be able to access in Slack via Metabot. - -#### Managing groups - -From the Groups section, click the `Add a group` button to create a new group. We recommend creating groups that correspond to the teams your company or organization has, such as Human Resources, Engineering, Finance, etc. Click the X icon to the right of a group in the list to remove it (remember, you can’t remove the special default groups). By default, newly created groups don’t have access to anything. - -Click into a group and then click `Add members` to add users to that group. Click on the X on the right side of a group member to remove them from that group. You can also add and remove users from groups from the People list using the dropdown in the Groups column. 
+To view and manage your groups, go to the __Admin Panel__ > __People__, and then click on __Groups__ from the side menu. -### Permissions view - -Now that you have some groups, you’ll want to control their data access by going to the `Permissions` section of the Admin Panel. You’ll see an interactive table that displays all of your databases and all of your groups, and the level of access your groups have for each database. - -![Permissions view](images/permissions.png) - -You can click on any cell in the table to change a group’s access level. When you’re done making your changes, just click the `save changes` button in the top-right, and you’ll see a confirmation dialog summarizing the changes. - -![Changing access level](images/change-access.png) +![Groups](images/groups.png) -At the database level, there are two different kinds of access you can set: data access, and SQL (or native query) access. +### Special default groups -#### Data access +Every Metabase has two default groups: Administrators and All Users. These are special groups that can’t be removed. -- **Unrestricted access:** can access data from all tables (within all namespaces/schemas, if your database uses those), including any tables that might get added to this database in the future. -- **Limited access:** can only access the tables that you explicitly select within namespaces/schemas you explicitly select. If a new table gets added to this database in the future, access to it will not automatically be given. Saved questions based on tables the user doesn’t have access to won’t show up in the list of saved questions, dashboard cards based on those questions won’t appear, and they won’t be able to ask new questions about those tables. If every card on a dashboard is hidden for a user, then that dashboard won’t be shown to them in the dashboard list. -- **No access:** can’t see anything based on data contained in this database. 
Won’t see saved questions based on tables contained in this database, and won’t be able to ask new questions about those tables. +#### Administrators -#### SQL (or native query) access +You’re a member of the **Administrators** group — that’s why you were able to go to the Admin Panel in the first place. To make someone an admin of Metabase, you just need to add them to this group. Metabase admins can log into the Admin Panel and make changes there, and they always have unrestricted access to all data that you have in your Metabase instance. So be careful who you add to the Administrator group! -- **Write raw queries:** can write new SQL/native queries using the SQL editor. This access level requires the group to additionally have Unrestricted data access for the database in question, since SQL queries can circumvent table-level permissions. -- **No access**: can't view, write, or edit SQL/native queries. Users will still be able to view the results of questions created from SQL/native queries, but not the code itself. They also won't see the "View the SQL" button when composing custom questions in the notebook editor. +#### All users -If you select `Limit access` for one of your databases, your view will change to show the contents of that database. If the database utilizes namespaces or schemas, you’ll see a list of all the schemas in the database, and the level of data access each group has for them. Similarly, if you select `Limit access` on one of your schemas, you’ll drill down a level and see all the tables within it. From these views, you can navigate back by using the breadcrumb links in the top-left, and you can always drill down into a database or schema using the link under its name in the left column. +The **All Users** group is another special one. Every Metabase user is always a member of this group, though they can also be a member of as many other groups as you want. 
We recommend using the All Users group as a way to set default access levels for new Metabase users. If you have [Google single sign-on](10-single-sign-on.md) enabled, new users who join that way will be automatically added to the All Users group. -![Table permissions](images/table-permissions.png) +As we mentioned above, a person is given the _most permissive_ setting she has for a given database/schema/table across _all_ groups she's in. Because of that, it's important that your All Users group should never have _greater_ access for an item than a group for which you're trying to restrict access — otherwise the more permissive setting will win out. This goes for both data access as well as [collection permission](06-collections.md) settings. -Data access levels for schemas follows the same pattern as for databases: +### Managing groups -- **Unrestricted access:** can access all tables in this schema, including any tables that might get added in the future. -- **Limited access:** can only access the tables that you explicitly select. -- **No access:** can’t access any tables in this schema. +#### Creating a group and adding people to it -Lastly, data access levels for tables are almost exactly the same as well: +From the Admin > Groups tab, click the **Add a group** button to create a new group. We recommend creating groups that correspond to the teams your company or organization has, such as Human Resources, Engineering, Finance, and so on. By default, newly created groups don’t have access to anything. -- **Unrestricted access:** can ask questions about this table and see saved questions and dashboard cards using this table. -- **No access:** can’t ask questions about this table or see saved questions or dashboard cards using this table. +Click into a group and then click `Add members` to add users to that group. Click on the X on the right side of a group member to remove them from that group. 
You can also add and remove users from groups from the People list using the dropdown in the Groups column. -_Note: you’ll notice that tables don’t have the option for limited access. If you need to set column-level or row-level data permissions, check out the [data sandboxing](https://www.metabase.com/docs/latest/enterprise-guide/data-sandboxes.html) feature of the [Enterprise Edition](https://www.metabase.com/enterprise/)._ +#### Removing a group -For more, check out our [Guide to data permissions](https://www.metabase.com/learn/organization/organization/data-permissions.html). +To remove a group, click the X icon to the right of a group in the list to remove it (remember, you can’t remove the special default groups). -### A note about Pulses +## Further reading -Pulses act a bit differently with regard to permissions. When a user creates a new Pulse, they will only have the option to include saved questions that they have permission to view. Note, however, that they are not prevented from emailing that Pulse to anyone, or posting that Pulse to a Slack channel (if you have Slack integration set up), regardless of the recipients’ permissions. Unlike dashboards, where individual cards are blocked based on a user’s permissions, a Pulse will always render all of its cards. +Checkout our track on [Permissions][permissions] in Learn Metabase. --- -## Next: collections - -Metabase lets you create and set permissions on collections of dashboards and questions. [Learn how](06-collections.md). +## Next: Data permissions + +Metabase lets you [set permissions on databases and their tables][data-permissions]. 
+ +[collections]: 06-collections.md +[dashboard-subscriptions]: ../users-guide/dashboard-subscriptions.md +[data-permissions]: data-permissions.md +[pulses]: ../users-guide/10-pulses.md +[data-sandboxing]: ../enterprise-guide/data-sandboxes.md +[permissions]: /learn/permissions/ +[sandbox-columns]: /learn/permissions/data-sandboxing-column-permissions.html +[sandbox-rows]: /learn/permissions/data-sandboxing-row-permissions.html +[slack-integration]: 09-setting-up-slack.md +[sql-snippet-folders]: ../enterprise-guide/sql-snippets.md +[table-permissions]: data-permissions.md#table-permissions \ No newline at end of file diff --git a/docs/administration-guide/06-collections.md b/docs/administration-guide/06-collections.md index 6ef496f359a3..7f62d12eaf32 100644 --- a/docs/administration-guide/06-collections.md +++ b/docs/administration-guide/06-collections.md @@ -1,14 +1,15 @@ -## Creating Collections for Your Saved Questions +# Collection permissions ![Collection detail](images/collections/collection-detail.png) Collections are a great way to organize your dashboards, saved questions, and pulses, and to decide who gets to see and edit things. Collections could be things like, "Important Metrics," "Product Team," "Marketing KPIs," or "Questions about users." Collections can even contain other collections, allowing you to create an organizational structure that fits your team. You can also choose which user groups should have what level of access to your collections (more on that below). -Metabase starts out with a default top-level collection which is called "Our analytics," which every other collection is saved inside of. +Metabase starts out with a default top-level collection which is called __Our analytics__, which every other collection is saved inside of. This page will teach you how to create and manage your collections. 
For more information on organizing saved questions and using collections, [check out this section of the User's Guide](../users-guide/06-sharing-answers.md). ### Creating and editing collections + If a user has Curate access for a collection, they can create new sub-collections inside it and edit the contents of the collection. From the detail view of any collection, click on the `Create a collection` button to make a new one. Give your collection a name, choose where it should live, and give it a description if you'd like. ![Create collection](images/collections/create-collection.png) @@ -16,6 +17,7 @@ If a user has Curate access for a collection, they can create new sub-collection By default, new collections will have the same permissions settings as the collection it was created in (its "parent" collection), but you can change those settings from the Edit menu. ### Pinning things in collections + ![Pins](images/collections/pinned-items.png) One great feature in Metabase is that you can pin the most important couple of items in each of your collections to the top. Pinning an item in a collection turns it into a big, eye-catching card that will help make sure that folks who are browsing your Metabase instance will always know what's most important. @@ -23,10 +25,11 @@ One great feature in Metabase is that you can pin the most important couple of i Any user with curate permissions for a collection can pin items in it, making it easy to delegate curation responsibilities to other members of your team. To pin something, you can either click and drag it to the top of the page, or click on its menu and choose the pin action. (Note that collections themselves can't be pinned.) ### Setting permissions for collections + Collection permissions are similar to [data access permissions](05-setting-permissions.md). 
Rather than going to the Admin Panel, you set permissions on collections by clicking on the lock icon in the top-right of the screen while viewing the collection and clicking on `Edit permissions`. Only Administrators can edit collection permissions. Each [user group](05-setting-permissions.md) can have either View, Curate, or No access to a collection: - **Curate access:** the user can edit, move, and archive items saved in this collection, and can save or move new items into it. They can also create new sub-collections within this collection. In order to archive a sub-collection within this collection, they'll need to have Curate access for it and any and all collections within it. -- **View access:** the user can see all the questions, dashboards, and pulses in the collection. If the user does not have permission to view some or all of the questions included in a given dashboard or pulse then those questions will not be visible to them; but any questions that are saved in this collection *will* be visible to them, *even if the user doesn't have access to the underlying data used to in the question.* +- **View access:** the user can see all the questions, dashboards, and pulses in the collection. If the user does not have permission to view some or all of the questions included in a given dashboard or pulse then those questions will not be visible to them; but any questions that are saved in this collection _will_ be visible to them, _even if the user doesn't have access to the underlying data used to in the question._ - **No access:** the user won't see this collection listed, and doesn't have access to any of the items saved within it. 
![Permissions](images/collections/collection-permissions.png) @@ -35,12 +38,13 @@ If you want to see the bigger picture of what permissions your user groups have ![Full permissions grid](images/collections/permission-grid.png) -Just like with data access permissions, collection permissions are *additive*, meaning that if a user belongs to more than one group, if one of their groups has a more restrictive setting for a collection than another one of their groups, they'll be given the *more permissive* setting. This is especially important to remember when dealing with the All Users group: since all users are members of this group, if you give the All Users group Curate access to a collection, then *all* users will be given Curate access for that collection, even if they also belong to a group with *less* access than that. +Just like with data access permissions, collection permissions are _additive_, meaning that if a user belongs to more than one group, if one of their groups has a more restrictive setting for a collection than another one of their groups, they'll be given the _more permissive_ setting. This is especially important to remember when dealing with the All Users group: since all users are members of this group, if you give the All Users group Curate access to a collection, then _all_ users will be given Curate access for that collection, even if they also belong to a group with _less_ access than that. ### Permissions and sub-collections -One nuance with how collections permissions work has to do with sub-collections. A user group can be given access to a collection located somewhere within one or more sub-collections *without* having to have access to every collection "above" it. E.g., if a user group had access to the "Super Secret Collection" that's saved several layers deep within a "Marketing" collection that the group does *not* have access to, the "Super Secret Collection" would show up at the top-most level that the group *does* have access to. 
-To learn more, check out our Learn article on [working with collection permissions](https://www.metabase.com/learn/organization/organization/collection-permissions.html). +One nuance with how collections permissions work has to do with sub-collections. A user group can be given access to a collection located somewhere within one or more sub-collections _without_ having to have access to every collection "above" it. E.g., if a user group had access to the "Super Secret Collection" that's saved several layers deep within a "Marketing" collection that the group does _not_ have access to, the "Super Secret Collection" would show up at the top-most level that the group _does_ have access to. + +To learn more, check out our Learn article on [working with collection permissions][working-with-collection-permissions]. ### Personal collections @@ -49,13 +53,38 @@ Each user has a personal collection where they're always allowed to save things, A personal collection works just like any other collection except that its permissions can't be changed. If a sub-collection within a personal collection is moved to a different collection, it will inherit the permissions of that collection. ### Archiving collections + Users with curate permission for a collection can archive collections. Click the edit icon in the top-right of the collection screen and select `Archive this collection` to archive it. This will also archive all questions, dashboards, pulses, and all other sub-collections and their contents. Importantly, this will also remove any archived questions from all dashboards and Pulses that use them. **Note:** the "Our analytics" collection and personal collections can't be archived. -You can always *unarchive* things by clicking on the More menu from a collection and selecting `View the archive`, then clicking the un-archive button next to an archived item. 
Questions within archived collections are not individually listed in the archive, so if you want to unarchive a specific question from an archived collection, you have to unarchive that whole collection. +You can always _unarchive_ items. In the Collections list sidebar, at the bottom, click on __View archive__. Search for the item you'd like to unarchive (you'll either need to scroll down the page, or use the browser's find in page functionality, as archived items won't appear in Metabase's search results). Select the open box with an up arrow icon to "Unarchive this". + +## Dashboard subscriptions + +You don't explicitly set permissions on [dashboards subscriptions][dashboard-subscriptions], as the subscriptions are a feature of a dashboard. And access to dashboards falls under Collection permissions. + +Here's what you can do with dashboard subscriptions based on Collection permissions for the collection the dashboard is in: + +- **Curate access**: You can view and edit all subscriptions for the dashboard, including subscriptions created by other people. +- **View access**: You can view all subscriptions for that dashboard. You can also create subscriptions and edit ones that you’ve created, but you can’t edit ones that other people created. You can also unsubscribe from a subscription that somebody else created. +- **No access**: You can’t view any of the dashboard's subscriptions, including, for example, subscriptions you created before an administrator revoked your access to the collection. + +### Metabot group + +If you’ve set up the [Slack integration][slack-integration] and enabled Metabot, you’ll also see a special Metabot group when assigning permissions to collections, which will allow you to restrict which questions your users will be able to access in Slack via Metabot. + +## A note about Pulses + +If you're using [Pulses][pulses], we recommend switching to [dashboard subscriptions][dashboard-subscriptions]. 
+ +Pulses act a bit differently with regard to permissions. When a user creates a new Pulse, they will only have the option to include saved questions that they have permission to view. Note, however, that they are not prevented from emailing that Pulse to anyone, or posting that Pulse to a Slack channel (if you have Slack integration set up), regardless of the recipients’ permissions. Unlike dashboards, where individual cards are blocked based on a user’s permissions, a Pulse will always render all of its cards. --- ## Next: sharing and embedding with public links + Want to share certain dashboards or questions with the world? You can do that with [public links](12-public-links.md). + + +[working-with-collection-permissisons]: /learn/permissions/collection-permissions.html \ No newline at end of file diff --git a/docs/administration-guide/data-permissions.md b/docs/administration-guide/data-permissions.md new file mode 100644 index 000000000000..0201791a5d37 --- /dev/null +++ b/docs/administration-guide/data-permissions.md @@ -0,0 +1,92 @@ +# Data permissions + +This page covers permissions for databases and tables. If you haven't already, check out our [Permissions overview][permissons-overview]. + +## Permissions view + +Now that you have some groups, you’ll want to control their data access by going to the **Permissions** section of the Admin Panel. You’ll see an interactive table that displays all of your databases and all of your groups, and the level of access your groups have for each database. + +![Permissions view](images/permissions.png) + +You can click on any cell in the table to change a group’s access level. When you’re done making your changes, just click the `save changes` button in the top-right, and you’ll see a confirmation dialog summarizing the changes. 
+ +### Unrestricted access + +Members of the group can access data from all tables (within all namespaces/schemas, if your database uses those), including any tables that might get added to this database in the future. + +### Granular access + +__Granular access__ allows administrators to explicitly set access to tables or schemas within a database. In practice, this means that: + +- Admins can set the groups access to individual tables to either __Unrestricted__, __No self-service__, or __Sandboxed__ access. +- If a new table gets added to this database in the future, the group won't get access to that new table. An administrator would need to explicitly grant access to that table. + +### No self-service access + +__No self-service__ prevents people in a group from creating new ad hoc queries or questions based on this data, or from seeing this data in the Browse Data screen. Groups with this level of access can still see saved questions and charts based on this data in Collections they have access to. + +### Block + +{% include plans-blockquote.html %} + +__Block__ ensures people can’t ever see the data from this database, regardless of their permissions at the Collection level. So if they want to see a question in a collection that have access to, but that question uses data from a database that's been blocked for that person's group, then they won't be able to see that question. + +Keep in mind people can be in multiple groups. If a person belongs to _another_ group that _does_ have access to that database, that more privileged access will take precedence (overruling the block), and they'll be able to view that question. + +### Native query editing + +Members of a group with native query editing set to Yes can write new SQL/native queries using the native query editor. This access level requires the group to additionally have Unrestricted data access for the database in question, since SQL queries can circumvent table-level permissions. 
+Members in groups without native query editing access can't view, write, or edit SQL/native queries. People who are not in groups with native query editing permissions will still be able to view the results of questions created from SQL/native queries, but not the code itself. They also won't see the "View the SQL" button when composing custom questions in the notebook editor. + +## Table permissions + +When you select [Granular access](#granular-access) for a database, you'll be prompted to set permissions on the tables (or schemas) within that database. Here you'll have two or three options, depending on your Metabase plan. + +### Unrestricted access to the table + +Groups with unrestricted access can ask questions about this table and see saved questions and dashboard cards that use the table. + +### No self-service access to the table + +Groups with no self-service access to a table can’t access the table at all. They can, however, view questions that use data from that table, provided the group has access to the question's collection. + +### Sandboxed access to the table + +Only available in paid plans, Sandboxed access to a table can restrict access to columns and rows of a table. Check out [data sandboxing][data-sandboxing]. + +## Permissions and dashboard subscriptions + +You don't explicitly set permissions on [dashboards subscriptions][dashboard-subscriptions], as the subscriptions are a feature of a dashboard. Which means that What you can do j + +If a person is in a group that has __Curate access__ to the collection containing the dashboard, they can view and edit all subscriptions for the dashboard, including subscriptions created by other people. +If a group has read-only access to a dashboard (based on its collection permissions), they can view all subscriptions for that dashboard. They can also create subscriptions and edit ones that they’ve created, but they can’t edit ones that other users created. 
(Note: that last point is currently enforced by the back end only; the front end still needs to be updated to display those subscriptions as read-only.)
+ +[collections]: 06-collections.md +[dashboard-subscriptions]: ../users-guide/dashboard-subscriptions.md +[data-sandboxing]: ../enterprise-guide/data-sandboxes.md +[permissions-overview]: 05-setting-permissions.md +[pulses]: ../users-guide/10-pulses.md +[sandbox-columns]: /learn/permissions/data-sandboxing-column-permissions.html +[sandbox-rows]: /learn/permissions/data-sandboxing-row-permissions.html +[sql-snippet-folders]: ../enterprise-guide/sql-snippets.md diff --git a/docs/administration-guide/images/change-access.png b/docs/administration-guide/images/change-access.png deleted file mode 100644 index 11f766fc7ea9..000000000000 Binary files a/docs/administration-guide/images/change-access.png and /dev/null differ diff --git a/docs/administration-guide/images/permissions.png b/docs/administration-guide/images/permissions.png index 83ad3dcd39d2..344b3fc13764 100644 Binary files a/docs/administration-guide/images/permissions.png and b/docs/administration-guide/images/permissions.png differ diff --git a/docs/administration-guide/images/table-permissions.png b/docs/administration-guide/images/table-permissions.png deleted file mode 100644 index a96d8750217d..000000000000 Binary files a/docs/administration-guide/images/table-permissions.png and /dev/null differ diff --git a/docs/api-documentation.md b/docs/api-documentation.md index d84047117e17..fd15a285f712 100644 --- a/docs/api-documentation.md +++ b/docs/api-documentation.md @@ -2,46 +2,72 @@ _This file was generated from source comments by `clojure -M:run api-documentation`_. -Check out an introduction to the [Metabase API](https://www.metabase.com/learn/developing-applications/advanced-metabase/metabase-api.html). +Check out an introduction to the [Metabase API](https://www.metabase.com/learn/administration/metabase-api.html). -## `GET /api/activity/` -Get recent activity. 
+## Activity + + - [GET /api/activity/](#get-apiactivity) + - [GET /api/activity/recent_views](#get-apiactivityrecent_views) + +### `GET /api/activity/` +Get recent activity. -## `GET /api/activity/recent_views` +### `GET /api/activity/recent_views` Get the list of 10 things the current user has been viewing most recently. -## `DELETE /api/alert/:id` +## Alert + +/api/alert endpoints. + + - [DELETE /api/alert/:id](#delete-apialertid) + - [GET /api/alert/](#get-apialert) + - [GET /api/alert/:id](#get-apialertid) + - [GET /api/alert/question/:id](#get-apialertquestionid) + - [POST /api/alert/](#post-apialert) + - [PUT /api/alert/:id](#put-apialertid) + - [PUT /api/alert/:id/unsubscribe](#put-apialertidunsubscribe) -Delete an Alert. (DEPRECATED -- don't delete a Alert anymore -- archive it instead.) +### `DELETE /api/alert/:id` + +Delete an Alert. (DEPRECATED -- don't delete a Alert anymore -- archive it instead.). ##### PARAMS: * **`id`** +### `GET /api/alert/` -## `GET /api/alert/` - -Fetch all alerts +Fetch all alerts. ##### PARAMS: * **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). +* **`user_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `GET /api/alert/question/:id` +### `GET /api/alert/:id` -Fetch all questions for the given question (`Card`) id +Fetch an alert by ID. ##### PARAMS: * **`id`** +### `GET /api/alert/question/:id` + +Fetch all questions for the given question (`Card`) id. + +##### PARAMS: + +* **`id`** value may be nil, or if non-nil, value must be an integer greater than zero. + +* **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). -## `POST /api/alert/` +### `POST /api/alert/` Create a new Alert. @@ -59,8 +85,7 @@ Create a new Alert. * **`new-alert-request-body`** - -## `PUT /api/alert/:id` +### `PUT /api/alert/:id` Update a `Alert` with ID. @@ -82,17 +107,28 @@ Update a `Alert` with ID. 
* **`alert-updates`** +### `PUT /api/alert/:id/unsubscribe` -## `PUT /api/alert/:id/unsubscribe` - -Unsubscribes a user from the given alert +Unsubscribes a user from the given alert. ##### PARAMS: * **`id`** -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query` +## Automagic dashboards + + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query](#get-apiautomagic-dashboardsentityentity-id-or-query) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query](#get-apiautomagic-dashboardsentityentity-id-or-querycellcell-query) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/compare/:comparison-entity/:comparison-entity-id-or-query](#get-apiautomagic-dashboardsentityentity-id-or-querycellcell-querycomparecomparison-entitycomparison-entity-id-or-query) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule](#get-apiautomagic-dashboardsentityentity-id-or-querycellcell-queryruleprefixrule) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query](#get-apiautomagic-dashboardsentityentity-id-or-querycellcell-queryruleprefixrulecomparecomparison-entitycomparison-entity-id-or-query) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/compare/:comparison-entity/:comparison-entity-id-or-query](#get-apiautomagic-dashboardsentityentity-id-or-querycomparecomparison-entitycomparison-entity-id-or-query) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule](#get-apiautomagic-dashboardsentityentity-id-or-queryruleprefixrule) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query](#get-apiautomagic-dashboardsentityentity-id-or-queryruleprefixrulecomparecomparison-entitycomparison-entity-id-or-query) + - [GET 
/api/automagic-dashboards/database/:id/candidates](#get-apiautomagic-dashboardsdatabaseidcandidates) + +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query` Return an automagic dashboard for entity `entity` with id `ìd`. @@ -104,8 +140,7 @@ Return an automagic dashboard for entity `entity` with id `ìd`. * **`show`** invalid show value - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query` Return an automagic dashboard analyzing cell in automagic dashboard for entity `entity` defined by @@ -121,8 +156,7 @@ Return an automagic dashboard analyzing cell in automagic dashboard for entity * **`show`** invalid show value - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/compare/:comparison-entity/:comparison-entity-id-or-query` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/compare/:comparison-entity/:comparison-entity-id-or-query` Return an automagic comparison dashboard for cell in automagic dashboard for entity `entity` with id `ìd` defined by query `cell-querry`; compared with entity `comparison-entity` with id @@ -142,8 +176,7 @@ Return an automagic comparison dashboard for cell in automagic dashboard for ent * **`comparison-entity-id-or-query`** - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule` Return an automagic dashboard analyzing cell in question with id `id` defined by query `cell-querry` using rule `rule`. 
@@ -162,8 +195,7 @@ Return an automagic dashboard analyzing cell in question with id `id` defined b * **`show`** invalid show value - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query` Return an automagic comparison dashboard for cell in automagic dashboard for entity `entity` with id `ìd` defined by query `cell-querry` using rule `rule`; compared with entity @@ -187,11 +219,10 @@ Return an automagic comparison dashboard for cell in automagic dashboard for ent * **`comparison-entity-id-or-query`** - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/compare/:comparison-entity/:comparison-entity-id-or-query` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/compare/:comparison-entity/:comparison-entity-id-or-query` Return an automagic comparison dashboard for entity `entity` with id `ìd` compared with entity - `comparison-entity` with id `comparison-entity-id-or-query.` + `comparison-entity` with id `comparison-entity-id-or-query.`. ##### PARAMS: @@ -205,8 +236,7 @@ Return an automagic comparison dashboard for entity `entity` with id `ìd` compa * **`comparison-entity-id-or-query`** - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule` Return an automagic dashboard for entity `entity` with id `ìd` using rule `rule`. 
@@ -222,8 +252,7 @@ Return an automagic dashboard for entity `entity` with id `ìd` using rule `rule * **`show`** invalid show value - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query` Return an automagic comparison dashboard for entity `entity` with id `ìd` using rule `rule`; compared with entity `comparison-entity` with id `comparison-entity-id-or-query.`. @@ -244,8 +273,7 @@ Return an automagic comparison dashboard for entity `entity` with id `ìd` using * **`comparison-entity-id-or-query`** - -## `GET /api/automagic-dashboards/database/:id/candidates` +### `GET /api/automagic-dashboards/database/:id/candidates` Return a list of candidates for automagic dashboards orderd by interestingness. @@ -254,7 +282,29 @@ Return a list of candidates for automagic dashboards orderd by interestingness. * **`id`** -## `DELETE /api/card/:card-id/favorite` +## Card + +/api/card endpoints. 
+ + - [DELETE /api/card/:card-id/favorite](#delete-apicardcard-idfavorite) + - [DELETE /api/card/:card-id/public_link](#delete-apicardcard-idpublic_link) + - [DELETE /api/card/:id](#delete-apicardid) + - [GET /api/card/](#get-apicard) + - [GET /api/card/:id](#get-apicardid) + - [GET /api/card/:id/related](#get-apicardidrelated) + - [GET /api/card/embeddable](#get-apicardembeddable) + - [GET /api/card/public](#get-apicardpublic) + - [POST /api/card/](#post-apicard) + - [POST /api/card/:card-id/favorite](#post-apicardcard-idfavorite) + - [POST /api/card/:card-id/public_link](#post-apicardcard-idpublic_link) + - [POST /api/card/:card-id/query](#post-apicardcard-idquery) + - [POST /api/card/:card-id/query/:export-format](#post-apicardcard-idqueryexport-format) + - [POST /api/card/collections](#post-apicardcollections) + - [POST /api/card/pivot/:card-id/query](#post-apicardpivotcard-idquery) + - [POST /api/card/related](#post-apicardrelated) + - [PUT /api/card/:id](#put-apicardid) + +### `DELETE /api/card/:card-id/favorite` Unfavorite a Card. @@ -262,8 +312,7 @@ Unfavorite a Card. * **`card-id`** - -## `DELETE /api/card/:card-id/public_link` +### `DELETE /api/card/:card-id/public_link` Delete the publicly-accessible link to this Card. @@ -273,21 +322,19 @@ You must be a superuser to do this. * **`card-id`** +### `DELETE /api/card/:id` -## `DELETE /api/card/:id` - -Delete a Card. (DEPRECATED -- don't delete a Card anymore -- archive it instead.) +Delete a Card. (DEPRECATED -- don't delete a Card anymore -- archive it instead.). ##### PARAMS: * **`id`** - -## `GET /api/card/` +### `GET /api/card/` Get all the Cards. Option filter param `f` can be used to change the set of Cards that are returned; default is `all`, but other options include `mine`, `fav`, `database`, `table`, `recent`, `popular`, and `archived`. See - corresponding implementation functions above for the specific behavior of each filter option. 
:card_index: + corresponding implementation functions above for the specific behavior of each filter option. :card_index:. ##### PARAMS: @@ -295,8 +342,7 @@ Get all the Cards. Option filter param `f` can be used to change the set of Card * **`model_id`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `GET /api/card/:id` +### `GET /api/card/:id` Get `Card` with ID. @@ -304,8 +350,7 @@ Get `Card` with ID. * **`id`** - -## `GET /api/card/:id/related` +### `GET /api/card/:id/related` Return related entities. @@ -313,23 +358,20 @@ Return related entities. * **`id`** - -## `GET /api/card/embeddable` +### `GET /api/card/embeddable` Fetch a list of Cards where `enable_embedding` is `true`. The cards can be embedded using the embedding endpoints and a signed JWT. You must be a superuser to do this. - -## `GET /api/card/public` +### `GET /api/card/public` Fetch a list of Cards with public UUIDs. These cards are publicly-accessible *if* public sharing is enabled. You must be a superuser to do this. - -## `POST /api/card/` +### `POST /api/card/` Create a new `Card`. @@ -349,12 +391,13 @@ Create a new `Card`. * **`name`** value must be a non-blank string. +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. + * **`dataset_query`** * **`display`** value must be a non-blank string. - -## `POST /api/card/:card-id/favorite` +### `POST /api/card/:card-id/favorite` Favorite a Card. @@ -362,8 +405,7 @@ Favorite a Card. * **`card-id`** - -## `POST /api/card/:card-id/public_link` +### `POST /api/card/:card-id/public_link` Generate publicly-accessible links for this Card. Returns UUID to be used in public links. (If this Card has already been shared, it will return the existing public link rather than creating a new one.) Public sharing must @@ -375,8 +417,7 @@ You must be a superuser to do this. 
* **`card-id`** - -## `POST /api/card/:card-id/query` +### `POST /api/card/:card-id/query` Run the query associated with a Card. @@ -388,11 +429,12 @@ Run the query associated with a Card. * **`ignore_cache`** value may be nil, or if non-nil, value must be a boolean. +* **`dashboard_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `POST /api/card/:card-id/query/:export-format` +### `POST /api/card/:card-id/query/:export-format` Run the query associated with a Card, and return its results as a file in the specified format. Note that this - expects the parameters as serialized JSON in the 'parameters' parameter + expects the parameters as serialized JSON in the 'parameters' parameter. ##### PARAMS: @@ -402,8 +444,7 @@ Run the query associated with a Card, and return its results as a file in the sp * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. - -## `POST /api/card/collections` +### `POST /api/card/collections` Bulk update endpoint for Card Collections. Move a set of `Cards` with CARD_IDS into a `Collection` with COLLECTION_ID, or remove them from any Collections by passing a `null` COLLECTION_ID. @@ -414,8 +455,7 @@ Bulk update endpoint for Card Collections. Move a set of `Cards` with CARD_IDS i * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `POST /api/card/pivot/:card-id/query` +### `POST /api/card/pivot/:card-id/query` Run the query associated with a Card. @@ -427,8 +467,7 @@ Run the query associated with a Card. * **`ignore_cache`** value may be nil, or if non-nil, value must be a boolean. - -## `POST /api/card/related` +### `POST /api/card/related` Return related entities for an ad-hoc query. @@ -436,8 +475,7 @@ Return related entities for an ad-hoc query. * **`query`** - -## `PUT /api/card/:id` +### `PUT /api/card/:id` Update a `Card`. @@ -465,6 +503,8 @@ Update a `Card`. 
* **`embedding_params`** value may be nil, or if non-nil, value must be a valid embedding params map. +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. + * **`dataset_query`** value may be nil, or if non-nil, value must be a map. * **`id`** @@ -472,7 +512,26 @@ Update a `Card`. * **`display`** value may be nil, or if non-nil, value must be a non-blank string. -## `GET /api/collection/` +## Collection + +`/api/collection` endpoints. By default, these endpoints operate on Collections in the 'default' namespace, which is + the one that has things like Dashboards and Cards. Other namespaces of Collections exist as well, such as the + `:snippet` namespace, (called 'Snippet folders' in the UI). These namespaces are completely independent hierarchies. + To use these endpoints for other Collections namespaces, you can pass the `?namespace=` parameter (e.g. + `?namespace=snippet`). + + - [GET /api/collection/](#get-apicollection) + - [GET /api/collection/:id](#get-apicollectionid) + - [GET /api/collection/:id/items](#get-apicollectioniditems) + - [GET /api/collection/graph](#get-apicollectiongraph) + - [GET /api/collection/root](#get-apicollectionroot) + - [GET /api/collection/root/items](#get-apicollectionrootitems) + - [GET /api/collection/tree](#get-apicollectiontree) + - [POST /api/collection/](#post-apicollection) + - [PUT /api/collection/:id](#put-apicollectionid) + - [PUT /api/collection/graph](#put-apicollectiongraph) + +### `GET /api/collection/` Fetch a list of all Collections that the current user has read permissions for (`:can_write` is returned as an additional property of each Collection so you can tell which of these you have write permissions for.) @@ -486,17 +545,15 @@ Fetch a list of all Collections that the current user has read permissions for ( * **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. 
+### `GET /api/collection/:id` -## `GET /api/collection/:id` - -Fetch a specific Collection with standard details added +Fetch a specific Collection with standard details added. ##### PARAMS: * **`id`** - -## `GET /api/collection/:id/items` +### `GET /api/collection/:id/items` Fetch a specific Collection's items with the following options: @@ -504,7 +561,11 @@ Fetch a specific Collection's items with the following options: * `archived` - when `true`, return archived objects *instead* of unarchived ones. Defaults to `false`. * `pinned_state` - when `is_pinned`, return pinned objects only. when `is_not_pinned`, return non pinned objects only. +<<<<<<< HEAD when `all`, return everything. By default returns everything +======= + when `all`, return everything. By default returns everything. +>>>>>>> tags/v0.41.0 ##### PARAMS: @@ -515,13 +576,20 @@ Fetch a specific Collection's items with the following options: * **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). * **`pinned_state`** value may be nil, or if non-nil, value must be one of: `all`, `is_not_pinned`, `is_pinned`. +<<<<<<< HEAD * **`sort_column`** value may be nil, or if non-nil, value must be one of: `last_edited_at`, `last_edited_by`, `model`, `name`. * **`sort_direction`** value may be nil, or if non-nil, value must be one of: `asc`, `desc`. +======= +>>>>>>> tags/v0.41.0 + +* **`sort_column`** value may be nil, or if non-nil, value must be one of: `last_edited_at`, `last_edited_by`, `model`, `name`. + +* **`sort_direction`** value may be nil, or if non-nil, value must be one of: `asc`, `desc`. -## `GET /api/collection/graph` +### `GET /api/collection/graph` Fetch a graph of all Collection Permissions. @@ -531,17 +599,15 @@ You must be a superuser to do this. * **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. 
+### `GET /api/collection/root` -## `GET /api/collection/root` - -Return the 'Root' Collection object with standard details added +Return the 'Root' Collection object with standard details added. ##### PARAMS: * **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. - -## `GET /api/collection/root/items` +### `GET /api/collection/root/items` Fetch objects that the current user should see at their root level. As mentioned elsewhere, the 'Root' Collection doesn't actually exist as a row in the application DB: it's simply a virtual Collection where things with no @@ -566,13 +632,20 @@ Fetch objects that the current user should see at their root level. As mentioned * **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. * **`pinned_state`** value may be nil, or if non-nil, value must be one of: `all`, `is_not_pinned`, `is_pinned`. +<<<<<<< HEAD * **`sort_column`** value may be nil, or if non-nil, value must be one of: `last_edited_at`, `last_edited_by`, `model`, `name`. * **`sort_direction`** value may be nil, or if non-nil, value must be one of: `asc`, `desc`. +======= +>>>>>>> tags/v0.41.0 -## `GET /api/collection/tree` +* **`sort_column`** value may be nil, or if non-nil, value must be one of: `last_edited_at`, `last_edited_by`, `model`, `name`. + +* **`sort_direction`** value may be nil, or if non-nil, value must be one of: `asc`, `desc`. + +### `GET /api/collection/tree` Similar to `GET /`, but returns Collections in a tree structure, e.g. @@ -583,14 +656,19 @@ Similar to `GET /`, but returns Collections in a tree structure, e.g. :children [{:name "E"}]} {:name "F" :children [{:name "G"}]}]}]} - {:name "H"}] + {:name "H"}]. ##### PARAMS: +<<<<<<< HEAD * **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. +======= +>>>>>>> tags/v0.41.0 + +* **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. 
-## `POST /api/collection/` +### `POST /api/collection/` Create a new Collection. @@ -607,9 +685,12 @@ Create a new Collection. * **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. * **`authority_level`** value may be nil, or if non-nil, value must be one of: `official`. +<<<<<<< HEAD +======= +>>>>>>> tags/v0.41.0 -## `PUT /api/collection/:id` +### `PUT /api/collection/:id` Modify an existing Collection, including archiving or unarchiving it, or moving it. @@ -622,6 +703,7 @@ Modify an existing Collection, including archiving or unarchiving it, or moving * **`archived`** value may be nil, or if non-nil, value must be a boolean. * **`collection-updates`** +<<<<<<< HEAD * **`color`** value may be nil, or if non-nil, value must be a string that matches the regex `^#[0-9A-Fa-f]{6}$`. @@ -632,9 +714,20 @@ Modify an existing Collection, including archiving or unarchiving it, or moving * **`id`** * **`update_collection_tree_authority_level`** value may be nil, or if non-nil, value must be a boolean. +======= +>>>>>>> tags/v0.41.0 +* **`color`** value may be nil, or if non-nil, value must be a string that matches the regex `^#[0-9A-Fa-f]{6}$`. + +* **`name`** value may be nil, or if non-nil, value must be a non-blank string. -## `PUT /api/collection/graph` +* **`parent_id`** value may be nil, or if non-nil, value must be an integer greater than zero. + +* **`id`** + +* **`update_collection_tree_authority_level`** value may be nil, or if non-nil, value must be a boolean. + +### `PUT /api/collection/graph` Do a batch update of Collections Permissions by passing in a modified graph. @@ -647,7 +740,35 @@ You must be a superuser to do this. * **`body`** value must be a map. -## `DELETE /api/dashboard/:dashboard-id/public_link` +## Dashboard + +/api/dashboard endpoints. 
+ + - [DELETE /api/dashboard/:dashboard-id/public_link](#delete-apidashboarddashboard-idpublic_link) + - [DELETE /api/dashboard/:id](#delete-apidashboardid) + - [DELETE /api/dashboard/:id/cards](#delete-apidashboardidcards) + - [DELETE /api/dashboard/:id/favorite](#delete-apidashboardidfavorite) + - [GET /api/dashboard/](#get-apidashboard) + - [GET /api/dashboard/:id](#get-apidashboardid) + - [GET /api/dashboard/:id/params/:param-key/search/:query](#get-apidashboardidparamsparam-keysearchquery) + - [GET /api/dashboard/:id/params/:param-key/values](#get-apidashboardidparamsparam-keyvalues) + - [GET /api/dashboard/:id/related](#get-apidashboardidrelated) + - [GET /api/dashboard/:id/revisions](#get-apidashboardidrevisions) + - [GET /api/dashboard/embeddable](#get-apidashboardembeddable) + - [GET /api/dashboard/params/valid-filter-fields](#get-apidashboardparamsvalid-filter-fields) + - [GET /api/dashboard/public](#get-apidashboardpublic) + - [POST /api/dashboard/](#post-apidashboard) + - [POST /api/dashboard/:dashboard-id/public_link](#post-apidashboarddashboard-idpublic_link) + - [POST /api/dashboard/:from-dashboard-id/copy](#post-apidashboardfrom-dashboard-idcopy) + - [POST /api/dashboard/:id/cards](#post-apidashboardidcards) + - [POST /api/dashboard/:id/favorite](#post-apidashboardidfavorite) + - [POST /api/dashboard/:id/revert](#post-apidashboardidrevert) + - [POST /api/dashboard/save](#post-apidashboardsave) + - [POST /api/dashboard/save/collection/:parent-collection-id](#post-apidashboardsavecollectionparent-collection-id) + - [PUT /api/dashboard/:id](#put-apidashboardid) + - [PUT /api/dashboard/:id/cards](#put-apidashboardidcards) + +### `DELETE /api/dashboard/:dashboard-id/public_link` Delete the publicly-accessible link to this Dashboard. @@ -657,8 +778,7 @@ You must be a superuser to do this. * **`dashboard-id`** - -## `DELETE /api/dashboard/:id` +### `DELETE /api/dashboard/:id` Delete a Dashboard. @@ -666,8 +786,7 @@ Delete a Dashboard. 
* **`id`** - -## `DELETE /api/dashboard/:id/cards` +### `DELETE /api/dashboard/:id/cards` Remove a `DashboardCard` from a Dashboard. @@ -677,8 +796,7 @@ Remove a `DashboardCard` from a Dashboard. * **`dashcardId`** value must be a valid integer greater than zero. - -## `DELETE /api/dashboard/:id/favorite` +### `DELETE /api/dashboard/:id/favorite` Unfavorite a Dashboard. @@ -686,21 +804,19 @@ Unfavorite a Dashboard. * **`id`** - -## `GET /api/dashboard/` +### `GET /api/dashboard/` Get `Dashboards`. With filter option `f` (default `all`), restrict results as follows: * `all` - Return all Dashboards. * `mine` - Return Dashboards created by the current user. - * `archived` - Return Dashboards that have been archived. (By default, these are *excluded*.) + * `archived` - Return Dashboards that have been archived. (By default, these are *excluded*.). ##### PARAMS: * **`f`** value may be nil, or if non-nil, value must be one of: `all`, `archived`, `mine`. - -## `GET /api/dashboard/:id` +### `GET /api/dashboard/:id` Get Dashboard with ID. @@ -708,8 +824,7 @@ Get Dashboard with ID. * **`id`** - -## `GET /api/dashboard/:id/params/:param-key/search/:query` +### `GET /api/dashboard/:id/params/:param-key/search/:query` Fetch possible values of the parameter whose ID is `:param-key` that contain `:query`. Optionally restrict these values by passing query parameters like `other-parameter=value` e.g. @@ -730,14 +845,13 @@ Fetch possible values of the parameter whose ID is `:param-key` that contain `:q * **`query-params`** - -## `GET /api/dashboard/:id/params/:param-key/values` +### `GET /api/dashboard/:id/params/:param-key/values` Fetch possible values of the parameter whose ID is `:param-key`. Optionally restrict these values by passing query parameters like `other-parameter=value` e.g. 
;; fetch values for Dashboard 1 parameter 'abc' that are possible when parameter 'def' is set to 100 - GET /api/dashboard/1/params/abc/values?def=100 + GET /api/dashboard/1/params/abc/values?def=100. ##### PARAMS: @@ -747,8 +861,7 @@ Fetch possible values of the parameter whose ID is `:param-key`. Optionally rest * **`query-params`** - -## `GET /api/dashboard/:id/related` +### `GET /api/dashboard/:id/related` Return related entities. @@ -756,8 +869,7 @@ Return related entities. * **`id`** - -## `GET /api/dashboard/:id/revisions` +### `GET /api/dashboard/:id/revisions` Fetch `Revisions` for Dashboard with ID. @@ -765,16 +877,14 @@ Fetch `Revisions` for Dashboard with ID. * **`id`** - -## `GET /api/dashboard/embeddable` +### `GET /api/dashboard/embeddable` Fetch a list of Dashboards where `enable_embedding` is `true`. The dashboards can be embedded using the embedding endpoints and a signed JWT. You must be a superuser to do this. - -## `GET /api/dashboard/params/valid-filter-fields` +### `GET /api/dashboard/params/valid-filter-fields` Utility endpoint for powering Dashboard UI. Given some set of `filtered` Field IDs (presumably Fields used in parameters) and a set of `filtering` Field IDs that will be used to restrict values of `filtered` Fields, for each @@ -795,7 +905,7 @@ Utility endpoint for powering Dashboard UI. Given some set of `filtered` Field I Results are returned as a map of - `filtered` Field ID -> subset of `filtering` Field IDs that would be used in chain filter query + `filtered` Field ID -> subset of `filtering` Field IDs that would be used in chain filter query. ##### PARAMS: @@ -803,16 +913,14 @@ Utility endpoint for powering Dashboard UI. Given some set of `filtered` Field I * **`filtering`** value may be nil, or if non-nil, value must satisfy one of the following requirements: 1) value must be a valid integer greater than zero. 2) value must be an array. Each value must be a valid integer greater than zero. The array cannot be empty. 
- -## `GET /api/dashboard/public` +### `GET /api/dashboard/public` Fetch a list of Dashboards with public UUIDs. These dashboards are publicly-accessible *if* public sharing is enabled. You must be a superuser to do this. - -## `POST /api/dashboard/` +### `POST /api/dashboard/` Create a new Dashboard. @@ -824,14 +932,15 @@ Create a new Dashboard. * **`parameters`** value must be an array. Each value must be a map. +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. + * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. * **`collection_position`** value may be nil, or if non-nil, value must be an integer greater than zero. * **`dashboard`** - -## `POST /api/dashboard/:dashboard-id/public_link` +### `POST /api/dashboard/:dashboard-id/public_link` Generate publicly-accessible links for this Dashboard. Returns UUID to be used in public links. (If this Dashboard has already been shared, it will return the existing public link rather than creating a new one.) Public @@ -843,8 +952,7 @@ You must be a superuser to do this. * **`dashboard-id`** - -## `POST /api/dashboard/:from-dashboard-id/copy` +### `POST /api/dashboard/:from-dashboard-id/copy` Copy a Dashboard. @@ -862,8 +970,7 @@ Copy a Dashboard. * **`dashboard`** - -## `POST /api/dashboard/:id/cards` +### `POST /api/dashboard/:id/cards` Add a `Card` to a Dashboard. @@ -879,8 +986,7 @@ Add a `Card` to a Dashboard. * **`dashboard-card`** - -## `POST /api/dashboard/:id/favorite` +### `POST /api/dashboard/:id/favorite` Favorite a Dashboard. @@ -888,8 +994,7 @@ Favorite a Dashboard. * **`id`** - -## `POST /api/dashboard/:id/revert` +### `POST /api/dashboard/:id/revert` Revert a Dashboard to a prior `Revision`. @@ -899,8 +1004,7 @@ Revert a Dashboard to a prior `Revision`. * **`revision_id`** value must be an integer greater than zero. 
- -## `POST /api/dashboard/save` +### `POST /api/dashboard/save` Save a denormalized description of dashboard. @@ -908,8 +1012,7 @@ Save a denormalized description of dashboard. * **`dashboard`** - -## `POST /api/dashboard/save/collection/:parent-collection-id` +### `POST /api/dashboard/save/collection/:parent-collection-id` Save a denormalized description of dashboard into collection with ID `:parent-collection-id`. @@ -919,8 +1022,7 @@ Save a denormalized description of dashboard into collection with ID `:parent-co * **`dashboard`** - -## `PUT /api/dashboard/:id` +### `PUT /api/dashboard/:id` Update a Dashboard. @@ -954,12 +1056,13 @@ Update a Dashboard. * **`embedding_params`** value may be nil, or if non-nil, value must be a valid embedding params map. +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. + * **`id`** * **`position`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `PUT /api/dashboard/:id/cards` +### `PUT /api/dashboard/:id/cards` Update `Cards` on a Dashboard. Request body should have the form: @@ -969,7 +1072,7 @@ Update `Cards` on a Dashboard. Request body should have the form: :row ... :col ... :series [{:id 123 - ...}]} ...]} + ...}]} ...]}. ##### PARAMS: @@ -978,7 +1081,34 @@ Update `Cards` on a Dashboard. Request body should have the form: * **`cards`** -## `DELETE /api/database/:id` +## Database + +/api/database endpoints. 
+ + - [DELETE /api/database/:id](#delete-apidatabaseid) + - [GET /api/database/](#get-apidatabase) + - [GET /api/database/:id](#get-apidatabaseid) + - [GET /api/database/:id/autocomplete_suggestions](#get-apidatabaseidautocomplete_suggestions) + - [GET /api/database/:id/fields](#get-apidatabaseidfields) + - [GET /api/database/:id/idfields](#get-apidatabaseididfields) + - [GET /api/database/:id/metadata](#get-apidatabaseidmetadata) + - [GET /api/database/:id/schema/](#get-apidatabaseidschema) + - [GET /api/database/:id/schema/:schema](#get-apidatabaseidschemaschema) + - [GET /api/database/:id/schemas](#get-apidatabaseidschemas) + - [GET /api/database/:virtual-db/metadata](#get-apidatabasevirtual-dbmetadata) + - [GET /api/database/:virtual-db/schema/:schema](#get-apidatabasevirtual-dbschemaschema) + - [GET /api/database/:virtual-db/schemas](#get-apidatabasevirtual-dbschemas) + - [GET /api/database/db-ids-with-deprecated-drivers](#get-apidatabasedb-ids-with-deprecated-drivers) + - [POST /api/database/](#post-apidatabase) + - [POST /api/database/:id/discard_values](#post-apidatabaseiddiscard_values) + - [POST /api/database/:id/rescan_values](#post-apidatabaseidrescan_values) + - [POST /api/database/:id/sync](#post-apidatabaseidsync) + - [POST /api/database/:id/sync_schema](#post-apidatabaseidsync_schema) + - [POST /api/database/sample_dataset](#post-apidatabasesample_dataset) + - [POST /api/database/validate](#post-apidatabasevalidate) + - [PUT /api/database/:id](#put-apidatabaseid) + +### `DELETE /api/database/:id` Delete a `Database`. @@ -986,8 +1116,7 @@ Delete a `Database`. * **`id`** - -## `GET /api/database/` +### `GET /api/database/` Fetch all `Databases`. @@ -1000,7 +1129,7 @@ Fetch all `Databases`. * `include_cards` here means we should also include virtual Table entries for saved Questions, e.g. so we can easily use them as source Tables in queries. This is a deprecated alias for `saved=true` + `include=tables` (for the saved - questions virtual DB). 
Prefer using `include` and `saved` instead. + questions virtual DB). Prefer using `include` and `saved` instead. . ##### PARAMS: @@ -1012,8 +1141,7 @@ Fetch all `Databases`. * **`saved`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). - -## `GET /api/database/:id` +### `GET /api/database/:id` Get a single Database with `id`. Optionally pass `?include=tables` or `?include=tables.fields` to include the Tables belonging to this database, or the Tables and Fields, respectively. @@ -1024,8 +1152,7 @@ Get a single Database with `id`. Optionally pass `?include=tables` or `?include= * **`include`** value may be nil, or if non-nil, value must be one of: `tables`, `tables.fields`. - -## `GET /api/database/:id/autocomplete_suggestions` +### `GET /api/database/:id/autocomplete_suggestions` Return a list of autocomplete suggestions for a given `prefix`. @@ -1033,7 +1160,7 @@ Return a list of autocomplete suggestions for a given `prefix`. and `Fields` in this `Database`. Tables are returned in the format `[table_name "Table"]`; - Fields are returned in the format `[field_name "table_name base_type semantic_type"]` + Fields are returned in the format `[field_name "table_name base_type semantic_type"]`. ##### PARAMS: @@ -1041,8 +1168,7 @@ Return a list of autocomplete suggestions for a given `prefix`. * **`prefix`** value must be a non-blank string. - -## `GET /api/database/:id/fields` +### `GET /api/database/:id/fields` Get a list of all `Fields` in `Database`. @@ -1050,8 +1176,7 @@ Get a list of all `Fields` in `Database`. * **`id`** - -## `GET /api/database/:id/idfields` +### `GET /api/database/:id/idfields` Get a list of all primary key `Fields` for `Database`. @@ -1059,8 +1184,7 @@ Get a list of all primary key `Fields` for `Database`. * **`id`** - -## `GET /api/database/:id/metadata` +### `GET /api/database/:id/metadata` Get metadata about a `Database`, including all of its `Tables` and `Fields`. 
By default only non-hidden tables and fields are returned. Passing include_hidden=true includes them. @@ -1072,8 +1196,7 @@ Get metadata about a `Database`, including all of its `Tables` and `Fields`. * **`include_hidden`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). - -## `GET /api/database/:id/schema/` +### `GET /api/database/:id/schema/` Return a list of Tables for a Database whose `schema` is `nil` or an empty string. @@ -1081,10 +1204,9 @@ Return a list of Tables for a Database whose `schema` is `nil` or an empty strin * **`id`** +### `GET /api/database/:id/schema/:schema` -## `GET /api/database/:id/schema/:schema` - -Returns a list of Tables for the given Database `id` and `schema` +Returns a list of Tables for the given Database `id` and `schema`. ##### PARAMS: @@ -1092,23 +1214,20 @@ Returns a list of Tables for the given Database `id` and `schema` * **`schema`** +### `GET /api/database/:id/schemas` -## `GET /api/database/:id/schemas` - -Returns a list of all the schemas found for the database `id` +Returns a list of all the schemas found for the database `id`. ##### PARAMS: * **`id`** - -## `GET /api/database/:virtual-db/metadata` +### `GET /api/database/:virtual-db/metadata` Endpoint that provides metadata for the Saved Questions 'virtual' database. Used for fooling the frontend and allowing it to treat the Saved Questions virtual DB just like any other database. - -## `GET /api/database/:virtual-db/schema/:schema` +### `GET /api/database/:virtual-db/schema/:schema` Returns a list of Tables for the saved questions virtual database. @@ -1116,13 +1235,15 @@ Returns a list of Tables for the saved questions virtual database. * **`schema`** - -## `GET /api/database/:virtual-db/schemas` +### `GET /api/database/:virtual-db/schemas` Returns a list of all the schemas found for the saved questions virtual database. 
+### `GET /api/database/db-ids-with-deprecated-drivers` + +Return a list of database IDs using currently deprecated drivers. -## `POST /api/database/` +### `POST /api/database/` Add a new `Database`. @@ -1144,8 +1265,9 @@ You must be a superuser to do this. * **`auto_run_queries`** value may be nil, or if non-nil, value must be a boolean. +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `POST /api/database/:id/discard_values` +### `POST /api/database/:id/discard_values` Discards all saved field values for this `Database`. @@ -1155,8 +1277,7 @@ You must be a superuser to do this. * **`id`** - -## `POST /api/database/:id/rescan_values` +### `POST /api/database/:id/rescan_values` Trigger a manual scan of the field values for this `Database`. @@ -1166,8 +1287,7 @@ You must be a superuser to do this. * **`id`** - -## `POST /api/database/:id/sync` +### `POST /api/database/:id/sync` Update the metadata for this `Database`. This happens asynchronously. @@ -1175,8 +1295,7 @@ Update the metadata for this `Database`. This happens asynchronously. * **`id`** - -## `POST /api/database/:id/sync_schema` +### `POST /api/database/:id/sync_schema` Trigger a manual update of the schema metadata for this `Database`. @@ -1186,15 +1305,13 @@ You must be a superuser to do this. * **`id`** - -## `POST /api/database/sample_dataset` +### `POST /api/database/sample_dataset` Add the sample dataset as a new `Database`. You must be a superuser to do this. - -## `POST /api/database/validate` +### `POST /api/database/validate` Validate that we can connect to a database given a set of details. @@ -1206,8 +1323,7 @@ You must be a superuser to do this. * **`details`** value must be a map. - -## `PUT /api/database/:id` +### `PUT /api/database/:id` Update a `Database`. @@ -1233,6 +1349,8 @@ You must be a superuser to do this. * **`is_full_sync`** +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. 
+ * **`details`** value may be nil, or if non-nil, value must be a map. * **`id`** @@ -1240,7 +1358,17 @@ You must be a superuser to do this. * **`is_on_demand`** -## `POST /api/dataset/` +## Dataset + +/api/dataset endpoints. + + - [POST /api/dataset/](#post-apidataset) + - [POST /api/dataset/:export-format](#post-apidatasetexport-format) + - [POST /api/dataset/duration](#post-apidatasetduration) + - [POST /api/dataset/native](#post-apidatasetnative) + - [POST /api/dataset/pivot](#post-apidatasetpivot) + +### `POST /api/dataset/` Execute a query and retrieve the results in the usual format. @@ -1252,8 +1380,7 @@ Execute a query and retrieve the results in the usual format. * **`query`** - -## `POST /api/dataset/:export-format` +### `POST /api/dataset/:export-format` Execute a query and download the result data as a file in the specified format. @@ -1263,8 +1390,9 @@ Execute a query and download the result data as a file in the specified format. * **`query`** value must be a valid JSON string. +* **`visualization_settings`** value must be a valid JSON string. -## `POST /api/dataset/duration` +### `POST /api/dataset/duration` Get historical query execution duration. @@ -1274,8 +1402,7 @@ Get historical query execution duration. * **`query`** - -## `POST /api/dataset/native` +### `POST /api/dataset/native` Fetch a native version of an MBQL query. @@ -1283,10 +1410,9 @@ Fetch a native version of an MBQL query. * **`query`** +### `POST /api/dataset/pivot` -## `POST /api/dataset/pivot` - -Generate a pivoted dataset for an ad-hoc query +Generate a pivoted dataset for an ad-hoc query. ##### PARAMS: @@ -1297,21 +1423,28 @@ Generate a pivoted dataset for an ad-hoc query * **`query`** -## `DELETE /api/email/` +## Email -Clear all email related settings. You must be a superuser to ddo this - -You must be a superuser to do this. +/api/email endpoints. 
+ - [DELETE /api/email/](#delete-apiemail)
+ - [POST /api/email/test](#post-apiemailtest)
+ - [PUT /api/email/](#put-apiemail)

-## `POST /api/email/test`
+### `DELETE /api/email/`

-Send a test email. You must be a superuser to do this.
+Clear all email related settings. You must be a superuser to do this.

You must be a superuser to do this.

+### `POST /api/email/test`
+
+Send a test email using the SMTP Settings. You must be a superuser to do this. Returns `{:ok true}` if we were able
+ to send the message successfully, otherwise a standard 400 error response.
+
+You must be a superuser to do this.

-## `PUT /api/email/`
+### `PUT /api/email/`

Update multiple email Settings. You must be a superuser to do this.

@@ -1322,20 +1455,54 @@

You must be a superuser to do this.

* **`settings`** value must be a map.

-## `GET /api/embed/card/:token`
+## Embed
+
+Various endpoints that use [JSON web tokens](https://jwt.io/introduction/) to fetch Cards and Dashboards.
+ The endpoints are the same as the ones in `api/public/`, and differ only in the way they are authorized.
+
+ To use these endpoints:
+
+ 1. Set the `embedding-secret-key` Setting to a hexadecimal-encoded 32-byte sequence (i.e., a 64-character string).
+ You can use `/api/util/random_token` to get a cryptographically-secure value for this.
+ 2. Sign/base-64 encode a JSON Web Token using the secret key and pass it as the relevant part of the URL path
+ to the various endpoints here.
+
+ Tokens can have the following fields:
+
+ {:resource {:question
+ :dashboard }
+ :params }.
+ + - [GET /api/embed/card/:token](#get-apiembedcardtoken) + - [GET /api/embed/card/:token/field/:field-id/remapping/:remapped-id](#get-apiembedcardtokenfieldfield-idremappingremapped-id) + - [GET /api/embed/card/:token/field/:field-id/search/:search-field-id](#get-apiembedcardtokenfieldfield-idsearchsearch-field-id) + - [GET /api/embed/card/:token/field/:field-id/values](#get-apiembedcardtokenfieldfield-idvalues) + - [GET /api/embed/card/:token/query](#get-apiembedcardtokenquery) + - [GET /api/embed/card/:token/query/:export-format](#get-apiembedcardtokenqueryexport-format) + - [GET /api/embed/dashboard/:token](#get-apiembeddashboardtoken) + - [GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id](#get-apiembeddashboardtokendashcarddashcard-idcardcard-id) + - [GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id/:export-format](#get-apiembeddashboardtokendashcarddashcard-idcardcard-idexport-format) + - [GET /api/embed/dashboard/:token/field/:field-id/remapping/:remapped-id](#get-apiembeddashboardtokenfieldfield-idremappingremapped-id) + - [GET /api/embed/dashboard/:token/field/:field-id/search/:search-field-id](#get-apiembeddashboardtokenfieldfield-idsearchsearch-field-id) + - [GET /api/embed/dashboard/:token/field/:field-id/values](#get-apiembeddashboardtokenfieldfield-idvalues) + - [GET /api/embed/dashboard/:token/params/:param-key/search/:prefix](#get-apiembeddashboardtokenparamsparam-keysearchprefix) + - [GET /api/embed/dashboard/:token/params/:param-key/values](#get-apiembeddashboardtokenparamsparam-keyvalues) + - [GET /api/embed/pivot/card/:token/query](#get-apiembedpivotcardtokenquery) + - [GET /api/embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id](#get-apiembedpivotdashboardtokendashcarddashcard-idcardcard-id) + +### `GET /api/embed/card/:token` Fetch a Card via a JSON Web Token signed with the `embedding-secret-key`. Token should have the following format: - {:resource {:question }} + {:resource {:question }}. 
##### PARAMS: * **`token`** - -## `GET /api/embed/card/:token/field/:field-id/remapping/:remapped-id` +### `GET /api/embed/card/:token/field/:field-id/remapping/:remapped-id` Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with embedded Cards. @@ -1350,8 +1517,7 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/: * **`value`** value must be a non-blank string. - -## `GET /api/embed/card/:token/field/:field-id/search/:search-field-id` +### `GET /api/embed/card/:token/field/:field-id/search/:search-field-id` Search for values of a Field that is referenced by an embedded Card. @@ -1367,8 +1533,7 @@ Search for values of a Field that is referenced by an embedded Card. * **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. - -## `GET /api/embed/card/:token/field/:field-id/values` +### `GET /api/embed/card/:token/field/:field-id/values` Fetch FieldValues for a Field that is referenced by an embedded Card. @@ -1378,15 +1543,14 @@ Fetch FieldValues for a Field that is referenced by an embedded Card. * **`field-id`** - -## `GET /api/embed/card/:token/query` +### `GET /api/embed/card/:token/query` Fetch the results of running a Card using a JSON Web Token signed with the `embedding-secret-key`. Token should have the following format: {:resource {:question } - :params } + :params }. ##### PARAMS: @@ -1396,8 +1560,7 @@ Fetch the results of running a Card using a JSON Web Token signed with the `embe * **`query-params`** - -## `GET /api/embed/card/:token/query/:export-format` +### `GET /api/embed/card/:token/query/:export-format` Like `GET /api/embed/card/query`, but returns the results as a file in the specified format. 
@@ -1409,24 +1572,22 @@ Like `GET /api/embed/card/query`, but returns the results as a file in the speci * **`query-params`** - -## `GET /api/embed/dashboard/:token` +### `GET /api/embed/dashboard/:token` Fetch a Dashboard via a JSON Web Token signed with the `embedding-secret-key`. Token should have the following format: - {:resource {:dashboard }} + {:resource {:dashboard }}. ##### PARAMS: * **`token`** - -## `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id` +### `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id` Fetch the results of running a Card belonging to a Dashboard using a JSON Web Token signed with the - `embedding-secret-key` + `embedding-secret-key`. ##### PARAMS: @@ -1440,11 +1601,10 @@ Fetch the results of running a Card belonging to a Dashboard using a JSON Web To * **`query-params`** - -## `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id/:export-format` +### `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id/:export-format` Fetch the results of running a Card belonging to a Dashboard using a JSON Web Token signed with the - `embedding-secret-key` return the data in one of the export formats + `embedding-secret-key` return the data in one of the export formats. ##### PARAMS: @@ -1458,8 +1618,7 @@ Fetch the results of running a Card belonging to a Dashboard using a JSON Web To * **`query-params`** - -## `GET /api/embed/dashboard/:token/field/:field-id/remapping/:remapped-id` +### `GET /api/embed/dashboard/:token/field/:field-id/remapping/:remapped-id` Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with embedded Dashboards. @@ -1474,8 +1633,7 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/: * **`value`** value must be a non-blank string. 
- -## `GET /api/embed/dashboard/:token/field/:field-id/search/:search-field-id` +### `GET /api/embed/dashboard/:token/field/:field-id/search/:search-field-id` Search for values of a Field that is referenced by a Card in an embedded Dashboard. @@ -1491,8 +1649,7 @@ Search for values of a Field that is referenced by a Card in an embedded Dashboa * **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. - -## `GET /api/embed/dashboard/:token/field/:field-id/values` +### `GET /api/embed/dashboard/:token/field/:field-id/values` Fetch FieldValues for a Field that is used as a param in an embedded Dashboard. @@ -1502,8 +1659,7 @@ Fetch FieldValues for a Field that is used as a param in an embedded Dashboard. * **`field-id`** - -## `GET /api/embed/dashboard/:token/params/:param-key/search/:prefix` +### `GET /api/embed/dashboard/:token/params/:param-key/search/:prefix` Embedded version of chain filter search endpoint. @@ -1517,8 +1673,7 @@ Embedded version of chain filter search endpoint. * **`query-params`** - -## `GET /api/embed/dashboard/:token/params/:param-key/values` +### `GET /api/embed/dashboard/:token/params/:param-key/values` Embedded version of chain filter values endpoint. @@ -1530,15 +1685,14 @@ Embedded version of chain filter values endpoint. * **`query-params`** - -## `GET /api/embed/pivot/card/:token/query` +### `GET /api/embed/pivot/card/:token/query` Fetch the results of running a Card using a JSON Web Token signed with the `embedding-secret-key`. Token should have the following format: {:resource {:question } - :params } + :params }. 
##### PARAMS: @@ -1548,11 +1702,10 @@ Fetch the results of running a Card using a JSON Web Token signed with the `embe * **`query-params`** - -## `GET /api/embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id` +### `GET /api/embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id` Fetch the results of running a Card belonging to a Dashboard using a JSON Web Token signed with the - `embedding-secret-key` + `embedding-secret-key`. ##### PARAMS: @@ -1567,16 +1720,31 @@ Fetch the results of running a Card belonging to a Dashboard using a JSON Web To * **`query-params`** -## `DELETE /api/field/:id/dimension` +## Field + + - [DELETE /api/field/:id/dimension](#delete-apifieldiddimension) + - [GET /api/field/:id](#get-apifieldid) + - [GET /api/field/:id/related](#get-apifieldidrelated) + - [GET /api/field/:id/remapping/:remapped-id](#get-apifieldidremappingremapped-id) + - [GET /api/field/:id/search/:search-id](#get-apifieldidsearchsearch-id) + - [GET /api/field/:id/summary](#get-apifieldidsummary) + - [GET /api/field/:id/values](#get-apifieldidvalues) + - [GET /api/field/field%2C:field-name%2C:options/values](#get-apifieldfield2cfield-name2coptionsvalues) + - [POST /api/field/:id/dimension](#post-apifieldiddimension) + - [POST /api/field/:id/discard_values](#post-apifieldiddiscard_values) + - [POST /api/field/:id/rescan_values](#post-apifieldidrescan_values) + - [POST /api/field/:id/values](#post-apifieldidvalues) + - [PUT /api/field/:id](#put-apifieldid) + +### `DELETE /api/field/:id/dimension` -Remove the dimension associated to field at ID +Remove the dimension associated to field at ID. ##### PARAMS: * **`id`** - -## `GET /api/field/:id` +### `GET /api/field/:id` Get `Field` with ID. @@ -1584,8 +1752,7 @@ Get `Field` with ID. * **`id`** - -## `GET /api/field/:id/related` +### `GET /api/field/:id/related` Return related entities. @@ -1593,8 +1760,7 @@ Return related entities. 
* **`id`** - -## `GET /api/field/:id/remapping/:remapped-id` +### `GET /api/field/:id/remapping/:remapped-id` Fetch remapped Field values. @@ -1606,8 +1772,7 @@ Fetch remapped Field values. * **`value`** - -## `GET /api/field/:id/search/:search-id` +### `GET /api/field/:id/search/:search-id` Search for values of a Field with `search-id` that start with `value`. See docstring for `metabase.api.field/search-values` for a more detailed explanation. @@ -1620,10 +1785,7 @@ Search for values of a Field with `search-id` that start with `value`. See docst * **`value`** value must be a non-blank string. -* **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. - - -## `GET /api/field/:id/summary` +### `GET /api/field/:id/summary` Get the count and distinct count of `Field` with ID. @@ -1631,8 +1793,7 @@ Get the count and distinct count of `Field` with ID. * **`id`** - -## `GET /api/field/:id/values` +### `GET /api/field/:id/values` If a Field's value of `has_field_values` is `list`, return a list of all the distinct values of the Field, and (if defined by a User) a map of human-readable remapped values. @@ -1641,8 +1802,7 @@ If a Field's value of `has_field_values` is `list`, return a list of all the dis * **`id`** - -## `GET /api/field/field%2C:field-name%2C:options/values` +### `GET /api/field/field%2C:field-name%2C:options/values` Implementation of the field values endpoint for fields in the Saved Questions 'virtual' DB. This endpoint is just a convenience to simplify the frontend code. It just returns the standard 'empty' field values response. @@ -1651,10 +1811,9 @@ Implementation of the field values endpoint for fields in the Saved Questions 'v * **`_`** +### `POST /api/field/:id/dimension` -## `POST /api/field/:id/dimension` - -Sets the dimension for the given field at ID +Sets the dimension for the given field at ID. 
##### PARAMS:

@@ -1666,8 +1825,7 @@ Sets the dimension for the given field at ID

* **`id`**

* **`type`** value must be one of: `external`, `internal`.

* **`name`** value must be a non-blank string.

* **`human_readable_field_id`** value may be nil, or if non-nil, value must be an integer greater than zero.

-
-## `POST /api/field/:id/discard_values`
+### `POST /api/field/:id/discard_values`

Discard the FieldValues belonging to this Field. Only applies to fields that have FieldValues. If this Field's
 Database is set up to automatically sync FieldValues, they will be recreated during the next cycle.

@@ -1678,8 +1836,7 @@ You must be a superuser to do this.

* **`id`**

-
-## `POST /api/field/:id/rescan_values`
+### `POST /api/field/:id/rescan_values`

Manually trigger an update for the FieldValues for this Field. Only applies to Fields that are eligible for
 FieldValues.

@@ -1690,8 +1847,7 @@ You must be a superuser to do this.

* **`id`**

-
-## `POST /api/field/:id/values`
+### `POST /api/field/:id/values`

Update the fields values and human-readable values for a `Field` whose semantic type is
 `category`/`city`/`state`/`country` or whose base type is `type/Boolean`. The human-readable values are optional.

@@ -1702,8 +1858,7 @@ Update the fields values and human-readable values for a `Field` whose semantic

* **`value-pairs`** value must be an array. Each value must be an array.

-
-## `PUT /api/field/:id`
+### `PUT /api/field/:id`

Update `Field` with ID.

@@ -1732,7 +1887,12 @@ Update `Field` with ID.

* **`id`**

-## `GET /api/geojson/`
+## Geojson
+
+ - [GET /api/geojson/](#get-apigeojson)
+ - [GET /api/geojson/:key](#get-apigeojsonkey)
+
+### `GET /api/geojson/`

Load a custom GeoJSON file based on a URL or file path provided as a query parameter. This behaves similarly to
 /api/geojson/:key but doesn't require the custom map to be saved to the DB first.
@@ -1745,8 +1909,8 @@ Load a custom GeoJSON file based on a URL or file path provided as a query param

* **`raise`**

-## `GET /api/geojson/:key`
+### `GET /api/geojson/:key`

Fetch a custom GeoJSON file as defined in the `custom-geojson` setting. (This just acts as a simple proxy for the file
 specified for `key`).

@@ -1760,7 +1928,13 @@ Fetch a custom GeoJSON file as defined in the `custom-geojson` setting. (This ju

* **`raise`**

-## `PUT /api/ldap/settings`
+## Ldap
+
+/api/ldap endpoints.
+
+ - [PUT /api/ldap/settings](#put-apildapsettings)
+
+### `PUT /api/ldap/settings`

Update LDAP related settings. You must be a superuser to do this.

@@ -1771,20 +1945,32 @@

You must be a superuser to do this.

* **`settings`** value must be a map.

-## `GET /api/login-history/current`
+## Login history
+
+ - [GET /api/login-history/current](#get-apilogin-historycurrent)
+
+### `GET /api/login-history/current`

Fetch recent logins for the current user.

-## `GET /api/metastore/token/status`
+## Metric

-Fetch info about the current MetaStore premium features token including whether it is `valid`, a `trial` token, its
- `features`, and when it is `valid_thru`.
+/api/metric endpoints.

+ - [DELETE /api/metric/:id](#delete-apimetricid)
+ - [GET /api/metric/](#get-apimetric)
+ - [GET /api/metric/:id](#get-apimetricid)
+ - [GET /api/metric/:id/related](#get-apimetricidrelated)
+ - [GET /api/metric/:id/revisions](#get-apimetricidrevisions)
+ - [POST /api/metric/](#post-apimetric)
+ - [POST /api/metric/:id/revert](#post-apimetricidrevert)
+ - [PUT /api/metric/:id](#put-apimetricid)
+ - [PUT /api/metric/:id/important_fields](#put-apimetricidimportant_fields)

-## `DELETE /api/metric/:id`
+### `DELETE /api/metric/:id`

-Archive a Metric. (DEPRECATED -- Just pass updated value of `:archived` to the `PUT` endpoint instead.)
+Archive a Metric. (DEPRECATED -- Just pass updated value of `:archived` to the `PUT` endpoint instead.).
##### PARAMS: @@ -1792,8 +1978,7 @@ Archive a Metric. (DEPRECATED -- Just pass updated value of `:archived` to the ` * **`revision_message`** value must be a non-blank string. - -## `GET /api/metric/` +### `GET /api/metric/` Fetch *all* `Metrics`. @@ -1801,8 +1986,7 @@ Fetch *all* `Metrics`. * **`id`** - -## `GET /api/metric/:id` +### `GET /api/metric/:id` Fetch `Metric` with ID. @@ -1810,8 +1994,7 @@ Fetch `Metric` with ID. * **`id`** - -## `GET /api/metric/:id/related` +### `GET /api/metric/:id/related` Return related entities. @@ -1819,8 +2002,7 @@ Return related entities. * **`id`** - -## `GET /api/metric/:id/revisions` +### `GET /api/metric/:id/revisions` Fetch `Revisions` for `Metric` with ID. @@ -1828,8 +2010,7 @@ Fetch `Revisions` for `Metric` with ID. * **`id`** - -## `POST /api/metric/` +### `POST /api/metric/` Create a new `Metric`. @@ -1843,8 +2024,7 @@ Create a new `Metric`. * **`definition`** value must be a map. - -## `POST /api/metric/:id/revert` +### `POST /api/metric/:id/revert` Revert a `Metric` to a prior `Revision`. @@ -1854,8 +2034,7 @@ Revert a `Metric` to a prior `Revision`. * **`revision_id`** value must be an integer greater than zero. - -## `PUT /api/metric/:id` +### `PUT /api/metric/:id` Update a `Metric` with ID. @@ -1881,8 +2060,7 @@ Update a `Metric` with ID. * **`how_is_this_calculated`** value may be nil, or if non-nil, value must be a string. - -## `PUT /api/metric/:id/important_fields` +### `PUT /api/metric/:id/important_fields` Update the important `Fields` for a `Metric` with ID. (This is used for the Getting Started guide). @@ -1896,16 +2074,24 @@ You must be a superuser to do this. * **`important_field_ids`** value must be an array. Each value must be an integer greater than zero. -## `GET /api/native-query-snippet/` +## Native query snippet + +Native query snippet (/api/native-query-snippet) endpoints. 
+ + - [GET /api/native-query-snippet/](#get-apinative-query-snippet) + - [GET /api/native-query-snippet/:id](#get-apinative-query-snippetid) + - [POST /api/native-query-snippet/](#post-apinative-query-snippet) + - [PUT /api/native-query-snippet/:id](#put-apinative-query-snippetid) -Fetch all snippets +### `GET /api/native-query-snippet/` + +Fetch all snippets. ##### PARAMS: * **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). - -## `GET /api/native-query-snippet/:id` +### `GET /api/native-query-snippet/:id` Fetch native query snippet with ID. @@ -1913,8 +2099,7 @@ Fetch native query snippet with ID. * **`id`** - -## `POST /api/native-query-snippet/` +### `POST /api/native-query-snippet/` Create a new `NativeQuerySnippet`. @@ -1928,8 +2113,7 @@ Create a new `NativeQuerySnippet`. * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `PUT /api/native-query-snippet/:id` +### `PUT /api/native-query-snippet/:id` Update an existing `NativeQuerySnippet`. @@ -1948,14 +2132,20 @@ Update an existing `NativeQuerySnippet`. * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `POST /api/notify/db/:id` +## Notify + +/api/notify/* endpoints which receive inbound etl server notifications. + + - [POST /api/notify/db/:id](#post-apinotifydbid) + +### `POST /api/notify/db/:id` Notification about a potential schema change to one of our `Databases`. Caller can optionally specify a `:table_id` or `:table_name` in the body to limit updates to a single `Table`. Optional Parameter `:scan` can be `"full"` or `"schema"` for a full sync or a schema sync, available regardless if a `:table_id` or `:table_name` is passed. 
This endpoint is secured by an API key that needs to be passed as a `X-METABASE-APIKEY` header which needs to be defined in - the `MB_API_KEY` [environment variable](https://www.metabase.com/docs/latest/operations-guide/environment-variables.html#mb_api_key) + the `MB_API_KEY` [environment variable](https://www.metabase.com/docs/latest/operations-guide/environment-variables.html#mb_api_key). ##### PARAMS: @@ -1968,7 +2158,22 @@ Notification about a potential schema change to one of our `Databases`. * **`scan`** value may be nil, or if non-nil, value must be one of: `full`, `schema`. -## `DELETE /api/permissions/group/:group-id` +## Permissions + +/api/permissions endpoints. + + - [DELETE /api/permissions/group/:group-id](#delete-apipermissionsgroupgroup-id) + - [DELETE /api/permissions/membership/:id](#delete-apipermissionsmembershipid) + - [GET /api/permissions/graph](#get-apipermissionsgraph) + - [GET /api/permissions/group](#get-apipermissionsgroup) + - [GET /api/permissions/group/:id](#get-apipermissionsgroupid) + - [GET /api/permissions/membership](#get-apipermissionsmembership) + - [POST /api/permissions/group](#post-apipermissionsgroup) + - [POST /api/permissions/membership](#post-apipermissionsmembership) + - [PUT /api/permissions/graph](#put-apipermissionsgraph) + - [PUT /api/permissions/group/:group-id](#put-apipermissionsgroupgroup-id) + +### `DELETE /api/permissions/group/:group-id` Delete a specific `PermissionsGroup`. @@ -1978,8 +2183,7 @@ You must be a superuser to do this. * **`group-id`** - -## `DELETE /api/permissions/membership/:id` +### `DELETE /api/permissions/membership/:id` Remove a User from a PermissionsGroup (delete their membership). @@ -1989,22 +2193,19 @@ You must be a superuser to do this. * **`id`** - -## `GET /api/permissions/graph` +### `GET /api/permissions/graph` Fetch a graph of all Permissions. You must be a superuser to do this. 
- -## `GET /api/permissions/group` +### `GET /api/permissions/group` Fetch all `PermissionsGroups`, including a count of the number of `:members` in that group. You must be a superuser to do this. - -## `GET /api/permissions/group/:id` +### `GET /api/permissions/group/:id` Fetch the details for a certain permissions group. @@ -2014,19 +2215,17 @@ You must be a superuser to do this. * **`id`** - -## `GET /api/permissions/membership` +### `GET /api/permissions/membership` Fetch a map describing the group memberships of various users. This map's format is: { [{:membership_id - :group_id }]} + :group_id }]}. You must be a superuser to do this. - -## `POST /api/permissions/group` +### `POST /api/permissions/group` Create a new `PermissionsGroup`. @@ -2036,8 +2235,7 @@ You must be a superuser to do this. * **`name`** value must be a non-blank string. - -## `POST /api/permissions/membership` +### `POST /api/permissions/membership` Add a `User` to a `PermissionsGroup`. Returns updated list of members belonging to the group. @@ -2049,8 +2247,7 @@ You must be a superuser to do this. * **`user_id`** value must be an integer greater than zero. - -## `PUT /api/permissions/graph` +### `PUT /api/permissions/graph` Do a batch update of Permissions by passing in a modified graph. This should return the same graph, in the same format, that you got from `GET /api/permissions/graph`, with any changes made in the wherever necessary. This @@ -2067,8 +2264,7 @@ You must be a superuser to do this. * **`body`** value must be a map. - -## `PUT /api/permissions/group/:group-id` +### `PUT /api/permissions/group/:group-id` Update the name of a `PermissionsGroup`. @@ -2081,7 +2277,36 @@ You must be a superuser to do this. * **`name`** value must be a non-blank string. 
-## `GET /api/preview-embed/card/:token` +## Premium features + + - [GET /api/premium-features/token/status](#get-apipremium-featurestokenstatus) + +### `GET /api/premium-features/token/status` + +Fetch info about the current Premium-Features premium features token including whether it is `valid`, a `trial` token, its + `features`, and when it is `valid_thru`. + + +## Preview embed + +Endpoints for previewing how Cards and Dashboards will look when embedding them. + These endpoints are basically identical in functionality to the ones in `/api/embed`, but: + + 1. Require admin access + 2. Ignore the values of `:enabled_embedding` for Cards/Dashboards + 3. Ignore the `:embed_params` whitelist for Card/Dashboards, instead using a field called `:_embedding_params` in + the JWT token itself. + + Refer to the documentation for those endpoints for further details. + + - [GET /api/preview-embed/card/:token](#get-apipreview-embedcardtoken) + - [GET /api/preview-embed/card/:token/query](#get-apipreview-embedcardtokenquery) + - [GET /api/preview-embed/dashboard/:token](#get-apipreview-embeddashboardtoken) + - [GET /api/preview-embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id](#get-apipreview-embeddashboardtokendashcarddashcard-idcardcard-id) + - [GET /api/preview-embed/pivot/card/:token/query](#get-apipreview-embedpivotcardtokenquery) + - [GET /api/preview-embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id](#get-apipreview-embedpivotdashboardtokendashcarddashcard-idcardcard-id) + +### `GET /api/preview-embed/card/:token` Fetch a Card you're considering embedding by passing a JWT `token`. @@ -2089,8 +2314,7 @@ Fetch a Card you're considering embedding by passing a JWT `token`. * **`token`** - -## `GET /api/preview-embed/card/:token/query` +### `GET /api/preview-embed/card/:token/query` Fetch the query results for a Card you're considering embedding by passing a JWT `token`. 
@@ -2102,17 +2326,15 @@ Fetch the query results for a Card you're considering embedding by passing a JWT

+### `GET /api/preview-embed/dashboard/:token`

-## `GET /api/preview-embed/dashboard/:token`
-
-Fetch a Dashboard you're considering embedding by passing a JWT `token`.
+Fetch a Dashboard you're considering embedding by passing a JWT `token`.

##### PARAMS:

* **`token`**

-
-## `GET /api/preview-embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id`
+### `GET /api/preview-embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id`

Fetch the results of running a Card belonging to a Dashboard you're considering embedding with JWT `token`.

@@ -2128,8 +2350,7 @@ Fetch the results of running a Card belonging to a Dashboard you're considering

* **`query-params`**

-
-## `GET /api/preview-embed/pivot/card/:token/query`
+### `GET /api/preview-embed/pivot/card/:token/query`

Fetch the query results for a Card you're considering embedding by passing a JWT `token`.

@@ -2141,8 +2362,7 @@ Fetch the query results for a Card you're considering embedding by passing a JWT

* **`query-params`**

-
-## `GET /api/preview-embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id`
+### `GET /api/preview-embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id`

Fetch the results of running a Card belonging to a Dashboard you're considering embedding with JWT `token`.

@@ -2159,7 +2379,28 @@ Fetch the results of running a Card belonging to a Dashboard you're considering

* **`query-params`**

-## `GET /api/public/card/:uuid`
+## Public
+
+Metabase API endpoints for viewing publicly-accessible Cards and Dashboards.
+ + - [GET /api/public/card/:uuid](#get-apipubliccarduuid) + - [GET /api/public/card/:uuid/field/:field-id/remapping/:remapped-id](#get-apipubliccarduuidfieldfield-idremappingremapped-id) + - [GET /api/public/card/:uuid/field/:field-id/search/:search-field-id](#get-apipubliccarduuidfieldfield-idsearchsearch-field-id) + - [GET /api/public/card/:uuid/field/:field-id/values](#get-apipubliccarduuidfieldfield-idvalues) + - [GET /api/public/card/:uuid/query](#get-apipubliccarduuidquery) + - [GET /api/public/card/:uuid/query/:export-format](#get-apipubliccarduuidqueryexport-format) + - [GET /api/public/dashboard/:uuid](#get-apipublicdashboarduuid) + - [GET /api/public/dashboard/:uuid/card/:card-id](#get-apipublicdashboarduuidcardcard-id) + - [GET /api/public/dashboard/:uuid/field/:field-id/remapping/:remapped-id](#get-apipublicdashboarduuidfieldfield-idremappingremapped-id) + - [GET /api/public/dashboard/:uuid/field/:field-id/search/:search-field-id](#get-apipublicdashboarduuidfieldfield-idsearchsearch-field-id) + - [GET /api/public/dashboard/:uuid/field/:field-id/values](#get-apipublicdashboarduuidfieldfield-idvalues) + - [GET /api/public/dashboard/:uuid/params/:param-key/search/:query](#get-apipublicdashboarduuidparamsparam-keysearchquery) + - [GET /api/public/dashboard/:uuid/params/:param-key/values](#get-apipublicdashboarduuidparamsparam-keyvalues) + - [GET /api/public/oembed](#get-apipublicoembed) + - [GET /api/public/pivot/card/:uuid/query](#get-apipublicpivotcarduuidquery) + - [GET /api/public/pivot/dashboard/:uuid/card/:card-id](#get-apipublicpivotdashboarduuidcardcard-id) + +### `GET /api/public/card/:uuid` Fetch a publicly-accessible Card an return query results as well as `:card` information. Does not require auth credentials. Public sharing must be enabled. 
@@ -2168,8 +2409,7 @@ Fetch a publicly-accessible Card an return query results as well as `:card` info * **`uuid`** - -## `GET /api/public/card/:uuid/field/:field-id/remapping/:remapped-id` +### `GET /api/public/card/:uuid/field/:field-id/remapping/:remapped-id` Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with public Cards. @@ -2184,8 +2424,7 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/: * **`value`** value must be a non-blank string. - -## `GET /api/public/card/:uuid/field/:field-id/search/:search-field-id` +### `GET /api/public/card/:uuid/field/:field-id/search/:search-field-id` Search for values of a Field that is referenced by a public Card. @@ -2201,8 +2440,7 @@ Search for values of a Field that is referenced by a public Card. * **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. - -## `GET /api/public/card/:uuid/field/:field-id/values` +### `GET /api/public/card/:uuid/field/:field-id/values` Fetch FieldValues for a Field that is referenced by a public Card. @@ -2212,8 +2450,7 @@ Fetch FieldValues for a Field that is referenced by a public Card. * **`field-id`** - -## `GET /api/public/card/:uuid/query` +### `GET /api/public/card/:uuid/query` Fetch a publicly-accessible Card an return query results as well as `:card` information. Does not require auth credentials. Public sharing must be enabled. @@ -2224,8 +2461,7 @@ Fetch a publicly-accessible Card an return query results as well as `:card` info * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. - -## `GET /api/public/card/:uuid/query/:export-format` +### `GET /api/public/card/:uuid/query/:export-format` Fetch a publicly-accessible Card and return query results in the specified format. Does not require auth credentials. Public sharing must be enabled. 
@@ -2238,8 +2474,7 @@ Fetch a publicly-accessible Card and return query results in the specified forma * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. - -## `GET /api/public/dashboard/:uuid` +### `GET /api/public/dashboard/:uuid` Fetch a publicly-accessible Dashboard. Does not require auth credentials. Public sharing must be enabled. @@ -2247,8 +2482,7 @@ Fetch a publicly-accessible Dashboard. Does not require auth credentials. Public * **`uuid`** - -## `GET /api/public/dashboard/:uuid/card/:card-id` +### `GET /api/public/dashboard/:uuid/card/:card-id` Fetch the results for a Card in a publicly-accessible Dashboard. Does not require auth credentials. Public sharing must be enabled. @@ -2261,8 +2495,7 @@ Fetch the results for a Card in a publicly-accessible Dashboard. Does not requir * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. - -## `GET /api/public/dashboard/:uuid/field/:field-id/remapping/:remapped-id` +### `GET /api/public/dashboard/:uuid/field/:field-id/remapping/:remapped-id` Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with public Dashboards. @@ -2277,8 +2510,7 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/: * **`value`** value must be a non-blank string. - -## `GET /api/public/dashboard/:uuid/field/:field-id/search/:search-field-id` +### `GET /api/public/dashboard/:uuid/field/:field-id/search/:search-field-id` Search for values of a Field that is referenced by a Card in a public Dashboard. @@ -2294,8 +2526,7 @@ Search for values of a Field that is referenced by a Card in a public Dashboard. * **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. 
- -## `GET /api/public/dashboard/:uuid/field/:field-id/values` +### `GET /api/public/dashboard/:uuid/field/:field-id/values` Fetch FieldValues for a Field that is referenced by a Card in a public Dashboard. @@ -2305,8 +2536,7 @@ Fetch FieldValues for a Field that is referenced by a Card in a public Dashboard * **`field-id`** - -## `GET /api/public/dashboard/:uuid/params/:param-key/search/:query` +### `GET /api/public/dashboard/:uuid/params/:param-key/search/:query` Fetch filter values for dashboard parameter `param-key`, containing specified `query`. @@ -2320,8 +2550,7 @@ Fetch filter values for dashboard parameter `param-key`, containing specified `q * **`query-params`** - -## `GET /api/public/dashboard/:uuid/params/:param-key/values` +### `GET /api/public/dashboard/:uuid/params/:param-key/values` Fetch filter values for dashboard parameter `param-key`. @@ -2333,8 +2562,7 @@ Fetch filter values for dashboard parameter `param-key`. * **`query-params`** - -## `GET /api/public/oembed` +### `GET /api/public/oembed` oEmbed endpoint used to retreive embed code and metadata for a (public) Metabase URL. @@ -2348,8 +2576,7 @@ oEmbed endpoint used to retreive embed code and metadata for a (public) Metabase * **`maxwidth`** value may be nil, or if non-nil, value must be a valid integer. - -## `GET /api/public/pivot/card/:uuid/query` +### `GET /api/public/pivot/card/:uuid/query` Fetch a publicly-accessible Card an return query results as well as `:card` information. Does not require auth credentials. Public sharing must be enabled. @@ -2360,8 +2587,7 @@ Fetch a publicly-accessible Card an return query results as well as `:card` info * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. - -## `GET /api/public/pivot/dashboard/:uuid/card/:card-id` +### `GET /api/public/pivot/dashboard/:uuid/card/:card-id` Fetch the results for a Card in a publicly-accessible Dashboard. Does not require auth credentials. Public sharing must be enabled. 
@@ -2375,16 +2601,22 @@ Fetch the results for a Card in a publicly-accessible Dashboard. Does not requir * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. -## `DELETE /api/pulse/:id` +## Pulse -Delete a Pulse. (DEPRECATED -- don't delete a Pulse anymore -- archive it instead.) +/api/pulse endpoints. -##### PARAMS: - -* **`id`** + - [DELETE /api/pulse/:id/subscription](#delete-apipulseidsubscription) + - [GET /api/pulse/](#get-apipulse) + - [GET /api/pulse/:id](#get-apipulseid) + - [GET /api/pulse/form_input](#get-apipulseform_input) + - [GET /api/pulse/preview_card/:id](#get-apipulsepreview_cardid) + - [GET /api/pulse/preview_card_info/:id](#get-apipulsepreview_card_infoid) + - [GET /api/pulse/preview_card_png/:id](#get-apipulsepreview_card_pngid) + - [POST /api/pulse/](#post-apipulse) + - [POST /api/pulse/test](#post-apipulsetest) + - [PUT /api/pulse/:id](#put-apipulseid) - -## `DELETE /api/pulse/:id/subscription/email` +### `DELETE /api/pulse/:id/subscription` For users to unsubscribe themselves from a pulse subscription. @@ -2392,10 +2624,11 @@ For users to unsubscribe themselves from a pulse subscription. * **`id`** +### `GET /api/pulse/` -## `GET /api/pulse/` - -Fetch all Pulses +Fetch all Pulses. If `dashboard_id` is specified, restricts results to dashboard subscriptions + associated with that dashboard. If `user_id` is specified, restricts results to pulses or subscriptions + created by the user, or for which the user is a known recipient. ##### PARAMS: @@ -2403,8 +2636,9 @@ Fetch all Pulses * **`dashboard_id`** value may be nil, or if non-nil, value must be an integer greater than zero. +* **`user_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `GET /api/pulse/:id` +### `GET /api/pulse/:id` Fetch `Pulse` with ID. @@ -2412,13 +2646,11 @@ Fetch `Pulse` with ID. 
* **`id`** - -## `GET /api/pulse/form_input` +### `GET /api/pulse/form_input` Provides relevant configuration information and user choices for creating/updating Pulses. - -## `GET /api/pulse/preview_card/:id` +### `GET /api/pulse/preview_card/:id` Get HTML rendering of a Card with `id`. @@ -2426,8 +2658,7 @@ Get HTML rendering of a Card with `id`. * **`id`** - -## `GET /api/pulse/preview_card_info/:id` +### `GET /api/pulse/preview_card_info/:id` Get JSON object containing HTML rendering of a Card with `id` and other information. @@ -2435,8 +2666,7 @@ Get JSON object containing HTML rendering of a Card with `id` and other informat * **`id`** - -## `GET /api/pulse/preview_card_png/:id` +### `GET /api/pulse/preview_card_png/:id` Get PNG rendering of a Card with `id`. @@ -2444,8 +2674,7 @@ Get PNG rendering of a Card with `id`. * **`id`** - -## `POST /api/pulse/` +### `POST /api/pulse/` Create a new `Pulse`. @@ -2467,8 +2696,7 @@ Create a new `Pulse`. * **`parameters`** value must be an array. Each value must be a map. - -## `POST /api/pulse/test` +### `POST /api/pulse/test` Test send an unsaved pulse. @@ -2488,8 +2716,7 @@ Test send an unsaved pulse. * **`dashboard_id`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `PUT /api/pulse/:id` +### `PUT /api/pulse/:id` Update a Pulse with `id`. @@ -2514,7 +2741,12 @@ Update a Pulse with `id`. * **`pulse-updates`** -## `GET /api/revision/` +## Revision + + - [GET /api/revision/](#get-apirevision) + - [POST /api/revision/revert](#post-apirevisionrevert) + +### `GET /api/revision/` Get revisions of an object. @@ -2524,8 +2756,7 @@ Get revisions of an object. * **`id`** value must be an integer. - -## `POST /api/revision/revert` +### `POST /api/revision/revert` Revert an object to a prior revision. @@ -2538,8 +2769,16 @@ Revert an object to a prior revision. * **`revision_id`** value must be an integer. 
-## `GET /api/search/`
+## Search
+
+ - [GET /api/search/](#get-apisearch)
+ - [GET /api/search/models](#get-apisearchmodels)
+### `GET /api/search/`
+
 Search within a bunch of models for the substring `q`.
   For the list of models, check `metabase.search.config/searchable-models.

@@ -2548,7 +2787,11 @@ Search within a bunch of models for the substring `q`.
   pass in a DB id value to `table_db_id`.
   To specify a list of models, pass in an array to `models`.

 ##### PARAMS:

@@ -2557,6 +2800,7 @@ Search within a bunch of models for the substring `q`.

 * **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false').

 * **`table_db_id`** value may be nil, or if non-nil, value must be an integer greater than zero.
 * **`models`** value may be nil, or if non-nil, value must satisfy one of the following requirements: 1) value must be an array. Each value must be a non-blank string. 2) value must be a non-blank string.

@@ -2573,10 +2817,40 @@ Get the set of models that a search query will return

 * **`table-db-id`**

+
+* **`models`** value may be nil, or if non-nil, value must satisfy one of the following requirements: 1) value must be an array. Each value must be a non-blank string. 2) value must be a non-blank string.
+
+### `GET /api/search/models`
+
+Get the set of models that a search query will return.
+
+##### PARAMS:
+
+* **`q`**
+
+* **`archived-string`**
+
+* **`table-db-id`**
+
+
+## Segment
+
+/api/segment endpoints.
-## `DELETE /api/segment/:id` + - [DELETE /api/segment/:id](#delete-apisegmentid) + - [GET /api/segment/](#get-apisegment) + - [GET /api/segment/:id](#get-apisegmentid) + - [GET /api/segment/:id/related](#get-apisegmentidrelated) + - [GET /api/segment/:id/revisions](#get-apisegmentidrevisions) + - [POST /api/segment/](#post-apisegment) + - [POST /api/segment/:id/revert](#post-apisegmentidrevert) + - [PUT /api/segment/:id](#put-apisegmentid) -Archive a Segment. (DEPRECATED -- Just pass updated value of `:archived` to the `PUT` endpoint instead.) +### `DELETE /api/segment/:id` + +Archive a Segment. (DEPRECATED -- Just pass updated value of `:archived` to the `PUT` endpoint instead.). ##### PARAMS: @@ -2584,13 +2858,11 @@ Archive a Segment. (DEPRECATED -- Just pass updated value of `:archived` to the * **`revision_message`** value must be a non-blank string. - -## `GET /api/segment/` +### `GET /api/segment/` Fetch *all* `Segments`. - -## `GET /api/segment/:id` +### `GET /api/segment/:id` Fetch `Segment` with ID. @@ -2598,8 +2870,7 @@ Fetch `Segment` with ID. * **`id`** - -## `GET /api/segment/:id/related` +### `GET /api/segment/:id/related` Return related entities. @@ -2607,8 +2878,7 @@ Return related entities. * **`id`** - -## `GET /api/segment/:id/revisions` +### `GET /api/segment/:id/revisions` Fetch `Revisions` for `Segment` with ID. @@ -2616,8 +2886,7 @@ Fetch `Revisions` for `Segment` with ID. * **`id`** - -## `POST /api/segment/` +### `POST /api/segment/` Create a new `Segment`. @@ -2631,8 +2900,7 @@ Create a new `Segment`. * **`definition`** value must be a map. - -## `POST /api/segment/:id/revert` +### `POST /api/segment/:id/revert` Revert a `Segement` to a prior `Revision`. @@ -2642,8 +2910,7 @@ Revert a `Segement` to a prior `Revision`. * **`revision_id`** value must be an integer greater than zero. - -## `PUT /api/segment/:id` +### `PUT /api/segment/:id` Update a `Segment` with ID. @@ -2668,7 +2935,19 @@ Update a `Segment` with ID. 
* **`id`** -## `DELETE /api/session/` +## Session + +/api/session endpoints. + + - [DELETE /api/session/](#delete-apisession) + - [GET /api/session/password_reset_token_valid](#get-apisessionpassword_reset_token_valid) + - [GET /api/session/properties](#get-apisessionproperties) + - [POST /api/session/](#post-apisession) + - [POST /api/session/forgot_password](#post-apisessionforgot_password) + - [POST /api/session/google_auth](#post-apisessiongoogle_auth) + - [POST /api/session/reset_password](#post-apisessionreset_password) + +### `DELETE /api/session/` Logout. @@ -2676,8 +2955,7 @@ Logout. * **`metabase-session-id`** - -## `GET /api/session/password_reset_token_valid` +### `GET /api/session/password_reset_token_valid` Check is a password reset token is valid and isn't expired. @@ -2685,13 +2963,11 @@ Check is a password reset token is valid and isn't expired. * **`token`** value must be a string. - -## `GET /api/session/properties` +### `GET /api/session/properties` Get all global properties and their values. These are the specific `Settings` which are meant to be public. - -## `POST /api/session/` +### `POST /api/session/` Login. @@ -2703,8 +2979,7 @@ Login. * **`request`** - -## `POST /api/session/forgot_password` +### `POST /api/session/forgot_password` Send a reset email when user has forgotten their password. @@ -2716,8 +2991,7 @@ Send a reset email when user has forgotten their password. * **`request`** - -## `POST /api/session/google_auth` +### `POST /api/session/google_auth` Login with Google Auth. @@ -2727,8 +3001,7 @@ Login with Google Auth. * **`request`** - -## `POST /api/session/reset_password` +### `POST /api/session/reset_password` Reset password with a reset token. @@ -2741,14 +3014,22 @@ Reset password with a reset token. * **`request`** -## `GET /api/setting/` +## Setting + +/api/setting endpoints. 
+ + - [GET /api/setting/](#get-apisetting) + - [GET /api/setting/:key](#get-apisettingkey) + - [PUT /api/setting/](#put-apisetting) + - [PUT /api/setting/:key](#put-apisettingkey) + +### `GET /api/setting/` Get all `Settings` and their values. You must be a superuser to do this. You must be a superuser to do this. - -## `GET /api/setting/:key` +### `GET /api/setting/:key` Fetch a single `Setting`. You must be a superuser to do this. @@ -2758,8 +3039,7 @@ You must be a superuser to do this. * **`key`** value must be a non-blank string. - -## `PUT /api/setting/` +### `PUT /api/setting/` Update multiple `Settings` values. You must be a superuser to do this. @@ -2769,8 +3049,7 @@ You must be a superuser to do this. * **`settings`** - -## `PUT /api/setting/:key` +### `PUT /api/setting/:key` Create/update a `Setting`. You must be a superuser to do this. This endpoint can also be used to delete Settings by passing `nil` for `:value`. @@ -2784,14 +3063,19 @@ You must be a superuser to do this. * **`value`** -## `GET /api/setup/admin_checklist` +## Setup + + - [GET /api/setup/admin_checklist](#get-apisetupadmin_checklist) + - [POST /api/setup/](#post-apisetup) + - [POST /api/setup/validate](#post-apisetupvalidate) + +### `GET /api/setup/admin_checklist` Return various "admin checklist" steps and whether they've been completed. You must be a superuser to see this! You must be a superuser to do this. - -## `POST /api/setup/` +### `POST /api/setup/` Special endpoint for creating the first user during setup. This endpoint both creates the user AND logs them in and returns a session ID. @@ -2832,8 +3116,7 @@ Special endpoint for creating the first user during setup. This endpoint both cr * **`last_name`** value must be a non-blank string. - -## `POST /api/setup/validate` +### `POST /api/setup/validate` Validate that we can connect to a database given a set of details. @@ -2846,7 +3129,13 @@ Validate that we can connect to a database given a set of details. 
* **`token`** Token does not match the setup token. -## `PUT /api/slack/settings` +## Slack + +/api/slack endpoints. + + - [PUT /api/slack/settings](#put-apislacksettings) + +### `PUT /api/slack/settings` Update Slack related settings. You must be a superuser to do this. @@ -2861,12 +3150,28 @@ You must be a superuser to do this. * **`slack-settings`** -## `GET /api/table/` +## Table -Get all `Tables`. +/api/table endpoints. + + - [GET /api/table/](#get-apitable) + - [GET /api/table/:id](#get-apitableid) + - [GET /api/table/:id/fks](#get-apitableidfks) + - [GET /api/table/:id/query_metadata](#get-apitableidquery_metadata) + - [GET /api/table/:id/related](#get-apitableidrelated) + - [GET /api/table/card__:id/fks](#get-apitablecard__idfks) + - [GET /api/table/card__:id/query_metadata](#get-apitablecard__idquery_metadata) + - [POST /api/table/:id/discard_values](#post-apitableiddiscard_values) + - [POST /api/table/:id/rescan_values](#post-apitableidrescan_values) + - [PUT /api/table/](#put-apitable) + - [PUT /api/table/:id](#put-apitableid) + - [PUT /api/table/:id/fields/order](#put-apitableidfieldsorder) +### `GET /api/table/` -## `GET /api/table/:id` +Get all `Tables`. + +### `GET /api/table/:id` Get `Table` with ID. @@ -2874,8 +3179,7 @@ Get `Table` with ID. * **`id`** - -## `GET /api/table/:id/fks` +### `GET /api/table/:id/fks` Get all foreign keys whose destination is a `Field` that belongs to this `Table`. @@ -2883,8 +3187,7 @@ Get all foreign keys whose destination is a `Field` that belongs to this `Table` * **`id`** - -## `GET /api/table/:id/query_metadata` +### `GET /api/table/:id/query_metadata` Get metadata about a `Table` useful for running queries. Returns DB, fields, field FKs, and field values. @@ -2902,8 +3205,7 @@ Get metadata about a `Table` useful for running queries. * **`include_hidden_fields`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). 
- -## `GET /api/table/:id/related` +### `GET /api/table/:id/related` Return related entities. @@ -2911,14 +3213,12 @@ Return related entities. * **`id`** - -## `GET /api/table/card__:id/fks` +### `GET /api/table/card__:id/fks` Return FK info for the 'virtual' table for a Card. This is always empty, so this endpoint serves mainly as a placeholder to avoid having to change anything on the frontend. - -## `GET /api/table/card__:id/query_metadata` +### `GET /api/table/card__:id/query_metadata` Return metadata for the 'virtual' table for a Card. @@ -2926,8 +3226,7 @@ Return metadata for the 'virtual' table for a Card. * **`id`** - -## `POST /api/table/:id/discard_values` +### `POST /api/table/:id/discard_values` Discard the FieldValues belonging to the Fields in this Table. Only applies to fields that have FieldValues. If this Table's Database is set up to automatically sync FieldValues, they will be recreated during the next cycle. @@ -2938,8 +3237,7 @@ You must be a superuser to do this. * **`id`** - -## `POST /api/table/:id/rescan_values` +### `POST /api/table/:id/rescan_values` Manually trigger an update for the FieldValues for the Fields belonging to this Table. Only applies to Fields that are eligible for FieldValues. @@ -2950,8 +3248,7 @@ You must be a superuser to do this. * **`id`** - -## `PUT /api/table/` +### `PUT /api/table/` Update all `Table` in `ids`. @@ -2973,8 +3270,7 @@ Update all `Table` in `ids`. * **`show_in_getting_started`** value may be nil, or if non-nil, value must be a boolean. - -## `PUT /api/table/:id` +### `PUT /api/table/:id` Update `Table` with ID. @@ -2998,10 +3294,9 @@ Update `Table` with ID. * **`id`** +### `PUT /api/table/:id/fields/order` -## `PUT /api/table/:id/fields/order` - -Reorder fields +Reorder fields. You must be a superuser to do this. @@ -3012,14 +3307,26 @@ You must be a superuser to do this. * **`field_order`** value must be an array. Each value must be an integer greater than zero. 
-## `GET /api/task/`
+## Task
+
+/api/task endpoints.

-Fetch a list of recent tasks stored as Task History
+ - [GET /api/task/](#get-apitask)
+ - [GET /api/task/:id](#get-apitaskid)
+ - [GET /api/task/info](#get-apitaskinfo)
+
+### `GET /api/task/`
+
+Fetch a list of recent tasks stored as Task History.

 You must be a superuser to do this.

-## `GET /api/task/:id`
+### `GET /api/task/:id`

 Get `TaskHistory` entry with ID.

@@ -3027,15 +3334,20 @@

 * **`id`**

-
-## `GET /api/task/info`
+### `GET /api/task/info`

 Return raw data about all scheduled tasks (i.e., Quartz Jobs and Triggers).

 You must be a superuser to do this.

-## `GET /api/tiles/:zoom/:x/:y/:lat-field-id/:lon-field-id/:lat-col-idx/:lon-col-idx/`
+## Tiles
+
+`/api/tiles` endpoints.
+
+ - [GET /api/tiles/:zoom/:x/:y/:lat-field-id/:lon-field-id/:lat-col-idx/:lon-col-idx/](#get-apitileszoomxylat-field-idlon-field-idlat-col-idxlon-col-idx)
+
+### `GET /api/tiles/:zoom/:x/:y/:lat-field-id/:lon-field-id/:lat-col-idx/:lon-col-idx/`

 This endpoints provides an image with the appropriate pins rendered given a MBQL `query` (passed as a GET query
 string param).  We evaluate the query and find the set of lat/lon pairs which are relevant and then render the
+
+ - [DELETE /api/user/:id](#delete-apiuserid)
+ - [GET /api/user/](#get-apiuser)
+ - [GET /api/user/:id](#get-apiuserid)
+ - [GET /api/user/current](#get-apiusercurrent)
+ - [POST /api/user/](#post-apiuser)
+ - [POST /api/user/:id/send_invite](#post-apiuseridsend_invite)
+ - [PUT /api/user/:id](#put-apiuserid)
+ - [PUT /api/user/:id/password](#put-apiuseridpassword)
+ - [PUT /api/user/:id/qbnewb](#put-apiuseridqbnewb)
+ - [PUT /api/user/:id/reactivate](#put-apiuseridreactivate)
+
+### `DELETE /api/user/:id`

 Disable a `User`.  This does not remove the `User` from the DB, but instead disables their account.

@@ -3084,11 +3415,15 @@ You must be a superuser to do this.

 * **`id`**

+### `GET /api/user/`

-## `GET /api/user/`

 Fetch a list of `Users`. By default returns every active user but only active users.

 If `status` is `deactivated`, include deactivated users only.
   If `status` is `all`, include all users (active and inactive). Also supports `include_deactivated`, which if true, is equivalent
   to `status=all`.

@@ -3110,8 +3445,7 @@ Fetch a list of `Users`. By default returns every active us

 * **`include_deactivated`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false').

-
-## `GET /api/user/:id`
+### `GET /api/user/:id`

 Fetch a `User`. You must be fetching yourself *or* be a superuser.

@@ -3119,15 +3453,13 @@ Fetch a `User`. You must be fetching yourself *or* be a superuser.

 * **`id`**

-
-## `GET /api/user/current`
+### `GET /api/user/current`

 Fetch the current `User`.

+### `POST /api/user/`

-## `POST /api/user/`
-
-Create a new `User`, return a 400 if the email address is already taken
+Create a new `User`, return a 400 if the email address is already taken.

 You must be a superuser to do this.

@@ -3145,8 +3477,7 @@ You must be a superuser to do this.
* **`login_attributes`** value may be nil, or if non-nil, login attribute keys must be a keyword or string - -## `POST /api/user/:id/send_invite` +### `POST /api/user/:id/send_invite` Resend the user invite email for a given user. @@ -3156,8 +3487,7 @@ You must be a superuser to do this. * **`id`** - -## `PUT /api/user/:id` +### `PUT /api/user/:id` Update an existing, active `User`. @@ -3179,8 +3509,7 @@ Update an existing, active `User`. * **`locale`** value may be nil, or if non-nil, String must be a valid two-letter ISO language or language-country code e.g. en or en_US. - -## `PUT /api/user/:id/password` +### `PUT /api/user/:id/password` Update a user's password. @@ -3192,8 +3521,7 @@ Update a user's password. * **`old_password`** - -## `PUT /api/user/:id/qbnewb` +### `PUT /api/user/:id/qbnewb` Indicate that a user has been informed about the vast intricacies of 'the' Query Builder. @@ -3201,10 +3529,9 @@ Indicate that a user has been informed about the vast intricacies of 'the' Query * **`id`** +### `PUT /api/user/:id/reactivate` -## `PUT /api/user/:id/reactivate` - -Reactivate user at `:id` +Reactivate user at `:id`. You must be a superuser to do this. @@ -3213,42 +3540,59 @@ You must be a superuser to do this. * **`id`** -## `GET /api/util/bug_report_details` +## Util + +Random utilty endpoints for things that don't belong anywhere else in particular, e.g. endpoints for certain admin + page tasks. + + - [GET /api/util/bug_report_details](#get-apiutilbug_report_details) + - [GET /api/util/diagnostic_info/connection_pool_info](#get-apiutildiagnostic_infoconnection_pool_info) + - [GET /api/util/logs](#get-apiutillogs) + - [GET /api/util/random_token](#get-apiutilrandom_token) + - [GET /api/util/stats](#get-apiutilstats) + - [POST /api/util/password_check](#post-apiutilpassword_check) + +### `GET /api/util/bug_report_details` Returns version and system information relevant to filing a bug report against Metabase. You must be a superuser to do this. 
+### `GET /api/util/diagnostic_info/connection_pool_info`

-## `GET /api/util/diagnostic_info/connection_pool_info`

 Returns database connection pool info for the current Metabase instance.

 You must be a superuser to do this.

-## `GET /api/util/logs`
+### `GET /api/util/logs`

 Logs.

 You must be a superuser to do this.

-
-## `GET /api/util/random_token`
+### `GET /api/util/random_token`

 Return a cryptographically secure random 32-byte token, encoded as a hexadecimal string.
   Intended for use when creating a value for `embedding-secret-key`.

-
-## `GET /api/util/stats`
+### `GET /api/util/stats`

 Anonymous usage stats. Endpoint for testing, and eventually exposing this to instance admins to let them see
   what is being phoned home.

 You must be a superuser to do this.

-
-## `POST /api/util/password_check`
+### `POST /api/util/password_check`

 Endpoint that checks if the supplied password meets the currently configured password complexity rules.
diff --git a/docs/developers-guide.md b/docs/developers-guide.md
index 0001994c2de8..77a8c5955d1c 100644
--- a/docs/developers-guide.md
+++ b/docs/developers-guide.md
@@ -1,5 +1,8 @@
-**This guide will teach you:**
+
+<<<<<<< HEAD
 - [How to compile your own copy of Metabase](#build-metabase)
 - [How to set up a development environment](#development-environment)
 - [How to run the Metabase Server](#development-server-quick-start)
@@ -268,3 +271,6 @@ If you see incorrect or missing strings for your language, please visit our [POE
 Copyright © 2021 Metabase, Inc. Distributed under the terms of the GNU Affero General Public License (AGPL) except as otherwise noted.  See individual files for details.
+=======
+Please refer to the detailed [Developer's Guide](developers-guide/start.md).
+>>>>>>> tags/v0.41.0 diff --git a/docs/developers-guide-osx.md b/docs/developers-guide/build.md similarity index 73% rename from docs/developers-guide-osx.md rename to docs/developers-guide/build.md index d53c3255bf2c..d5e6b58e458c 100644 --- a/docs/developers-guide-osx.md +++ b/docs/developers-guide/build.md @@ -1,15 +1,45 @@ -# Metabase OS X App +# Build Metabase -NOTE: These instructions are only for packaging a built Metabase uberjar into `Metabase.app`. They are not useful if your goal is to work on Metabase itself; for development, please see -our [developers' guide](developers-guide.md). +## Install Prerequisites + +These are the tools which are required in order to complete any build of the Metabase code. Follow the links to download and install them on your own before continuing. + +1. [Clojure (https://clojure.org)](https://clojure.org/guides/getting_started) - install the latest release by following the guide depending on your OS +2. [Java Development Kit JDK (https://adoptopenjdk.net/releases.html)](https://adoptopenjdk.net/releases.html) - you need to install JDK 11 ([more info on Java versions](../operations-guide/java-versions.md)) +3. [Node.js (http://nodejs.org/)](http://nodejs.org/) - latest LTS release +4. [Yarn package manager for Node.js](https://yarnpkg.com/) - latest release of version 1.x - you can install it in any OS by doing `npm install --global yarn` + +On a most recent stable Ubuntu/Debian, all the tools above, with the exception of Clojure, can be installed by using: + +``` +sudo apt install openjdk-11-jdk nodejs && sudo npm install --global yarn +``` +If you have multiple JDK versions installed in your machine, be sure to switch your JDK before building by doing `sudo update-alternatives --config java` and selecting Java 11 in the menu + +If you are developing on Windows, make sure to use Ubuntu on Windows and follow instructions for Ubuntu/Linux instead of installing ordinary Windows versions. 
+
+Alternatively, without the need to explicitly install the above dependencies, follow the guide [on using Visual Studio Code](devenv.md#developing-with-visual-studio-code) and its remote container support.
+
+## Build Metabase Uberjar
+
+The entire Metabase application is compiled and assembled into a single .jar file which can run on any modern JVM. There is a script which will execute all steps in the process and output the final artifact for you. You can pass the environment variable MB_EDITION before running the build script to choose the version that you want to build. If you don't provide a value, the default is `oss` which will build the Community Edition.
+
+    ./bin/build
+
+After running the build script simply look in `target/uberjar` for the output .jar file and you are ready to go.
+
+## Building macOS App (`Metabase.app`)
+
+NOTE: These instructions are only for packaging a built Metabase uberjar into `Metabase.app`. They are not useful if your goal is to work on Metabase itself; for development, please see above.
+
+### First-Time Configuration
+
-## First-Time Configuration
Steps -### Building +#### Building The following steps need to be done before building the Mac App: @@ -60,7 +90,7 @@ The following steps need to be done before building the Mac App: At this point, you should try opening up the Xcode project and building the Mac App in Xcode by clicking the run button. The app should build and launch at this point. If it doesn't, ask Cam for help! -### Releasing +#### Releasing The following steps are prereqs for *releasing* the Mac App: @@ -132,7 +162,7 @@ clojure.org](https://www.clojure.org/guides/getting_started) for more details.
-## Building & Releasing the Mac App +### Building & Releasing the Mac App After following the configuration steps above, to build and release the app you can use the build script: diff --git a/docs/developers-guide/contributing.md b/docs/developers-guide/contributing.md new file mode 100644 index 000000000000..3a873d4b51fa --- /dev/null +++ b/docs/developers-guide/contributing.md @@ -0,0 +1,7 @@ +# Contributing + +In general, we like to have an open issue for every pull request as a place to discuss the nature of any bug or proposed improvement. Each pull request should address a single issue, and contain both the fix as well as a description of the pull request and tests that validate that the PR fixes the issue in question. + +For significant feature additions, it is expected that discussion will have taken place in the attached issue. Any feature that requires a major decision to be reached will need to have an explicit design document written. The goals of this document are to make explicit the assumptions, constraints and tradeoffs any given feature implementation will contain. The point is not to generate documentation but to allow discussion to reference a specific proposed design and to allow others to consider the implications of a given design. + +We don't like getting sued, so before merging any pull request, we'll need each person contributing code to sign a Contributor License Agreement [here](https://docs.google.com/a/metabase.com/forms/d/1oV38o7b9ONFSwuzwmERRMi9SYrhYeOrkbmNaq9pOJ_E/viewform). \ No newline at end of file diff --git a/docs/developers-guide/devenv.md b/docs/developers-guide/devenv.md new file mode 100644 index 000000000000..ba742b48c648 --- /dev/null +++ b/docs/developers-guide/devenv.md @@ -0,0 +1,252 @@ +# Development Environment + +If you plan to work on the Metabase code and make changes then you'll need to understand a few more things. + +## Overview + +The Metabase application has two basic components: + +1. 
a backend written in Clojure which contains a REST API as well as all the relevant code for talking to databases and processing queries. +2. a frontend written as a Javascript single-page application which provides the web UI. + +Both components are built and assembled together into a single jar file which runs the entire application. + +## 3rd party dependencies + +Metabase depends on lots of third-party libraries to run, so you'll need to keep those up to date. The Clojure CLI will automatically fetch the dependencies when needed. With JavaScript dependencies, however, you'll need to kick off the installation process manually. + +```sh +# javascript dependencies +$ yarn +``` + +## Development server (quick start) + +Run your backend development server with + + clojure -M:run + +Start the frontend build process with + + yarn build-hot + +## Frontend development + +We use these technologies for our FE build process to allow us to use modules, es6 syntax, and css variables. + +- webpack +- babel +- cssnext + +Frontend tasks are executed using `yarn`. All available tasks can be found in `package.json` under _scripts_. + +To build the frontend client without watching for changes, you can use: + +```sh +$ yarn build +``` + +If you're working on the frontend directly, you'll most likely want to reload changes on save, and in the case of React components, do so while maintaining state. To start a build with hot reloading, use: + +```sh +$ yarn build-hot +``` + +Note that at this time if you change CSS variables, those changes will only be picked up when a build is restarted. + +There is also an option to reload changes on save without hot reloading if you prefer that. + +```sh +$ yarn build-watch +``` + +Some systems may have trouble detecting changes to frontend files. You can enable filesystem polling by uncommenting the `watchOptions` clause in `webpack.config.js`. 
If you do this it may be worth making git ignore changes to webpack config, using `git update-index --assume-unchanged webpack.config.js` + +## Frontend testing + +All frontend tests are located in `frontend/test` directory. Run all frontend tests with + +``` +yarn test +``` + +which will run unit and Cypress end-to-end tests in sequence. + +## Frontend debugging + +By default, we use a simple source mapping option that is optimized for speed. + +If you run into issues with breakpoints, especially inside jsx, please set env variable `BETTER_SOURCE_MAPS` to true before you run the server. + +Example: + +``` +BETTER_SOURCE_MAPS=true yarn dev +``` + +### Cypress end-to-end tests + +End-to-end tests simulate realistic sequences of user interactions. Read more about how we approach [end-to-end testing with Cypress](./e2e-tests.md). + +Cypress end-to-end tests use an enforced file naming convention `.cy.spec.js` to separate them from unit tests. + +### Jest unit tests + +Unit tests are focused around isolated parts of business logic. + +Unit tests use an enforced file naming convention `.unit.spec.js` to separate them from end-to-end tests. + +``` +yarn test-unit # Run all tests at once +yarn test-unit-watch # Watch for file changes +``` + +## Backend development + +Clojure REPL is the main development tool for the backend. There are some directions below on how to setup your REPL for easier development. + +And of course your Jetty development server is available via + + clojure -M:run + +### Building drivers + +Most of the drivers Metabase uses to connect to external data warehouse databases are separate projects under the +`modules/` subdirectory. When running Metabase via `clojure`, you'll need to build these drivers in order to have access +to them. 
You can build drivers as follows: + +``` +# Build the 'mongo' driver +./bin/build-driver.sh mongo +``` + +(or) + +``` +# Build all drivers +./bin/build-drivers.sh +``` + +### Including driver source paths for development or other tasks + +For development when running various Clojure tasks you can add the `drivers` and `drivers-dev` aliases to merge the +drivers' dependencies and source paths into the Metabase project: + +``` +# Install dependencies, including for drivers +clojure -P -X:dev:ci:drivers:drivers-dev +``` + +#### Unit Tests / Linting + +Run unit tests with + + # OSS tests only + clojure -X:dev:test + + # OSS + EE tests + clojure -X:dev:ee:ee-dev:test + +or a specific test (or test namespace) with + + # run tests in only one namespace (pass in a symbol) + clojure -X:dev:test :only metabase.api.session-test + + # run one specific test (pass in a qualified symbol) + clojure -X:dev:test :only metabase.api.session-test/my-test + + # run tests in one specific folder (test/metabase/util in this example) + # pass arg in double-quotes so Clojure CLI interprets it as a string; + # our test runner treats strings as directories + clojure -X:dev:test :only '"test/metabase/util"' + +By default, the tests only run against the `h2` driver. You can specify which drivers to run tests against with the env var `DRIVERS`: + + DRIVERS=h2,postgres,mysql,mongo clojure -X:dev:drivers:drivers-dev:test + +Some drivers require additional environment variables when testing since they are impossible to run locally (such as Redshift and Bigquery). The tests will fail on launch and let you know what parameters to supply if needed. + +##### Run the linters: + +`clj-kondo` must be installed separately; see https://github.com/clj-kondo/clj-kondo/blob/master/doc/install.md for +instructions. 
+ + # Run Eastwood + clojure -X:dev:ee:ee-dev:drivers:drivers-dev:eastwood + + # Run the namespace checker + clojure -X:dev:ee:ee-dev:drivers:drivers-dev:namespace-checker + + # Run clj-kondo + clj-kondo --parallel --lint src shared/src enterprise/backend/src --config lint-config.edn + +### Developing with Emacs + +`.dir-locals.el` contains some Emacs Lisp that tells `clojure-mode` how to indent Metabase macros and which arguments are docstrings. Whenever this file is updated, +Emacs will ask you if the code is safe to load. You can answer `!` to save it as safe. + +By default, Emacs will insert this code as a customization at the bottom of your `init.el`. +You'll probably want to tell Emacs to store customizations in a different file. Add the following to your `init.el`: + +```emacs-lisp +(setq custom-file (concat user-emacs-directory ".custom.el")) ; tell Customize to save customizations to ~/.emacs.d/.custom.el +(ignore-errors ; load customizations from ~/.emacs.d/.custom.el + (load-file custom-file)) +``` + +## Developing with Visual Studio Code + +### Debugging + +First, install the following extension: +* [Debugger for Firefox](https://marketplace.visualstudio.com/items?itemName=firefox-devtools.vscode-firefox-debug) + +_Note_: Debugger for Chrome has been deprecated. You can safely delete it as Visual Studio Code now has [a bundled JavaScript Debugger](https://github.com/microsoft/vscode-js-debug) that covers the same functionality. + +Before starting the debugging session, make sure that Metabase is built and running. Choose menu _View_, _Command Palette_, search for and choose _Tasks: Run Build Task_. Alternatively, use the corresponding shortcut `Ctrl+Shift+B`. The built-in terminal will appear to show the progress; wait a few moments until webpack indicates a complete (100%) bundling.
+ +To begin debugging Metabase, switch to the Debug view (shortcut: `Ctrl+Shift+D`) and then select one of the two launch configurations from the drop-down at the top: + +* Debug with Firefox, or +* Debug with Chrome + +After that, begin the debugging session by choosing menu _Run_, _Start Debugging_ (shortcut: `F5`). + +For more details, please refer to the complete VS Code documentation on [Debugging](https://code.visualstudio.com/docs/editor/debugging). + +### Docker-based Workflow + +These instructions allow you to work on Metabase codebase on Windows, Linux, or macOS using [Visual Studio Code](https://code.visualstudio.com/), **without** manually installing the necessary dependencies. This is possible by leveraging Docker container and the Remote Containers extension from VS Code. + +For more details, please follow the complete VS Code guide on [Developing inside a Container](https://code.visualstudio.com/docs/remote/containers). The summary is as follows. + +Requirements: + +* [Visual Studio Code](https://code.visualstudio.com/) (obviously) +* [Docker](https://www.docker.com/) +* [Remote - Containers extension](vscode:extension/ms-vscode-remote.remote-containers) for VS Code + +_Important_: Ensure that Docker is running properly and it can be used to download an image and launch a container, e.g. by running: + +``` +$ docker run hello-world +``` +If everything goes well, you should see the following message: + +``` +Hello from Docker! +This message shows that your installation appears to be working correctly. +``` + +Steps: + +1. Clone Metabase repository + +2. Launch VS Code and open your cloned Metabase repository + +3. From the _View_ menu, choose _Command Palette..._ and then find _Remote-Container: Reopen in Container_. (VS Code may also prompt you to do this with an "Open in container" popup). +**Note**: VS Code will create the container for the first time and it may take some time. Subsequent loads should be much faster. + +4. 
Use the menu _View_, _Command Palette_, search for and choose _Tasks: Run Build Task_ (alternatively, use the shortcut `Ctrl+Shift+B`). + +5. After a while (after all JavaScript and Clojure dependencies are completely downloaded), open localhost:3000 with your web browser. diff --git a/docs/developers-guide/e2e-tests.md b/docs/developers-guide/e2e-tests.md new file mode 100644 index 000000000000..20965b135c59 --- /dev/null +++ b/docs/developers-guide/e2e-tests.md @@ -0,0 +1,85 @@ +# End-to-end Tests with Cypress + +Metabase uses Cypress for “end-to-end testing”, that is, tests that are executed against the application as a whole, including the frontend, backend, and application database. These tests are essentially scripts written in JavaScript that run in the web browser: visit different URLs, click various UI elements, type text, and assert that things happen as expected (for example, an element appearing on screen, or a network request occurring). + +## Getting Started + +Metabase’s Cypress tests are located in the `frontend/test/metabase/scenarios` source tree, in a structure that roughly mirrors Metabase’s URL structure. For example, tests for the admin “datamodel” pages are located in `frontend/test/metabase/scenarios/admin/datamodel`. + +During development you will want to run `yarn build-hot` to continuously build the frontend, and `yarn test-cypress-open` to open the Cypress application where you can execute the tests you are working on. + +To run all Cypress tests programmatically in the terminal: +``` +yarn run test-cypress-no-build +``` + +You can run a specific set of scenarios by using the `--folder` flag, which will pick up the chosen scenarios under `frontend/test/metabase/scenarios/`. + +``` +yarn run test-cypress-no-build --folder sharing +``` + +You can quickly test a single file only by using the `--spec` flag.
+ +``` +yarn test-cypress-no-build --spec frontend/test/metabase/scenarios/question/new.cy.spec.js +``` + +Cypress test files are structured like Mocha tests, where `describe` blocks are used to group related tests, and `it` blocks are the tests themselves. + +```js +describe("homepage",() => { + it('should load the homepage and...', () => { + cy.visit("/metabase/url"); + // ... + }) +}) +``` + +We strongly prefer using selectors like `cy.findByText()` and `cy.findByLabelText()` from [`@testing-library/cypress`](https://github.com/testing-library/cypress-testing-library) since they encourage writing tests that don't depend on implementation details like CSS class names. + +Try to avoid repeatedly testing pieces of the application incidentally. For example, if you want to test something about the query builder, jump straight there using a URL like `cy.visit("/question/new?database=1&table=2");` rather than starting from the home page, clicking "Ask a question", etc. + +## Cypress Documentation + +* Introduction: https://docs.cypress.io/guides/core-concepts/introduction-to-cypress.html#Querying-by-Text-Content +* Commands: https://docs.cypress.io/api/api/table-of-contents.html +* Assertions: https://docs.cypress.io/guides/references/assertions.html + +## Tips/Gotchas + +### `contains` vs `find` vs `get` +(TODO: talk about `@testing-library/cypress`). Cypress has a set of similar commands for selecting elements. Here are some tips for using them: +* `contains` is case-sensitive to the text *in the DOM*. If it’s not matching text you’d expect, check that CSS hasn’t updated the case. +* `contains` matches substrings, so if you see “filter by” and “Add a filter”, `contains(“filter”)` will match both. To avoid these issues, you can either pass a regexp that pins the start/end of the string or pass a selector in addition to the string: `.contains(selector, content)`. +* `find` will let you search within your previous selection. 
`get` will search the entire page even if chained. + +### Increase viewport size to avoid scrolling +Sometimes Metabase views are a bit large for Cypress’s default 1000x660 viewport. This can require you to scroll for tests to work. To avoid that, you can increase the viewport size for a specific test by calling `cy.viewport(width, height)`. + +### Code reloading vs test reloading +When you edit a Cypress test file, the tests will refresh and run again. However, when you edit a code file, Cypress won’t detect that change. If you’re running `yarn build-hot`, the code will rebuild and update within Cypress. You’ll have to manually click rerun after the new code has loaded. + +### Inspecting while the “contains helper” is open +One great feature of Cypress is that you can use the Chrome inspector after each step of a test. They also helpfully provide a helper that can test out `contains` and `get` calls. This helper creates new UI that prevents inspecting from targeting the correct elements. If you want to inspect the DOM in Chrome, you should close this helper. + +### Putting the wrong HTML template in the Uberjar +`yarn build` and `yarn build-hot` each overwrite an HTML template to reference the correct Javascript files. If you run `yarn build` before building an Uberjar for Cypress tests, you won’t see changes to your Javascript reflected even if you then start `yarn build-hot`. + + +## DB Snapshots + +At the beginning of each test suite we wipe the backend's db and settings cache. This ensures that the test suite starts in a predictable state. + +Typically, we use the default snapshot by adding `before(restore)` inside the first `describe` block to restore before running the whole test suite. If you want to use a snapshot besides the default one, specify the name as an argument to `restore` like this: `before(() => restore("blank"))`. You can also call `restore()` inside `beforeEach()` to reset before every test, or inside specific tests. 
+ +Snapshots are created with a separate set of Cypress tests. These tests start with a blank database and execute specific actions to put the database in predictable state. For example: signup as bob@metabase.com, add a question, turn on setting ABC. + +These snapshot-generating tests have the extension `.cy.snap.js`. When these tests run they create db dumps in `frontend/tests/snapshots/*.sql`. They are run before the tests begin and don't get committed to git. + +## Running in CI +Cypress records videos of each test run, which can be helpful in debugging. Additionally, failed tests have higher quality images saved. + + +These files can be found under the “Artifacts” tab in Circle: +![Circle CI Artifacts tab](https://user-images.githubusercontent.com/691495/72190614-f5995380-33cd-11ea-875e-4203d6dcf1c1.png) \ No newline at end of file diff --git a/docs/developers-guide/frontend.md b/docs/developers-guide/frontend.md new file mode 100644 index 000000000000..85de2c4fda5a --- /dev/null +++ b/docs/developers-guide/frontend.md @@ -0,0 +1,602 @@ +# Frontend + + +## Entity Loaders + +If you're developing a new feature or just generally need to get at some of the application data on the frontend, Entity Loaders are going to be your friend. They abstract away calling the API, handling loading and error state, cache previously loaded objects, invalidating the cache (in some cases) and let you easily perform updates, or create new items. + + +### Good uses for Entity Loaders + +- I need to get a specific X (user, database, etc) and display it. +- I need to get a list of X (databases, questions, etc) and display it. 
+ +### Currently available entities: + +- Questions, Dashboards, Pulses +- Collections +- Databases, Tables, Fields, Segments, Metrics +- Users, Groups +- Full current list of entities here: https://github.com/metabase/metabase/tree/master/frontend/src/metabase/entities + + +There are two ways to use loaders, either as React "render prop" components or as React component class decorators ("higher order components"). + + +### Object loading + +In this example we're going to load information about a specific database for a new page. + +```js +import React from "react" +import Databases from "metabase/entities/databases" + + +@Databases.load({ id: 4 }) +class MyNewPage extends React.Component { + render () { + const { database } = this.props + return ( +
+

{database.name}

+
+ ) + } +} +``` + + +This example uses a class decorator to ask for and then display a database with ID 4. If you instead wanted to use a render prop component your code would look like this. + + +```js +import React from "react" +import Databases from "metabase/entities/databases" + +class MyNewPage extends React.Component { + render () { + const { database } = this.props + return ( +
+ + { ({ database }) => +

{database.name}

+ } +
+
+ ) + } +} +``` + +Now you most likely don't just want to display just one static item so for cases where some of the values you might need will be dynamic you can use a function to get at the props and return the value you need. If you're using the component approach you can just pass props as you would normally for dynamic values. + + +```js +@Databases.load({ + id: (state, props) => props.params.databaseId +})) +``` + +## List loading + +Loading a list of items is as easy as applying the `loadList` decorator: + +```js +import React from "react" +import Users from "metabase/entities/users" + +@Users.loadList() +class MyList extends React.Component { + render () { + const { users } = this.props + return ( +
+ { users.map(u => u.first_name) } +
+ ) + } +} +``` + +Similar to the object loader's `id` argument you can also pass a `query` object (if the API supports it): + +```js +@Users.loadList({ + query: (state, props) => ({ archived: props.showArchivedOnly }) +}) +``` + +### Control over loading and error states + +By default both `EntityObject` and `EntityList` loaders will handle loading state for you by using `LoadingAndErrorWrapper` under the hood. If for some reason you want to handle loading on your own you can disable this behavior by setting `loadingAndErrorWrapper: false`. + +### Wrapped objects + +If you pass `wrapped: true` to a loader then the object or objects will be wrapped with helper classes that let you do things like `user.getName()`, `user.delete()`, or `user.update({ name: "new name" )`. Actions are automatically already bound to `dispatch`. + +This may incur a performance penalty if there are many objects. + +Any additional selectors and actions defined in the entities' `objectSelectors` or `objectActions` will appear as the wrapped object's methods. + +### Advanced usage + +You can also use the Redux actions and selectors directly, for example, `dispatch(Users.actions.loadList())` and `Users.selectors.getList(state)`. + +## Forms + +Metabase includes a comprehensive custom React and [`redux-form`](https://redux-form.com/5.2.3/) based form library. It also integrates with Metabase's [Entities](https://github.com/metabase/metabase/wiki/Frontend:-Entity-Loaders) system. + +The core React component of the system is [`metabase/containers/Form`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/containers/Form.jsx). + +### Form Definitions + +Form definitions can be provided in two different ways, with a JavaScript-based form definition object, or inline React `` elements. + +Pass a form definition to the `form` prop: + +```javascript +
alert(JSON.stringify(values))} +/> + +If `` doesn't have any children elements then it will use the [`metabase/components/StandardLayout`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/components/StandardLayout) component to provide a default form layout. + +The schema for this object is defined in [`Form.jsx`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/containers/Form.jsx#L41-L60). + +`fields` and `initial` (for initial values) can be provided directly or as functions that dynamically compute them based on the current form state and additional props. + +```javascript +{ + "fields": (values) => [ + { name: "a", type: } +``` + +`initial`, `normalize`, and `validate` properties can be provided at the top-level, or per-field. They can also be provided as props to the `` and `` components. + +### Custom Layout + +Form definition can also be provided via `` React elements (exported from the same `metabase/containers/Form` module), which will also serve as the layout (this uses the [`metabase/components/CustomLayout`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/components/CustomLayout)) + +```javascript +import Form, { FormField, FormFooter } from "metabase/containers/Form"; + + alert(JSON.stringify(values))}> + + + + +``` + +You can also provide both the `form` prop and children `` elements, in which case the `form` prop will be merged with the ``s' props. + +### Custom Widgets + +Built-in field `type`s are defined in [metabase/components/form/FormWidget](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/components/form/FormWidget.jsx#L17-L28). You can also provide a React component as the `type` property. + +### Validation + +You might have noticed the `validate` API above.
These are simple chainable validators compatible with this form library, and are provided by [`metabase/lib/validate`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/lib/validate.js). You can add additional validators in that file. + +Server-side validation and other errors are returned in a standard format understood by `
`. + +Field-level errors: + +```json +{ "errors": { "field_name": "error message" } } +``` + +Top-level errors: + +```json +{ "message": "error message" } +``` + + +### Integration with Entities + +The Form library is integrated with Metabase's [Entities](https://github.com/metabase/metabase/wiki/Frontend:-Entity-Loaders) system (via the [`EntityForm`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/entities/containers/EntityForm.jsx) component), so that every entity includes a `Form` component that can be used like so: + +```javascript + +``` + +which uses the default `form` defined on the entity, e.x. + +```javascript +const Users = createEntity({ + name: "users", + path: "/api/user", + + form: { + fields: [ + { name: "email" } + ] + } + + // Alternatively, it will take the first form from the `forms` object: + // form: { + // default: { + // fields: [ + // { name: "email" } + // ] + // } + // } +} +``` + +You can also explicitly pass a different form object: + +```javascript + +``` + +Entity `Form`s will automatically be wired up to the correct REST endpoints for creating or updating entities. + +If you need to load an object first, they compose nicely with the Entities `Loader` render prop: + +```javascript + + {({ user }) => } + +``` + +Or higher-order component: + +```javascript +Users.load({ id: (state, props) => props.params.userId })(Users.Form) +``` + +## Style Guide + +### Set up Prettier + +We use [Prettier](https://prettier.io/) to format our JavaScript code, and it is enforced by CI. We recommend setting your editor to "format on save". You can also format code using `yarn prettier`, and verify it has been formatted correctly using `yarn lint-prettier`. + +We use ESLint to enforce additional rules. It is integrated into the Webpack build, or you can manually run `yarn lint-eslint` to check. 
+ +### React and JSX Style Guide + +For the most part we follow the [Airbnb React/JSX Style Guide](https://github.com/airbnb/javascript/tree/master/react). ESLint and Prettier should take care of a majority of the rules in the Airbnb style guide. Exceptions will be noted in this document. + +* Prefer React [function components over class components](https://reactjs.org/docs/components-and-props.html#function-and-class-components) +* For control components, typically we use `value` and `onChange`. Controls that have options (e.x. `Radio`, `Select`) usually take an `options` array of objects with `name` and `value` properties. +* Components named like `FooModal` and `FooPopover` typically refer to the modal/popover *content* which should be used inside a `Modal`/`ModalWithTrigger` or `Popover`/`PopoverWithTrigger` +* Components named like `FooWidget` typically include a `FooPopover` inside a `PopoverWithTrigger` with some sort of trigger element, often `FooName` + +* Use arrow function instance properties if you need to bind a method in a class (instead of `this.method = this.method.bind(this);` in the constructor), but only if the function needs to be bound (e.x. if you're passing it as a prop to a React component) + +```javascript +class MyComponent extends React.Component { + constructor(props) { + super(props); + // NO: + this.handleChange = this.handleChange.bind(this); + } + // YES: + handleChange = e => { + // ... + } + // no need to bind: + componentDidMount() { + } + render() { + return + } +} +``` + +* For styling components we currently use a mix of `styled-components` and ["atomic" / "utility-first" CSS classes](https://github.com/metabase/metabase/tree/master/frontend/src/metabase/css/core). +* Prefer using `grid-styled`'s `Box` and `Flex` components over raw `div`. +* Components should typically pass along their `className` prop to the root element of the component. 
It can be merged with additional classes using the `cx` function from the `classnames` package. +* In order to make components more reusable, a component should only apply classes or styles to the root element of the component which affects the layout/styling of its own content, but *not* the layout of itself within its parent container. For example, it can include padding or the `flex` class, but it shouldn't include margin or `flex-full`, `full`, `absolute`, `spread`, etc. Those should be passed via `className` or `style` props by the consumer of the component, which knows how the component should be positioned within itself. +* Avoid breaking JSX up into separate method calls within a single component. Prefer inlining JSX so that you can better see what the relation is of the JSX a `render` method returns to what is in the `state` or `props` of a component. By inlining JSX you'll also get a better sense of what parts should and should not be separate components. +```javascript + +// don't do this +render () { + return ( +
+ {this.renderThing1()} + {this.renderThing2()} + {this.state.thing3Needed && this.renderThing3()} +
+ ); +} + +// do this +render () { + return ( +
+ + + {this.state.thing3Needed && } +
+ ); +} +``` + +### JavaScript Conventions + +* `import`s should be ordered by type, typically: + 1. external libraries (`react` is often first, along with things like `ttags`, `underscore`, `classnames`, etc) + 2. Metabase's top-level React components and containers (`metabase/components/*`, `metabase/containers/*`, etc) + 3. Metabase's React components and containers specific to this part of the application (`metabase/*/components/*` etc) + 4. Metabase's `lib`s, `entities`, `services`, Redux files, etc +* Prefer `const` to `let` (and never use `var`). Only use `let` if you have a specific reason to reassign the identifier (note: this now enforced by ESLint) +* Prefer [arrow functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions) for inline functions, especially if you need to reference `this` from the parent scope (there should almost never be a need to do `const self = this;` etc), but usually even if you don't (e.x. `array.map(x => x * 2)`). +* Prefer `function` declarations for top-level functions, including React function components. The exception is for one-liner functions that return a value +```javascript +// YES: +function MyComponent(props) { + return
...
+} +// NO: +const MyComponent = (props) => { + return
...
+} +// YES: +const double = n => n * 2; +// ALSO OK: +function double(n) { + return n * 2; +} +``` + +* Prefer native `Array` methods over `underscore`'s. We polyfill all ES6 features. Use Underscore for things that aren't implemented natively. +* Prefer [`async`/`await`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function) over using `promise.then(...)` etc directly. +* You may use assignment destructuring or argument destructuring, but avoid deeply nested destructuring, since they can be hard to read and `prettier` sometimes formats them with extra whitespace. + * avoid destructuring properties from "entity"-like objects, e.x. don't do `const { display_name } = column;` + * don't destructure `this` directly, e.x. `const { foo } = this.props; const { bar } = this.state;` instead of `const { props: { foo }, state: { bar } } = this;` +* Avoid nested ternaries as they often result in code that is difficult to read. If you have logical branches in your code that are dependent on the value of a string, prefer using an object as a map to multiple values (when evaluation is trivial) or a `switch` statement (when evaluation is more complex, like when branching on which React component to return): + +```javascript +// don't do this +const foo = str == 'a' ? 123 : str === 'b' ? 456 : str === 'c' : 789 : 0; + +// do this +const foo = { + a: 123, + b: 456, + c: 789, +}[str] || 0; + +// or do this +switch (str) { + case 'a': + return ; + case 'b': + return ; + case 'c': + return ; + case 'd': + default: + return ; +} +``` +If your nested ternaries are in the form of predicates evaluating to booleans, prefer an `if/if-else/else` statement that is siloed to a separate, pure function: + +```javascript +const foo = getFoo(a, b); + +function getFoo(a, b, c) { + if (a.includes('foo')) { + return 123; + } else if (a === b) { + return 456; + } else { + return 0; + } +} +``` +* Be conservative with what comments you add to the codebase. 
Comments shouldn't be used as reminders or as TODOs--record those by creating a new issue in Github. Ideally, code should be written in such a way that it explains itself clearly. When it does not, you should first try rewriting the code. If for whatever reason you are unable to write something clearly, add a comment to explain the "why". +```javascript + +// don't do this--the comment is redundant + +// get the native permissions for this db +const nativePermissions = getNativePermissions(perms, groupId, { + databaseId: database.id, +}); + +// don't add TODOs -- they quickly become forgotten cruft + +isSearchable(): boolean { + // TODO: this should return the thing instead + return this.isString(); +} + +// this is acceptable -- the implementer explains a not-obvious edge case of a third party library + +// foo-lib seems to return undefined/NaN occasionally, which breaks things +if (isNaN(x) || isNaN(y)) { + return; +} + +``` +* Avoid complex logical expressions inside of if statements +```javascript +// don't do this +if (typeof children === "string" && children.split(/\n/g).length > 1) { + // ... +} + +// do this +const isMultilineText = typeof children === "string" && children.split(/\n/g).length > 1 +if (isMultilineText) { + // ... 
+}
+```
+
+* Use ALL_CAPS for constants
+
+```javascript
+// do this
+const MIN_HEIGHT = 200;
+
+// also acceptable
+const OBJECT_CONFIG_CONSTANT = {
+  camelCaseProps: "are OK",
+  abc: 123
+}
+```
+* Prefer named exports over default exports
+```javascript
+// this makes it harder to search for Widget
+import Foo from "./Widget";
+// do this to enforce using the proper name
+import {Widget} from "./Widget";
+```
+* Avoid magic strings and numbers
+```javascript
+// don't do this
+const options = _.times(10, () => ...);
+
+// do this in a constants file
+export const MAX_NUM_OPTIONS = 10;
+const options = _.times(MAX_NUM_OPTIONS, () => ...);
+```
+
+### Write Declarative Code
+
+You should write code with other engineers in mind as other engineers will spend more time reading than you spend writing (and re-writing). Code is more readable when it tells the computer "what to do" versus "how to do." Avoid imperative patterns like for loops:
+
+```javascript
+// don't do this
+let foo = []
+for (let i = 0; i < list.length; i++) {
+  if (list[i].bar === false) {
+    continue;
+  }
+
+  foo.push(list[i]);
+}
+
+// do this
+const foo = list.filter(entry => entry.bar !== false);
+```
+
+When dealing with business logic you don't want to be concerned with the specifics of the language. Instead of writing `const query = new Question(card).query();` which entails instantiating a new `Question` instance and calling a `query` method on said instance, you should introduce a function like `getQueryFromCard(card)` so that implementers can avoid thinking about what goes into getting a `query` value from a card.
+
+## Component Styling Tree Rings
+
+### classic / global CSS with BEM style selectors (deprecated)
+
+```css
+.Button.Button--primary {
+  color: var(--color-brand);
+}
+```
+
+### atomic / utility CSS (still used)
+
+```css
+.text-brand {
+  color: var(--color-brand);
+}
+```
+
+```javascript
+const Foo = () =>
+<div className="text-brand" />
+```
+
+
+### inline style (discouraged)
+
+```javascript
+const Foo = ({ color }) =>
+<div style={{ color }} />
+```
+
+### CSS modules (deprecated)
+
+```css
+:local(.primary) {
+  color: var(--color-brand);
+}
+```
+
+```javascript
+import style from "./Foo.css";
+
+const Foo = () =>
+<div className={style.primary} />
+``` + +### [Styled Components](https://styled-components.com/) + +```javascript +import styled from "styled-components"; + +const FooWrapper = styled.div` + color: ${props => props.color} +`; + +const Bar = ({ color }) => + +``` + +### Styled Components + [styled-system](https://styled-system.com/) + +e.x. + +```javascript +import styled from "styled-components"; +import { color } from "styled-system"; + +const Foo = styled.div` + ${color} +`; + +const Bar = ({ color }) => + +``` + +## Popover + +Popovers are popups or modals. + +In Metabase core, they are visually responsive: they appear above or below the element that triggers their appearance. Their height is automatically calculated to make them fit on the screen. + +### Where to Find Popovers in the User Journey + +#### When creating custom questions + +1. From home, click on `Ask a question` +2. Click on `Custom question` +3. 👀 The option picker next to `Pick your starting data` is a ``. +3. Choose `Sample Dataset` +4. Choose any of the tables, for example `People` + +Here, clicking on the following will open `` components: + +* `Columns` (right-hand side of section labeled `Data`) +* Gray icon of a grid with + below section labeled `Data` +* `Add filters to narrow your answers` +* `Pick the metric you want to see` +* `Pick a column to group by` +* `Sort` icon with arrows pointing up and down above `Visualize` button diff --git a/docs/developers-guide/images/visual-tests/percy-healthcheck-step.png b/docs/developers-guide/images/visual-tests/percy-healthcheck-step.png new file mode 100644 index 000000000000..2a5002c74a48 Binary files /dev/null and b/docs/developers-guide/images/visual-tests/percy-healthcheck-step.png differ diff --git a/docs/developers-guide/internationalization.md b/docs/developers-guide/internationalization.md new file mode 100644 index 000000000000..e986a9c8a299 --- /dev/null +++ b/docs/developers-guide/internationalization.md @@ -0,0 +1,24 @@ +# Internationalization + +We are an 
application with lots of users all over the world. To help them use Metabase in their own language, we mark all of our strings as i18n. + +## Adding new strings: + +If you need to add new strings (try to be judicious about adding copy) do the following: + +1. Tag strings in the frontend using `t` and `jt` ES6 template literals (see more details in https://ttag.js.org/): + +```javascript +const someString = t`Hello ${name}!`; +const someJSX =
<div>{jt`Hello ${name}`}</div>
; +``` + +and in the backend using `trs` (to use the site language) or `tru` (to use the current User's language): + +```clojure +(trs "Hello {0}!" name) +``` + +## Translation errors or missing strings + +If you see incorrect or missing strings for your language, please visit our [POEditor project](https://poeditor.com/join/project/ynjQmwSsGh) and submit your fixes there. \ No newline at end of file diff --git a/docs/developers-guide/start.md b/docs/developers-guide/start.md new file mode 100644 index 000000000000..f68ae00d66d1 --- /dev/null +++ b/docs/developers-guide/start.md @@ -0,0 +1,13 @@ +# Developer's Guide + +This guide contains detailed information on how to work on Metabase codebase. + +**Covered in this guide:** + +- [How to compile your own copy of Metabase](build.md) +- [How to set up a development environment](devenv.md) +- [How to write and run end-to-end tests](e2e-tests.md) +- [How to work with the frontend code](frontend.md) +- [How to contribute back to the Metabase project](contributing.md) +- [How to add support in Metabase for other languages](internationalization.md) +- [How to use visual tests](visual-tests.md) diff --git a/docs/developers-guide/visual-tests.md b/docs/developers-guide/visual-tests.md new file mode 100644 index 000000000000..e9c764b7007a --- /dev/null +++ b/docs/developers-guide/visual-tests.md @@ -0,0 +1,55 @@ +# Visual Tests + +We use [Percy](https://percy.io/) via Github actions to run visual regression tests. Percy provides pull-request-based workflow, handles diff review and approval flow conveniently. In addition to that, It integrates with Cypress, which allows us to use all power of our custom helpers and commands. We run + +## How to run visual tests on CI + +Percy tests are supposed to be run on CI since every run is attached to a pull request. 
Only when a cypress tests run command is prefixed by `percy exec -- ` and there is a valid `PERCY_TOKEN` environment variable specified, Percy CLI will submit pages snapshots to Percy servers, and we will be charged for every screenshot. To make use of Percy more cost-efficient, we manually trigger visual tests by posting a PR comment with a `@metabase-bot run visual tests` command. +In addition to that, we need to ensure that underlying Cypress tests are valid, so we run them without submitting screenshots to Percy on every commit. + +**1. Write `@metabase-bot run visual tests` comment in a PR** + +![https://user-images.githubusercontent.com/14301985/126784124-b6753632-2735-496c-b80b-29521e0b9d15.png](https://user-images.githubusercontent.com/14301985/126784124-b6753632-2735-496c-b80b-29521e0b9d15.png) + +It triggers a workflow that is visible in the repo Actions tab + +![https://user-images.githubusercontent.com/14301985/126784265-8137570f-0f68-4064-ab77-c4455a6ad706.png](https://user-images.githubusercontent.com/14301985/126784265-8137570f-0f68-4064-ab77-c4455a6ad706.png) + +**2. If there are some visual changes, it shows a failed Percy check in the PR** + +![https://user-images.githubusercontent.com/14301985/126795943-50ebbe5e-ed36-48fe-ab69-642555a1bc1d.png](https://user-images.githubusercontent.com/14301985/126795943-50ebbe5e-ed36-48fe-ab69-642555a1bc1d.png) + +**3. 
Once you review and approve the changes, the PR check becomes green** + +![https://user-images.githubusercontent.com/14301985/126796075-31d5ed5d-6926-4e98-99d2-4aef20738b56.png](https://user-images.githubusercontent.com/14301985/126796075-31d5ed5d-6926-4e98-99d2-4aef20738b56.png) + +![https://user-images.githubusercontent.com/14301985/126796104-c533bbea-006c-47ef-83fa-0c07fcf5393b.png](https://user-images.githubusercontent.com/14301985/126796104-c533bbea-006c-47ef-83fa-0c07fcf5393b.png) + + +## How to create a visual test + +We use Cypress to write Percy tests so we can fully use all existing helpers and custom commands. + +Visual regression tests live inside the `frontend/test/metabase-visual` directory. Writing a Percy test consists of creating a desired page state and executing `cy.percySnapshot()` command. + +### Goal + +Each visual test should cover as many as possible different elements, variants on the same screenshot. For instance, when we are writing E2E test that checks a chart on a dashboard we add just one card and run assertions. In opposite to that, a visual test can contain every type of chart on the same dashboard because it significantly reduces the number of screenshots we produce which reduces the cost of using Percy. + +### Workflow + +1) Run Metabase in the dev mode locally (`yarn dev` or similar commands). +2) Run `yarn test-visual-open` to open Cypress locally. You do not need to export any `PERCY_TOKEN`. +3) Create a spec inside `frontend/test/metabase-visual` and run it via Cypress runner. + +At this step, if you added `percySnapshot` command somewhere in your test, you will see `percyHealthCheck` step in your test: + +![Learn about your data in the SQL editor](./images/visual-tests/percy-healthcheck-step.png) + +Consider the page state at `percyHealthCheck` step as the one that will be captured. + +### Notes + +- You don't need to export `PERCY_TOKEN` for running tests. 
If a token is exported Percy will send snapshots from your local machine to their servers so that you will be able to see your local run in their interface. +- When the application code uses `Date.now()`, you can [freeze](https://docs.percy.io/docs/freezing-dynamic-data#freezing-datetime-in-cypress) date/time in Cypress. +- [Stub](https://github.com/metabase/metabase/pull/17380/files#diff-4e8ebaf75969143a5eee6bfb8adcd4b72d4330d18d77319e3434d11cf6c75e40R15) `Math.random` when to deal with randomization. diff --git a/docs/enterprise-guide/activating-the-enterprise-edition.md b/docs/enterprise-guide/activating-the-enterprise-edition.md index 525ebb421063..62727a428df2 100644 --- a/docs/enterprise-guide/activating-the-enterprise-edition.md +++ b/docs/enterprise-guide/activating-the-enterprise-edition.md @@ -1,24 +1,37 @@ -## Getting and activating the Enterprise edition +## Activating your Metabase commercial license -The Enterprise edition of Metabase is distinct from the open-source edition, so to use it you'll need to first get a license, get the Enterprise edition, and then activate enterprise features with your license. +The paid Pro and Enterprise editions of Metabase are distinct from the free Open Source edition, so to use your paid features you’ll need to first get a license. And if you want to self-host, you'll need a different JAR or Docker image that you can use to activate the advanced features with your license token. -You can get a license by signing up for a free trial of the Enterprise edition. [Find out more here](https://metabase.com/enterprise/). 
Once you sign up for a free trial, you will receive an email containing a license token +### Where to get a license -To get the Enterprise edition, you can either [download the latest .jar file](https://downloads.metabase.com/enterprise/latest/metabase.jar), or get the [latest Docker image](https://hub.docker.com/r/metabase/metabase-enterprise/) at `metabase/metabase-enterprise:latest` +You can get a license by signing up for a free trial of the [Pro or Enterprise edition plans](https://www.metabase.com/pricing), both of which can be self-hosted or hosted on Metabase Cloud. -If you don't want to host the Enterprise Edition yourself, you can have it hosted for you with [Metabase Cloud](https://www.metabase.com/start/hosted/index.html). +If you sign up for a Metabase Cloud option, you're already good to go. -Once you have the Enterprise edition running, to activate all of its features go to the Admin Panel within Metabase, click on the Enterprise tab, click the "Activate a license" button, and then paste in your license token. The page should change to show you that Enterprise features are now active. +### How to activate your token when self-hosting -### Validating Your Enterprise Token +If you chose to host Metabase yourself, you'll get an email containing a unique license token. But to use it, you'll need to install the right JAR file. -Your Metabase instance will need to be able to access the internet (specifically `https://store.metabase.com/api/[token-id]/v2/status`) in order to validate your token and grant access to the Enterprise feature set. 
+You can either: -If you need to route outbound Metabase traffic through a proxy on your network, use the following command: +- [Download the latest metabase-enterprise JAR](https://downloads.metabase.com/enterprise/latest/metabase.jar) (the filename is the same, irrespective of your plan), or +- [Get the latest Docker image](https://hub.docker.com/r/metabase/metabase-enterprise/) at `metabase/metabase-enterprise:latest`. -`java -Dhttps.proxyHost=[your proxy's hostname] -Dhttps.proxyPort=[your proxy's port] -jar enterprise_metabase.jar` +Run Metabase as you would normally, then go to the __Admin__ > __Enterprise__ tab, click the __Activate a license__ button, and paste in your license token. The page will show you the features that are now active. -Depending on your organization's set-up, additional configuration steps may need to be taken. If the command above does not work for you, we would recommend reaching out to your internal infrastructure or dev ops teams for assistance. +### **Validating your token** + +Your Metabase needs to be able to access the internet (specifically `https://store.metabase.com/api/[token-id]/v2/status`) in order to validate the token and maintain access to the advanced features. + +If your Metabase can't validate the token, it'll disable the advanced features, but will continue to work normally otherwise, as if it were the Open Source edition. + +In case you need to route outbound Metabase traffic through a proxy on your network, use the following command when starting Metabase: + +``` +java -Dhttps.proxyHost=[your proxy's hostname] -Dhttps.proxyPort=[your proxy's port] -jar enterprise_metabase.jar +``` + +Depending on your organization’s setup, you may need to take additional configuration steps. If the command above doesn't work for you, we recommend reaching out to your internal infrastructure or dev ops teams for assistance. 
--- diff --git a/docs/enterprise-guide/audit.md b/docs/enterprise-guide/audit.md index 99a30f17e530..8504fdc49059 100644 --- a/docs/enterprise-guide/audit.md +++ b/docs/enterprise-guide/audit.md @@ -1,40 +1,41 @@ -## Audit Logs +# Audit Logs -As an administrator of Metabase, you already know the importance of using data to understand your users. With the Audit tool, you can use Metabase to understand your Metabase users' usage. It's, well, meta! +As an administrator of Metabase, you already know the importance of using data to understand your how people interact with your products or services. With the Audit tool, you can use Metabase to understand how people in your organization use Metabase. It's, well, meta! -Access the tool by clicking the gear icon in the upper right and then clicking Audit in the top navigation. There's a lot of data available, not only about your people, but also about your questions, dashboards, databases and more! We'll walk you through each of the sections below. +Access the tool by clicking the gear icon in the upper right and then clicking __Audit__ in the top navigation. There's a lot of data available, not only about your people, but also about your questions, dashboards, databases and more! We'll walk you through each of the sections below. -### People +## People -Use the People section to gain a better understanding of how your end-users are interacting with Metabase. You'll want to pay attention to the Overview tab, especially when you first launch Metabase at your organization — it will give you data around how many active and newly created users you have each day. Further down the page, you'll see charts showing you which of your users are most engaged. Use these charts to find your power users! +### Team members -![Team Members](./images/audit-team.png) +Use the __People__ section to gain a better understanding of how people are interacting with Metabase. 
You'll want to pay attention to the __Overview__ tab, especially when you first launch Metabase at your organization — it will give you data around how many active and newly created accounts you have each day. Further down the page, you'll see charts showing you which people are most engaged. -The Team Members panel has a view of other options for viewing your team's usage data. The All Members tab will give you a list of your team members, and stats about their activity, such as when they were last active. +![Team Members](./images/audit-team.png) -The Audit Log tab will display, in chronological order, each query, who viewed it, and when. Note that if the user did not save their query, its name will be listed as `Ad-hoc`. Each query name can be clicked to view more details about your users' interactions with it, including a full revision history. You will also be able to view the query in Metabase. Note that this link will always show the latest version of the query - use the revision history to see changes over time. +The __Team Members__ panel has a view of other options for viewing your team's usage data. The __All Members__ tab will give you a list of your team members, and stats about their activity, such as when they were last active. -#### Drilling In +The __Audit Log__ tab will display, in chronological order, each query, who viewed it, and when. Note that if the person didn't save their query, its name will be listed as `Ad-hoc`. Each query name can be clicked to view more details about the person's interactions with it, including a full revision history. You'll also be able to view the query in Metabase. Note that this link will always show the latest version of the query - use the revision history to see changes over time. -Throughout the People section, names can be clicked to access the profile of a specific user’s activity. 
This profile includes: +Throughout the People section, names can be clicked to access the profile of a specific person’s activity. This profile includes: - Dashboard views - Query views - Downloads +- Subscriptions & Alerts ![Team Members](./images/audit-teammember.png) -### Data +## Data The Data section focuses on your databases, schemas and tables, and is divided into corresponding sections. Look here if you're trying to uncover queries and schemas that need optimization. Each section provides visualizations around the use and speed of querying against your databases, schemas or tables. You will also be able to view lists of stats about all of your databases, schemas and tables. ![Data](./images/audit-data.png) -### Items +## Items -The Items section focuses on questions, dashboards and downloads, and is divided into corresponding sections. Use these pages to gain a better understanding of what your users are interacting with. +The __Items__ section focuses on questions, dashboards, downloads, and Subscriptions and Alerts. -#### Questions +### Questions The Questions section will show you your most popular queries, as well as your slowest queries. If queries you think are important aren't appearing on your most popular queries list, you may want to make sure your team is focusing on the right things. @@ -53,22 +54,47 @@ A list of all of your questions is available as well, and you can see various da - Collection - Public Link -#### Drilling In - You can also click on any question to drill into a more detailed profile showing: - View activity - Revision History - A full audit log of who viewed the question, and when -#### Dashboards +### Dashboards -The Dashboards section is another great way to understand what your users who are dashboard-focused are looking at, and to make sure they're having a smooth experience. 
If you notice that a popular dashboard has a high average question loading time, you can investigate further using the Questions section outlined above. +The __Dashboards__ section helps you understand what dashboards people are looking at, and to make sure they're having a smooth experience. If you notice that a popular dashboard has a high average question loading time, you can investigate further using the Questions section outlined above. ![Items](./images/audit-dashboards.png) -A list of all of your dashboards is available as well, and you can see various data points about each dashboard at a glance, such as number of views and average question execution time. +A list of all of your dashboards is available as well, and you can see various data points about each dashboard at a glance, such as: + +- Number of views +- Average question execution time(ms) +- Number of cards +- Saved by +- Cache duration +- Public link +- Saved on +- Last edited on + +### Downloads + +Use the __Downloads__ section to understand which people are downloading (or exporting) data, and the size (number of rows) of the downloads they're performing. This section contains some visualizations, as well as a list of all downloads. + +### Subscriptions & Alerts + +Here Admins can get an overview of all of the [Dashboard subscriptions][dashboard-subscriptions] and [Alerts][alerts] that are currently active for that Metabase. + +- Dashboard name (or Question name for Alerts) +- Recipients +- Type (e.g., email or Slack) +- Collection +- Frequency +- Created By +- Created At +- Filters -#### Downloads +Admins can add and remove people from a subscription or alert by clicking on the item's __Recipients__. Admins can also delete the subscription or alert entirely by clicking on the **X** on the relevant line. -Use the Downloads section to understand which of your users are downloading (or exporting) data and the size (number of rows) of the downloads they're performing. 
This section contains some visualizations, as well as a list of all downloads. +[alerts]: ../users-guide/15-alerts.md +[dashboard-subscriptions]: ../users-guide/dashboard-subscriptions.md \ No newline at end of file diff --git a/docs/enterprise-guide/data-sandboxes.md b/docs/enterprise-guide/data-sandboxes.md index 3dde61f44a1c..80baec4f55df 100644 --- a/docs/enterprise-guide/data-sandboxes.md +++ b/docs/enterprise-guide/data-sandboxes.md @@ -1,6 +1,6 @@ ## Sandboxing your data -Data sandboxes are a powerful and flexible permissions tool in Metabase Enterprise Edition that allow you to grant filtered access to specific tables. +Data sandboxes are a powerful and flexible permissions tool in Metabase Enterprise Edition that allow you to grant filtered access to specific tables. If you haven't already, check out our [overview of how permissions work in Metabase][permissions-overview]. Say you have users who you want to be able to log into your Metabase instance, but who should only be able to view data that pertains to them. For example, you might have some customers or partners who you want to let view your Orders table, but you only want them to see their orders. Sandboxes let you do just that. @@ -47,9 +47,7 @@ Next we’ll see a worksheet that will ask us how we want to filter this table f ![Sandbox settings](images/sandboxing/select-user-attribute.png) -We’ll click Done, then we’ll click Save Changes at the top of the screen to save the changes we’ve made to our permissions. If we ever want to edit how this table should be filtered for users in this group, we can just click on the blue box and select “Edit sandboxed access.” - -![Edit access](images/sandboxing/edit-sandboxed-access.png) +We’ll click Done, then we’ll click Save Changes at the top of the screen to save the changes we’ve made to our permissions. 
If we ever want to edit how this table should be filtered for users in this group, we can just click on the __Data access__ dropdown for that group and select __Edit sandboxed access__. To test this out, we’ll open up a new incognito browser window and log in with our test user account. We’ll click on the Sample Dataset on the home page and then pick the Orders table. As you can see here, this user correctly only sees orders where the User ID column is equal to 1, because that’s what this user’s user_id attribute is. @@ -99,6 +97,21 @@ The filtering question that I'll create will exclude columns that I don't want t ![Filtering question](images/sandboxing/advanced-example-2-filtering-question.png) +And here's the code: + +``` +SELECT + id, + created_at, + product_id, + quantity, + total, + user_id +FROM + orders +[[WHERE user_id = {%raw%}{{cid}}{%endraw%}]] +``` + Going back over to the Permissions section, when I open up the sandboxed access modal and select the second option and select my filtering question, I'll see an additional section which allows me to map the variable I defined in my question with a user attribute: ![Sandboxing options](images/sandboxing/advanced-example-2-sandboxing-options.png) @@ -147,4 +160,8 @@ Public questions and dashboards can't be sandboxed. Sandboxing works by filterin The next section will explain [how to embed](full-app-embedding.md) interactive dashboards and charts, or even whole sections of Metabase within your app. 
[permissions]: /learn/permissions/index.html +<<<<<<< HEAD +======= +[permissions-overview]: ../administration-guide/05-setting-permissions.md +>>>>>>> tags/v0.41.0 [troubleshoot-sandbox]: ../troubleshooting-guide/sandboxing.html \ No newline at end of file diff --git a/docs/enterprise-guide/images/sandboxing/advanced-example-2-filtering-question.png b/docs/enterprise-guide/images/sandboxing/advanced-example-2-filtering-question.png index 4250e4977be3..cc5defeeb02c 100644 Binary files a/docs/enterprise-guide/images/sandboxing/advanced-example-2-filtering-question.png and b/docs/enterprise-guide/images/sandboxing/advanced-example-2-filtering-question.png differ diff --git a/docs/enterprise-guide/images/sandboxing/advanced-example-2-sandboxing-options.png b/docs/enterprise-guide/images/sandboxing/advanced-example-2-sandboxing-options.png index fc23b1cfd334..01d733d65aa4 100644 Binary files a/docs/enterprise-guide/images/sandboxing/advanced-example-2-sandboxing-options.png and b/docs/enterprise-guide/images/sandboxing/advanced-example-2-sandboxing-options.png differ diff --git a/docs/enterprise-guide/images/sandboxing/change-access-confirm-modal.png b/docs/enterprise-guide/images/sandboxing/change-access-confirm-modal.png index 410b87621231..1569125c4570 100644 Binary files a/docs/enterprise-guide/images/sandboxing/change-access-confirm-modal.png and b/docs/enterprise-guide/images/sandboxing/change-access-confirm-modal.png differ diff --git a/docs/enterprise-guide/images/sandboxing/edit-sandboxed-access.png b/docs/enterprise-guide/images/sandboxing/edit-sandboxed-access.png deleted file mode 100644 index dc8cd2ca3cdb..000000000000 Binary files a/docs/enterprise-guide/images/sandboxing/edit-sandboxed-access.png and /dev/null differ diff --git a/docs/enterprise-guide/images/sandboxing/edit-user-details.png b/docs/enterprise-guide/images/sandboxing/edit-user-details.png index b839c4b32a49..8dcb7e5fd040 100644 Binary files 
a/docs/enterprise-guide/images/sandboxing/edit-user-details.png and b/docs/enterprise-guide/images/sandboxing/edit-user-details.png differ diff --git a/docs/enterprise-guide/images/sandboxing/grant-sandboxed-access.png b/docs/enterprise-guide/images/sandboxing/grant-sandboxed-access.png index d0e31e577bea..536d4d6cc0d2 100644 Binary files a/docs/enterprise-guide/images/sandboxing/grant-sandboxed-access.png and b/docs/enterprise-guide/images/sandboxing/grant-sandboxed-access.png differ diff --git a/docs/enterprise-guide/images/sandboxing/select-user-attribute.png b/docs/enterprise-guide/images/sandboxing/select-user-attribute.png index 163e3fa04da3..6612dea457b3 100644 Binary files a/docs/enterprise-guide/images/sandboxing/select-user-attribute.png and b/docs/enterprise-guide/images/sandboxing/select-user-attribute.png differ diff --git a/docs/operations-guide/environment-variables.md b/docs/operations-guide/environment-variables.md index be242ceba1c3..2fd6317191db 100644 --- a/docs/operations-guide/environment-variables.md +++ b/docs/operations-guide/environment-variables.md @@ -681,7 +681,7 @@ Server port, usually 389 or 636 if SSL is used. #### `MB_LDAP_SECURITY` -Type: string
+Type: string (`"none"`, `"ssl"`, `"starttls"`)
Default: `"none"` Use SSL, TLS or plain text. diff --git a/docs/operations-guide/migrating-from-h2.md b/docs/operations-guide/migrating-from-h2.md index f41fe43bf366..0c33c76dde30 100644 --- a/docs/operations-guide/migrating-from-h2.md +++ b/docs/operations-guide/migrating-from-h2.md @@ -17,7 +17,11 @@ docker cp metabase:/metabase.db/metabase.db.mv.db ./ The above command would copy the database file to the directory you ran the command from. With your database file outside of the container, all you need to do is follow the "How to migrate" steps below. +<<<<<<< HEAD ## How to migrate +======= +## How to migrate +>>>>>>> tags/v0.41.0 Metabase provides a custom migration command for upgrading H2 application database files by copying their data to a new database. Here's what you'll do: @@ -51,4 +55,8 @@ Metabase expects that you'll run the command against a brand-new (empty) databas ### Troubleshooting +<<<<<<< HEAD If you get an error, check out [Error when loading application database from H2](../troubleshooting-guide/loading-from-h2.md). +======= +If you get an error, check out [this troubleshooting guide](../troubleshooting-guide/loading-from-h2.md). +>>>>>>> tags/v0.41.0 diff --git a/docs/operations-guide/running-metabase-on-heroku.md b/docs/operations-guide/running-metabase-on-heroku.md index a6967bd2a303..00645577350c 100644 --- a/docs/operations-guide/running-metabase-on-heroku.md +++ b/docs/operations-guide/running-metabase-on-heroku.md @@ -37,7 +37,7 @@ Now that you’ve installed Metabase, it’s time to [set it up and connect it t ### Troubleshooting * If your Metabase instance is getting stuck part way through the initialization process and only every shows roughly 30% completion on the loading progress. - * The most likely culprit here is a stale database migrations lock that was not cleared. This can happen if for some reason Heroku kills your Metabase dyno at the wrong time during startup. 
__To fix it:__ you can either clear the lock using the built-in [release-locks](../troubleshooting-guide/application-database.md) command line function, or if needed you can login to your Metabase application database directly and delete the row in the `DATABASECHANGELOGLOCK` table. Then just restart Metabase. + * The most likely culprit here is a stale database migrations lock that was not cleared. This can happen if for some reason Heroku kills your Metabase dyno at the wrong time during startup. __To fix it:__ you can either clear the lock using the built-in [release-locks](../troubleshooting-guide/loading-from-h2.md) command line function, or if needed you can login to your Metabase application database directly and delete the row in the `DATABASECHANGELOGLOCK` table. Then just restart Metabase. ## Deploying New Versions of Metabase @@ -51,7 +51,7 @@ Here's each step: * Clone the metabase-deploy repo to your local machine: ```bash -git clone https://github.com/metabase/metabase-deploy.git +git clone https://github.com/metabase/metabase-deploy.git cd metabase-deploy ``` @@ -97,7 +97,7 @@ git push master ### Database Syncs -You may want to ensure that your staging database is synced with production before you deploy a new version. Luckily with Heroku you can restore a backup from one app to another. +You may want to ensure that your staging database is synced with production before you deploy a new version. Luckily with Heroku you can restore a backup from one app to another. For example, assuming your production app is named `awesome-metabase-prod`, this command will create a backup: @@ -117,12 +117,12 @@ Once this is done, restart your staging app and begin testing. ### Pinning Metabase versions -For whatever reason, should you want to pin Metabase to a specific version, you can append the version number to the buildpack URL (as long as that tag exists in the [`metabase-buildpack`](https://github.com/metabase/metabase-buildpack) repository). 
+For whatever reason, should you want to pin Metabase to a specific version, you can append the version number to the buildpack URL (as long as that tag exists in the [`metabase-buildpack`](https://github.com/metabase/metabase-buildpack) repository). If you haven't cloned the `metabase-deploy` repository, this can be done with the Heroku CLI: ```bash -heroku buildpacks:set --index 2 https://github.com/metabase/metabase-buildpack#0.34.1 \ +heroku buildpacks:set --index 2 https://github.com/metabase/metabase-buildpack#0.34.1 \ --app ``` diff --git a/docs/troubleshooting-guide/cant-log-in.md b/docs/troubleshooting-guide/cant-log-in.md index 97f011c62e42..389e2629656b 100644 --- a/docs/troubleshooting-guide/cant-log-in.md +++ b/docs/troubleshooting-guide/cant-log-in.md @@ -36,7 +36,16 @@ If you are an administrator and want to check SSO settings, go to **Admin Panel* ## If you are using Metabase Cloud, are you trying to use the correct password? +<<<<<<< HEAD **Root cause:** +======= +**Root cause:** You are trying to log in to Metabase Cloud using the password you set for the Metabase store. + +**Steps to take:** + +1. Check which password you are using. +2. If you cannot remember the Metababse Cloud admin password, please see the next entry. +>>>>>>> tags/v0.41.0 ## Do you need to reset the admin password? diff --git a/docs/troubleshooting-guide/index.md b/docs/troubleshooting-guide/index.md index e5781aecdda6..be92694f33e6 100644 --- a/docs/troubleshooting-guide/index.md +++ b/docs/troubleshooting-guide/index.md @@ -30,9 +30,13 @@ Problems, their causes, how to detect them, and how to fix them. - [Metabase isn't sending email][not-sending-email]. +<<<<<<< HEAD - [Using the default H2 application database][appdb]. - [Loading an exported H2 application database][loadh2]. +======= +- [Using or migrating from an H2 application database][appdb]. +>>>>>>> tags/v0.41.0 - [The dates and times in my questions and charts are wrong][incorrect-times]. 
@@ -59,7 +63,11 @@ Metabase will log errors, both on the server and in the browser console, dependi - [Safari][safari] - [Edge][edge] +<<<<<<< HEAD ## Metabase tutorials +======= +## Metabase tutorials +>>>>>>> tags/v0.41.0 For tutorials that walk you through how to use Metabase features, check out [Learn Metabase][learn]. @@ -75,11 +83,19 @@ For quick answers to common questions, check out our [Frequently Asked Questions Metabase adds new features and squashes bugs with each release. [Upgrading to the latest and greatest][upgrade] may resolve your issue. If you're using [Metabase Cloud][cloud], we'll handle the upgrades for you. You can checkout the [release notes][releases] to see what's new. +<<<<<<< HEAD [appdb]: ./application-database.html [bugs]: ./bugs.html [cant-see-tables]: ./cant-see-tables.html [chrome]: https://developers.google.com/web/tools/chrome-devtools/open#console [cloud]: https://www.metabase.com/start/ +======= +[appdb]: ./loading-from-h2.html +[bugs]: ./bugs.html +[cant-see-tables]: ./cant-see-tables.html +[chrome]: https://developers.google.com/web/tools/chrome-devtools/open#console +[cloud]: https://www.metabase.com/start/ +>>>>>>> tags/v0.41.0 [datawarehouse]: ./datawarehouse.html [docker]: ./docker.html [edge]: https://docs.microsoft.com/en-us/microsoft-edge/devtools-guide-chromium @@ -87,12 +103,19 @@ Metabase adds new features and squashes bugs with each release. 
[Upgrading to th [filters]: ./filters.html [firefox]: https://developer.mozilla.org/en-US/docs/Tools/Web_Console/Opening_the_Web_Console [forum]: https://discourse.metabase.com/ +<<<<<<< HEAD [incorrect-times]: ./times-appear-incorrect.html +======= +[incorrect-times]: ./timezones.html +>>>>>>> tags/v0.41.0 [ldap]: ./ldap.html [learn]: https://www.metabase.com/learn [linked-filters]: ./linked-filters.html [login]: ./cant-log-in.html +<<<<<<< HEAD [loadh2]: ./loading-from-h2.html +======= +>>>>>>> tags/v0.41.0 [not-sending-email]: ./cant-send-email.html [proxies]: ./proxies.html [releases]: https://github.com/metabase/metabase/releases diff --git a/docs/troubleshooting-guide/linked-filters.md b/docs/troubleshooting-guide/linked-filters.md index 54c158d8db35..9a132bc55b97 100644 --- a/docs/troubleshooting-guide/linked-filters.md +++ b/docs/troubleshooting-guide/linked-filters.md @@ -53,6 +53,10 @@ If you are having problems with a regular [filter widget][filter-widget-gloss], 1. Check that Metabase's data model for your database includes the foreign key relationship. +<<<<<<< HEAD +======= +[filter-widget-gloss]: /glossary.html#filter_widget +>>>>>>> tags/v0.41.0 [foreign-key-gloss]: /glossary.html#foreign_key [join-types]: /learn/sql-questions/sql-join-types.html [learn-linking]: /learn/dashboards/linking-filters.html diff --git a/docs/troubleshooting-guide/loading-from-h2.md b/docs/troubleshooting-guide/loading-from-h2.md index 47dffb4a39f0..1de0d8b40827 100644 --- a/docs/troubleshooting-guide/loading-from-h2.md +++ b/docs/troubleshooting-guide/loading-from-h2.md @@ -1,3 +1,4 @@ +<<<<<<< HEAD # Loading exported application database fails If you've been using the default H2 application database that ships with Metabase, and want to [migrate from the default H2 application database][migrate] to a production database like [PostgreSQL][postgres] or MySQL/MariaDB, you'll need to use the `load-from-h2` command, which will fail if the database filename is incorrect. 
@@ -31,3 +32,143 @@ If you're using Metabase Enterprise Edition, you should check out the [Serializa [postgres]: https://www.postgresql.org/ [serialization-docs]: ../enterprise-guide/serialization.html [serialization-learn]: https://www.metabase.com/learn/administration/serialization.html +======= +--- +redirect_from: + - ./loading-from-h2.html +--- + +# Using or migrating from an H2 application database + +You have installed Metabase, but: + +- You're trying to migrate the application database from H2 to another database and something has gone wrong, +- You're trying to downgrade rather than upgrade, +- Metabase logs a `liquibase` error message when you try to run it, +- Metabase logs another error message that mentions `H2` or `h2` while it is running, or +- You're on Windows 10 and get a warning about file permissions. + +## Are you currently using H2 as your application database? + +**Root cause:** Metabase stores information about users, questions, and so on in a database of its own called the "application database", or "app database" for short. By default Metabase uses [H2][what-is-h2] for the app database, but we don't recommend it for production---because it's an on-disk database, it's sensitive to filesystem errors, such as a drive being corrupted or a file not being flushed properly. + +**Steps to take:** + +1. To check what you're using as the app database, go to **Admin Panel**, open the **Troubleshooting** tab, scroll down to "Diagnostic Info", and look for the `application-database` key in the JSON it displays. +2. See [Migrating from H2][migrate] for instructions on how to migrate to a more robust app database. + +## Are you trying to migrate the application database from H2 to something else?
+ +**Root cause:** You are trying to [migrate][migrate] the app database from H2 to a production database such as PostgreSQL or MySQL/MariaDB using the `load-from-h2` command, but this has failed because the database filename is incorrect with an error message like: + +``` +Command failed with exception: Unsupported database file version or invalid file header in file +``` + +**Steps to take:** + +1. Create a copy of the exported H2 database (see [Backing up Metabase Application Data][backup]). _Do not proceed until you have done this_ in case something goes wrong. + +2. Check that the H2 database file you exported is named `metabase.db.mv.db`. + +3. H2 automatically adds `.mv.db` extension to the database path you specify on the command line, so make sure the path to the DB file you pass to the command does _not_ include the `.mv.db` extension. For example, if you've exported an application database, and you want to load the data from that H2 database into a PostgreSQL database using `load-from-h2`, your command will look something like: + + ``` + export MB_DB_TYPE=postgres + export MB_DB_DBNAME=metabase + export MB_DB_PORT=5432 + export MB_DB_USER= + export MB_DB_PASS= + export MB_DB_HOST=localhost + java -jar metabase.jar load-from-h2 /path/to/metabase.db # do not include .mv.db + ``` + +If you're using [Metabase Enterprise Edition][enterprise], you can use [serialization][serialization-docs] to snapshot your application database. Serialization is useful when you want to [preload questions and dashboards][serialization-learn] in a new Metabase instance. + +## Are you trying to downgrade? + +**Root cause:** Metabase does not support downgrading (i.e., reverting to an early version of the application). + +**Steps to take:** + +1. Shut down Metabase. +2. Restore the backup copy of the app database you made before trying to upgrade or downgrade. +3. Restore the JAR file or container of the older version you want to revert to. +4. Restart Metabase. 
+ +## Is the app database locked? + +**Root cause:** Sometimes Metabase fails to start up because an app database lock did not clear properly during a previous run. The error message looks something like: + +``` +liquibase.exception.DatabaseException: liquibase.exception.LockException: Could not acquire change log lock. +``` + +**Steps to take:** + +1. Open a shell on the server where Metabase is installed and manually clear the locks by running: + + ``` + java -jar metabase.jar migrate release-locks + ``` + +2. Once this command completes, restart your Metabase instance normally (_without_ the `migrate release-locks` flag). + +## Is the app database corrupted? + +**Root cause:** H2 is less reliable than production-quality database management systems, and sometimes the database itself becomes corrupted. This can result in loss of data in the app database, but can _not_ damage data in the databases that Metabase is connected to. + +**Steps to take:** Error messages can vary depending on how the app database was corrupted, but in most cases the log message will mention `h2`. A typical command and message are: + +``` +myUser@myIp:~$ java -cp metabase.jar org.h2.tools.RunScript -script whatever.sql -url jdbc:h2:~/metabase.db +Exception in thread "main" org.h2.jdbc.JdbcSQLException: Row not found when trying to delete from index """"".I37: ( /* key:7864 */ X'5256470012572027c82fc5d2bfb855264ab45f8fec4cf48b0620ccad281d2fe4', 165)" [90112-194] + at org.h2.message.DbException.getJdbcSQLException(DbException.java:345) + [etc] +``` + +**How to fix this:** not all H2 errors are recoverable (which is why if you're using H2, _please_ have a backup strategy for the application database file). + +If you are running a recent version and using H2, the app database is stored in `metabase.db.mv.db`.
- Open a shell on the server where the Metabase instance is running and attempt to recover the corrupted H2 file by running the following four commands: + +``` +java -cp metabase.jar org.h2.tools.Recover + +mv metabase.db.mv.db metabase-old.db.mv.db + +touch metabase.db.mv.db + +java -cp target/uberjar/metabase.jar org.h2.tools.RunScript -script metabase.db.h2.sql -url jdbc:h2:`pwd`/metabase.db +``` + +## Are you running Metabase with H2 on Windows 10? + +**Root cause:** In some situations on Windows 10, the Metabase JAR needs to have permissions to create local files for the application database. When running the JAR, you'll see an error message like this: + +``` +Exception in thread "main" java.lang.AssertionError: Assert failed: Unable to connect to Metabase DB. +``` + +**Steps to take:** + +1. Right-click on the Metabase JAR file (_not_ the app database file). +2. Select "Properties". +3. Select "Unblock." + +## Is the application database taking too long to load? + +**Root cause:** You're using H2 as your app database, and the app database is so large that it can't be loaded in less than 5 seconds (which is the default timeout value). You'll see the message "Timeout" appear in the console when you try to start Metabase. + +**Steps to take:** + +1. Use a production-quality database such as PostgreSQL for the app database (preferred). +2. Go to the **Admin Panel** and increase the timeout setting for the app database. +3. Move Metabase to a faster server (in particular, a server with faster disks). 
+ +[backup]: ../operations-guide/backing-up-metabase-application-data.md +[enterprise]: /enterprise/ +[migrate]: ../operations-guide/migrating-from-h2.md +[serialization-docs]: ../enterprise-guide/serialization.md +[serialization-learn]: /learn/administration/serialization.html +[what-is-h2]: ../faq/setup/what-is-h2.md +>>>>>>> tags/v0.41.0 diff --git a/docs/troubleshooting-guide/sandboxing.md b/docs/troubleshooting-guide/sandboxing.md index a4a0a47355a9..6a5057109275 100644 --- a/docs/troubleshooting-guide/sandboxing.md +++ b/docs/troubleshooting-guide/sandboxing.md @@ -148,11 +148,19 @@ Someone is supposed to be able to view some of the values in a table in their qu ## Is the person who can't see the sandboxed data in multiple groups? +<<<<<<< HEAD **Root cause:** We only allow [one sandbox per table][one-sandbox-per-table]: if someone is a member of two or more groups with different permissions, every rule for figuring out whether access should be allowed or not is confusing. We therefore only allow one rule. **Steps to take:** The administrator can [create a new group][group] to capture precisely who's allowed access to what. +======= +**Root cause:** We only allow one sandbox per table: if someone is a member of two or more groups with different permissions, every rule for figuring out whether access should be allowed or not is confusing. We therefore only allow one rule. + +**Steps to take:** + +The administrator can [create a new group][groups] to capture precisely who's allowed access to what. 
+>>>>>>> tags/v0.41.0 [authenticating-with-saml]: ../enterprise-guide/authenticating-with-saml.html [locked-parameters]: /learn/embedding/embedding-charts-and-dashboards.html#hide-or-lock-parameters-to-restrict-what-data-is-shown @@ -161,7 +169,10 @@ The administrator can [create a new group][group] to capture precisely who's all [data-permissions]: /learn/permissions/data-permissions.html [groups]: ../administration-guide/05-setting-permissions.html#group [jwt-auth]: ../enterprise-guide/authenticating-with-jwt.html +<<<<<<< HEAD [one-sandbox-per-table]: ../enterprise-guide/data-sandboxes.html#a-user-can-only-have-one-sandbox-per-table +======= +>>>>>>> tags/v0.41.0 [permissions]: /learn/permissions/data-permissions.html [prepared-statement]: /glossary.html#prepared_statement [public-sharing]: ../administration-guide/12-public-links.html diff --git a/docs/troubleshooting-guide/timezones.md b/docs/troubleshooting-guide/timezones.md new file mode 100644 index 000000000000..e65759a5ac55 --- /dev/null +++ b/docs/troubleshooting-guide/timezones.md @@ -0,0 +1,78 @@ +# The dates and times in my questions and charts are wrong + +You are doing calculations with dates and times, or displaying them in charts, but: + +- the values appear to be wrong, or +- summary values are wrong. + +## Is the problem due to time zones? + +**Root cause:** Dates and times are stored using different time zones, but some or all of those time zones aren't taken into account when doing calculations (i.e., the problem is inconsistent data). + +**Steps to take:** + +To fix this problem you'll need answers to these questions: + +1. What is the correct time zone of the data you think is being displayed improperly (i.e., what's the right answer)? +2. Is there an explicit time zone setting on every timestamp, or are some or all timestamps being stored without a time zone? For example, `Dec 1, 2019 00:00:00Z00` includes the time zone (shown after the `Z`), but `Dec 1, 2019` doesn't. +3. 
What time zone is the database server using? +4. What time zone is Metabase using? + +Once you have these answers, look for cases like these: + +1. Your question or chart is comparing or sorting values with inconsistent or missing time zones. For example, if a flight's departure and arrival times are reported in local time, it can appear to arrive before it has left. +2. Your question is aggregating timestamps with different time zones: for example, the "daily" totals for your website's traffic include more than 24 hours worth of data because you are using the local dates from East Asia, Europe, and the Americas. + +Once you think you have identified a problem, drill down to understand exactly what time zone conversion is causing the underlying problem. For example, suppose you're looking at a time series with daily values; if your error is happening with weekly totals, you can: + +1. Pick a specific day where you know the number is incorrect. +2. Click on the data point in a chart, or a cell in a result table, and select "View these X." +3. Open this question in two other tabs in your browser. Change the date filters so that one tab has the rows in the underlying table from the _previous_ day, and the other table has the rows in the underlying table from the _next_ day. +4. Check that the date field being used to group the result in the underlying display is correct. If it is different from what you have stored in the database, or what you have in another tool, then the timestamp is being transformed incorrectly across the board. This often happens when you use a date or time lacking an explicit time zone. +5. If the underlying timestamps are correct (which they should be if they have explicit time zones), the individual times are probably being grouped into days in a different time zone than the one you want. +6.
To find out which time zone they are being transformed to, tweak the times on the date filters on the question you are looking at by moving the start time and start date backwards by an hour until you either get the correct number or you have gone back by 12 hours. (If any of your time zones include India, Newfoundland, or another jurisdiction with a half-step time zone, you may need to do this in half-hour increments.) +7. If that doesn't work, try moving the start and end times forward by an hour until you either get the correct number or you've gone forward by 12 hours. +8. If by this point you have the correct value, it means your time zone was converted by the number of hours forward or backwards you manually set the filter. If that's the case, check whether the offset you've come up with matches either the time zone of the data warehouse or the timezone of Metabase itself. + +## Is the Report Time Zone set incorrectly? + +**Root cause:** Wrong numbers in questions or charts can be caused by a mis-match in the time zone being used by Metabase and the time zone being used by the data warehouse. + +**Steps to take:** + +1. Go to the Admin Panel, select the **Localization** tab, and check the **Report Time Zone** setting, which controls the timezone Metabase uses when connecting to the database. This setting is currently supported on: + - Druid + - MySQL + - Oracle + - PostgreSQL + - Presto + - Vertica +2. If you're using a database that doesn't support a Report Time Zone, ensure that Metabase's time zone matches that of the database. Metabase's time zone is the Java Virtual Machine's time zone, typically set via a `-Duser.timezone<..>` parameter or the `JAVA_TIMEZONE` environment variable; exactly how it is set will depend on how you launch Metabase. Note that Metabase's time zone doesn't impact any databases that use a Report Time Zone. + +## Are SQL queries not respecting the Reporting Time Zone setting?
+ +**Root cause:** We don't currently apply a reporting time zone to the results of SQL queries. + +**Steps to take:** + +1. Set a reporting time zone explicitly in your SQL query. + +## Are dates without an explicit time zone being converted to another day? + +**Root cause:** You are grouping by a date (rather than by a time) that doesn't have a time zone attached to it. + +**Steps to take:** + +1. Look at every time field your question uses in the [Data Model Reference][data-model] and see if any of them are simply a "Date" field. +2. If so, make sure the server time zone reflects the reporting time zone, because when a query is run on Metabase, the server applies the configured time zone to that date. + +## Are you mixing explicit and implicit time zones? + +**Root cause:** You're comparing or doing arithmetic on two dates where one has an explicit time zone and one doesn't. + +**Steps to take:** + +1. This typically happens with a question that uses multiple fields: for example, you're filtering on one timestamp and grouping by another. Check the time zones of each of the dates or times you are using in your question. +2. You'll need to explicitly set the time zone for any value that doesn't have an explicit time zone. This will need to be done either in a SQL query or by transforming the data in your database to ensure both timestamps have time zones.
+ +[data-model]: ../users-guide/12-data-model-reference.html diff --git a/docs/users-guide/01-what-is-metabase.md b/docs/users-guide/01-what-is-metabase.md index 1a5060db0e9d..2569ce330ce8 100644 --- a/docs/users-guide/01-what-is-metabase.md +++ b/docs/users-guide/01-what-is-metabase.md @@ -46,9 +46,9 @@ To make a dashboard or pulse, click the plus (+) icon in the top-right of the ma ### Use search to quickly find things -![Search results](images/sharing-answers/search-results.gif) +![Search results](images/basic-exploration/search-results.gif) -The search bar at the top of the screen helps you find tables, dashboards, collections, saved questions, metrics, segments, and pulses in an instant. +The search bar at the top of the screen helps you find tables, dashboards, collections, saved questions, metrics, segments, and pulses in an instant. ## A primer on databases diff --git a/docs/users-guide/03-basic-exploration.md b/docs/users-guide/03-basic-exploration.md index 5c08786457aa..ed44f41cfccf 100644 --- a/docs/users-guide/03-basic-exploration.md +++ b/docs/users-guide/03-basic-exploration.md @@ -14,7 +14,7 @@ To learn more, see [Exploring data with Metabase's data browser](https://www.met #### Exploring collections -Collections in Metabase are a lot like folders. They're where all your team's dashboards and charts are kept. To explore a collection, just click on one in the **Our analytics** section of the home page, or click on `Browse all items` to see everything. +[Collections][collections] in Metabase are a lot like folders. They're where all your team's dashboards and charts are kept. To explore a collection, just click on one in the **Our analytics** section of the home page, or click on `Browse all items` to see everything. 
![A collection](./images/collection-detail.png) @@ -24,7 +24,7 @@ Collections have a list of any other items that are saved within them, and you c #### Exploring dashboards -Dashboards are a set of questions and text cards that you want to be able to refer back to regularly. [Learn more about dashboards](07-dashboards.md). +[Dashboards][dashboards] are a set of questions and text cards that you want to be able to refer back to regularly. If you click on a part of a chart, such as a bar in a bar chart, or a dot on a line chart, you'll see a the **Action menu**, with actions you can take to dive deeper into that result, branch off from it in a different direction, or create an [X-ray](14-x-rays.md) to see an automatic exploration of the data. ![Drill through](images/drill-through/drill-through.png) @@ -57,8 +57,21 @@ When you're looking at the detail view of a question, you can use all the same a One of our personal favorite ways to explore is with the **Distribution** option. This will show you how many rows there are in a given table, grouped by the column you clicked on. So if you have a Users table, if you click on an Age column and select Distribution, you'll see a bar chart with the count of users you have in each age bracket. +### Search + +![Search results](./images/basic-exploration/search-results.gif) + +Use the search bar to find dashboards, questions, collections, and pulses. You can select from the typeahead's dropdown results, or hit enter to view a search results page. You can also activate the search bar from anywhere by pressing the `/` key. + +Searches take into account items’ titles, descriptions, and other metadata — you can even search the contents of your SQL queries. For example, you can search for things like `SELECT escape_pod FROM mothership` and find that one question you worked on six months ago. The results will display an item’s description, which collection it’s saved in, what kind of object it is, and whether it’s pinned. 
Note that you'll only ever see items in collections you have permission to view. + +![Search results](./images/basic-exploration/search-results.png) + --- ## Next: Asking custom questions So what do you do if you can't find an existing dashboard or question that's exactly what you're looking for? Let's learn about [asking our own new questions](04-asking-questions.md). + +[collections]: collections.md +[dashboards]: 07-dashboards.md \ No newline at end of file diff --git a/docs/users-guide/04-asking-questions.md b/docs/users-guide/04-asking-questions.md index c7a883041b11..298dc57de824 100644 --- a/docs/users-guide/04-asking-questions.md +++ b/docs/users-guide/04-asking-questions.md @@ -111,23 +111,17 @@ Click on a record's ID number (or primary key) to see more information about a g ![Record details](./images/notebook/record-details.png) -## Downloading Your Results - -You can download or export the results of a question by clicking on the Download arrow in the lower right of the screen. Results can be downloaded into .csv, .xlsx, or .json files. The maximum download size is 1 million rows. - -![Download Button](./images/download-button.png) - ## Starting new explorations from saved questions -Each time you start modifying a saved question, Metabase will create a new question for you. It'll give the new question a placeholder title, and let you know which question you started from. +Each time you start modifying a saved question, Metabase will create a new question for you. It'll give the new question a placeholder title, and let you know which question you started from. ![Starting from a saved question](./images/notebook/started-from-saved-question.png) So feel free to play around with any saved question, as you won't have any effect on the existing question. When you hit **Save** on the question, you can choose either to save as a new question (the default), or you can overwrite the existing question you started from. 
-You can also explicitly **Duplicate this question** from the edit menu (the pencil icon). +## Editing and sharing questions -![Duplicate a question](./images/notebook/duplicate-question.png) +Check out [sharing answers](06-sharing-answers.md). --- diff --git a/docs/users-guide/05-visualizing-results.md b/docs/users-guide/05-visualizing-results.md index e0771f8afb59..2e81299260b0 100644 --- a/docs/users-guide/05-visualizing-results.md +++ b/docs/users-guide/05-visualizing-results.md @@ -21,7 +21,7 @@ In Metabase, an answer to a question can be visualized in a number of ways: - [Funnel](#funnel) - [Map](#maps) -To change how the answer to your question is displayed, click on the **Visualization** button in the bottom-right of the screen to open the visualization sidebar. +To change how the answer to your question is displayed, click on the **Visualization** button in the bottom-left of the screen to open the visualization sidebar. ![Visualization options](images/VisualizeChoices.png) @@ -31,7 +31,7 @@ Once a question returns results, you can save the question, download the results ## Visualization types and options -Each visualization type has its own advanced options. Click the **Settings** button next to the Visualization button to see your options. The options panel also automatically opens up whenever you pick a new visualization type. +Each visualization type has its own advanced options. Click the **Settings** button next to the Visualization button to see your options. The options panel also automatically opens up whenever you pick a new visualization type. Not sure which visualization type to use? Check out [Which chart should you use?](https://www.metabase.com/learn/basics/visualizing-data/guide.html). @@ -148,7 +148,7 @@ This auto-pivoting is distinct from the pivot table visualization, which we cove ### Pivot table -Pivot tables allow you swap rows and columns, group data, and include subtotals in your table. 
You can group one or more metrics by one or more dimensions. +Pivot tables allow you swap rows and columns, group data, and include subtotals in your table. You can group one or more metrics by one or more dimensions. Pivot tables are not currently available for the following databases in Metabase: @@ -163,7 +163,7 @@ In the settings for the Pivot Table visualization, you can assign fields to one - Fields to use for the table **rows** - Fields to use for the table **columns** -- Fields to use for the table **values** +- Fields to use for the table **values** Let's say we ask the following question in the notebook editor: @@ -175,7 +175,7 @@ From the `Orders` table, we've summarized by the count of orders and the average We've assigned the fields `User → State` and `Created At` to table rows, and assigned the `Product -> Category` field to generate our columns: Doohickey, Gadget, and so on. We can drag and drop dimensions between the row and column buckets, and add aggregations to the table values bucket. For example, if we assign a field to the columns bucket, Metabase will pivot that field and render each unique value of that field as a column heading. -You can put multiple fields in the "rows" and "columns" buckets, but note that the order of the fields changes how Metabase displays the table: each additional field will nest within the previous field. +You can put multiple fields in the "rows" and "columns" buckets, but note that the order of the fields changes how Metabase displays the table: each additional field will nest within the previous field. Where it makes sense, Metabase will automatically include subtotals for grouped rows. For example, as in the image above, because we've grouped our rows first by `State`, then by `Created At`, Metabase will list each year for each `State`, and aggregate the metric(s) for that subgroup. 
For orders placed in Wisconsin, Metabase would sum the count of orders for each category, and find the average annual order total in each product category in Wisconsin. @@ -327,6 +327,6 @@ Learn more about [visualizing data with maps](https://www.metabase.com/learn/bas --- -## Next: Sharing and organizing questions +## Next: saving and editing your questions -Now let's learn about [sharing and organizing your saved questions](06-sharing-answers.md). +Now let's learn about [saving and editing your questions](06-sharing-answers.md). diff --git a/docs/users-guide/06-sharing-answers.md b/docs/users-guide/06-sharing-answers.md index 006fc2b00eb0..29d17565aa7e 100644 --- a/docs/users-guide/06-sharing-answers.md +++ b/docs/users-guide/06-sharing-answers.md @@ -1,77 +1,80 @@ -## Sharing and organizing your questions and answers +# Saving and editing your questions -### How to save a question +## How to save a question -Whenever you’ve arrived at an answer that you want to save for later, click the **SAVE** button in the top right of the screen. This will also save the visualization option you’ve chosen for your answer. +Whenever you’ve arrived at an answer that you want to save for later, click the **Save** button in the top right of the screen. This will also save the visualization option you’ve chosen for your answer. ![Save button](images/sharing-answers/save-button.png) -A pop-up box will appear, prompting you to give your question a name and description, and to pick which collection to save it in. Note that your administrator might have set things up so that you're only allowed to save questions in certain collections, but you can always save things in your Personal Collection. After saving your question, you'll be asked if you want to add it to a new or existing dashboard. +A pop-up box will appear, prompting you to give your question a name and description, and to pick which [collection](#collection) to save it in. 
Note that your administrator might have set things up so that you're only allowed to [save questions in certain collections](#collection-permissions), but you can always save things in your Personal Collection. After saving your question, you'll be asked if you want to add it to a new or existing dashboard. -Now, whenever you want to refer to your question again you can find it by searching for it in the search bar at the top of Metabase, or by navigating to the collection where you saved it. To edit your question, go to it and click the pencil icon in the top-right. +Now, whenever you want to refer to your question again you can find it by searching for it in the search bar at the top of Metabase, or by navigating to the collection where you saved it. -### Sharing questions with public links +## Downloading Your Results -If your Metabase administrator has enabled [public sharing](../administration-guide/12-public-links.md) on a saved question or dashboard, you can go to that question or dashboard and click on the sharing icon to find its public links. Public links can be viewed by anyone, even if they don't have access to Metabase. You can also use the public embedding code to embed your question or dashboard in a simple web page or blog post. - -![Share icon](images/sharing-answers/share-icon.png) - -### Organizing and finding your saved questions +You can export the results of a question by clicking on the __Download arrow__ (a down arrow in a cloud) in the lower right of the screen. Results can be downloaded into .csv, .xlsx, or .json files. The maximum download size is 1 million rows. Exported .xlsx files preserve the formatting defined in the question: date and currency formats are kept throughout, as well as column ordering and visibility. File names for the exported question will include a slug of the question title, so you can easily distinguish files when exporting multiple questions.
-After your team has been using Metabase for a while, you’ll probably end up with lots of saved questions. Metabase has several ways to help you organize things and find what you’re looking for.
+## Editing your question
 
-![Our analytics](images/sharing-answers/our-analytics-page.png)
+Once you save your question, a down arrow will appear to the right of the question's title. Clicking on the down arrow will bring up the **Question detail sidebar**, which gives you some options:
 
-#### Collections
+![Question detail sidebar](images/sharing-answers/question-details-sidebar.png)
 
-Collections are the main way to organize questions, as well as dashboards and pulses. [Administrators can give you different kinds of access](../administration-guide/06-collections.md) to each collection:
+- **Edit details** (Pencil icon). Change the title of the question, and add some description for context. Adding a description will also make the question easier to find using the search bar. You can also select more options to [cache the results of the question](#caching-results).
+- **Add to dashboard** (Dashboard icon with plus symbol). See [dashboards][dashboards].
+- **Move** (Document icon with right arrow). Relocate the question to a different [collection](#collections).
+- **Duplicate** (Square with little square). Create a copy of the question. Keep in mind that whenever you start editing a saved question, Metabase will create a copy of the question. You can either save your edits as a new question, or overwrite the original saved question.
+- **Archive** (Folder with down arrow). See [Archiving items](#archiving-items).
 
-- **View access:** you can see the collection and its contents, but you can't modify anything or put anything new into the collection.
-- **Curate access:** you can edit, move, or archive the collection and its contents.
You can also move or save new things in it and create new collections inside of it, and can also pin items in the collection to the top of the screen. Only administrators can edit permissions for collections, however.
-- **No access:** you can't see the collection or its contents. If you have access to a dashboard, but it contains questions that are saved in a collection you don't have access to, those questions will show a permissions notification instead of the chart or table.
 
+### Caching results
 
-#### Your personal collection
 
+{% include plans-blockquote.html %}
 
-In addition to the collections you and your teammates have made, you'll also always have your own personal collection that only you and administrators can see. To find it, click on the "browse all items" button on the homepage and click on "my personal collection" in the list of collections.
+If your results don't change frequently, you may want to cache your results, that is: store your results in Metabase so that the next time you visit the question, Metabase can retrieve the stored results rather than query the database again. For example, if your data only updates once a day, there's no point in querying the database more than once a day, as the data won't have changed. Returning cached results can be significantly faster, as the database won't have to redo the work to answer your query.
 
-You can use your personal collection as a scratch space to put experiments and explorations that you don't think would be particularly interesting to the rest of your team, or as a work-in-progress space where you can work on things and then move them to a shared place once they're ready.
 
+To cache results, click on the down arrow next to the question's title to open the __Question detail sidebar__, then click on the __Pencil icon__ to __Edit details__. In the modal that pops up, in the bottom left, select __More options__. There you'll be able to tell Metabase how long it should cache the question's results.
This caching will only apply to this specific question; admins can [configure database-wide caching settings][caching] in the __Admin panel__. -#### Pinned items +Admins can still set global caching, but setting a cache duration on a specific question will override that global setting–useful for when a particular question has a different natural cadence. -![Pins](images/sharing-answers/pinned-items.png) +### Question moderation -In each collection, you can pin important or useful dashboards or questions to make them stick to the top of the screen. Pinned items will also be displayed as large cards to make them stand out well. If you have Curate permissions for a collection, you can pin and un-pin things, and drag and drop pins to change their order. +{% include plans-blockquote.html %} -Any dashboards that are pinned in the main "Our analytics" collection will also show up on the homepage. +Administrators can **Verify** a question by clicking on the **Verify checkmark** in the **Moderation** section of the **Question detail sidebar**. Verifying a question is a simple way for an administrator to signal that they've reviewed the question and deemed it to be trustworthy. That is: the question is filtering the right columns, or summarizing the right metrics, and querying records from the right tables. -#### Search +Once verified, the question will have a verified icon next to the question's title. -![Search results](./images/sharing-answers/search-results.gif) +![Verified icon](images/sharing-answers/verified-icon.png) -Use the search bar to find dashboards, questions, collections, and pulses. You can select from the typeahead's dropdown results, or hit enter to view a search results page. You can also activate the search bar from anywhere by pressing the `/` key. +Verified questions are also more likely to show up higher in search suggestions and search results. 
-Searches take into account items’ titles, descriptions, and other metadata — you can even search the contents of your SQL queries. For example, you can search for things like `SELECT escape_pod FROM mothership` and find that one question you worked on six months ago. The results will display an item’s description, which collection it’s saved in, what kind of object it is, and whether it’s pinned. Note that you'll only ever see items in collections you have permission to view. +If someone modifies a verified question, the question will lose its verified status, and an administrator will need to review and verify the question again to restore its verified status. -![Search results](./images/sharing-answers/search-results.png) +### Question history -#### Moving +You can see the history of a question, including edits and verifications, in the **History** section of the **Question detail sidebar**. -To move a question, dashboard, or pulse into a collection, or from one collection to another, just click and drag it onto the collection where you want it to go. You can also click on the `…` menu to the right of the question and pick the Move action. If you're trying to move several things at once, click on the items' icons to select them, then click the Move action that pops up at the bottom of the screen. +Below each edit entry in the timeline, you can click on **Revert** to reinstate the question at the time of the edit. -![Selecting questions](images/sharing-answers/question-checkbox.png) +## Sharing questions with public links -Note that you have to have Curate permission for the collection that you're moving a question into _and_ the collection you're moving the question out of. +If your Metabase administrator has enabled [public sharing](../administration-guide/12-public-links.md) on a saved question or dashboard, you can go to that question or dashboard and click on the sharing icon to find its public links. 
Public links can be viewed by anyone, even if they don't have access to Metabase. You can also use the public embedding code to embed your question or dashboard in a simple web page or blog post. -#### Archiving +![Share icon](images/sharing-answers/share-icon.png) -Sometimes questions outlive their usefulness and need to be sent to Question Heaven. To archive a question or dashboard, just click on the `…` menu that appears on the far right when you hover over a question and pick the Archive action. You'll only see that option if you have "curate" permission for the current collection. You can also archive multiple items at once, the same way as you move multiple items. Note that archiving a question removes it from all dashboards or Pulses where it appears, so be careful! +To share a question, click on the arrow pointing up and to the right in the bottom right of the question. -You can also archive _collections_ if you have curate permissions for the collection you're trying to archive, the collection _it's_ inside of, as well as any and all collections inside of _it_. Archiving a collection archives all of its contents as well. +## Setting up alerts -If you have second thoughts and want to bring an archived item back, you can see all your archived questions from the archive; click the menu icon in the top-right of any collection page to get to the archive. To unarchive a question, hover over it and click the unarchive icon that appears on the far right. +You can set up questions to run periodically and notify you if the results are interesting. Check out [Alerts][alerts]. --- -## Next: creating dashboards +## Next: collections + +Next, we'll learn about how to organize our questions in [collections][collections]. -Next, we'll learn about [creating dashboards and adding questions to them](07-dashboards.md). 
+[alerts]: 15-alerts.md +[caching]: ../administration-guide/14-caching.md +[collections]: collections.md +[dashboards]: 07-dashboards.md diff --git a/docs/users-guide/07-dashboards.md b/docs/users-guide/07-dashboards.md index d6df00f1cf57..4b3051242ae7 100644 --- a/docs/users-guide/07-dashboards.md +++ b/docs/users-guide/07-dashboards.md @@ -1,8 +1,9 @@ -## Dashboards +# Dashboards ![Interactive dashboard](images/dashboards/interactive-dashboard.png) Quick links: +<<<<<<< HEAD - [Dashboard filters](08-dashboard-filters.md) - [Dashboard subscriptions](dashboard-subscriptions.md) @@ -15,19 +16,33 @@ Quick links: A dashboard comprises a set of cards arranged on a grid. These cards can be questions - such as [tables, charts, or maps](05-visualizing-results.md) - or they can be [text boxes](/learn/dashboards/markdown.html). +======= + +- [Dashboard filters](08-dashboard-filters.md) +- [Dashboard subscriptions](dashboard-subscriptions.md) +- [Make your dashboards interactive](interactive-dashboards.md) +- [Learn how to build great dashboards](https://www.metabase.com/learn/dashboards/index.html) + +## What is a dashboard? + +**Dashboards** group questions and present them on a single page. You can think of dashboards as shareable reports that feature a set of related questions. You can set up [subscriptions to dashboards](dashboard-subscriptions.md) via email or Slack to receive the exported results of the dashboard's questions. + +A dashboard comprises a set of cards arranged on a grid. These cards can be questions - such as [tables, charts, or maps](05-visualizing-results.md) - or they can be [text boxes](/learn/dashboards/markdown.html). + +>>>>>>> tags/v0.41.0 You can add [filter widgets to dashboards](08-dashboard-filters.md) that filter data identically across multiple questions, and [customize what happens when people click on a chart or a table](interactive-dashboards.md). You can make as many dashboards as you want. Go nuts. 
-### How to create a dashboard
+## How to create a dashboard
 
-In the top right of the screen, click the **+** icon to open the **Create** menu, and select **New Dashboard**. Give your new dashboard a name and a description, choose which [collection](06-sharing-answers.md#collections) the dashboard should go in, then click **Create**, and Metabase will take you to your shiny new dashboard.
+In the top right of the screen, click the **+** icon to open the **Create** menu, and select **New Dashboard**. Give your new dashboard a name and a description, choose which [collection](collections.md) the dashboard should go in, then click **Create**, and Metabase will take you to your shiny new dashboard.
 
 ![Create Dashboard](images/dashboards/DashboardCreate.png)
 
 If you don't want to build a dashboard from scratch, or want to experiment by making changes to an existing dashboard without affecting the original, you can **duplicate** an existing dashboard. From an existing dashboard, click on the **...** menu in the upper right, and select **Duplicate**.
 
-### Adding saved questions to a dashboard
+## Adding saved questions to a dashboard
 
 There are two ways to add questions to a dashboard: from the dashboard, or from the question you want to add.
 
@@ -39,7 +54,7 @@ Once you add a question to your dashboard, it’ll look something like this:
 
 ![First Dashboard](images/dashboards/FirstDashboard.png)
 
-### Adding headings or descriptions with text cards
+## Adding headings or descriptions with text cards
 
 Another neat thing you can do is add text cards to your dashboards. Text cards allow you to include descriptions, explanations, notes, or even images and GIFs to your dashboards. You can also use text cards to create separations between sections of charts in your dashboards, or include links to other dashboards, questions, or websites.
@@ -61,7 +76,7 @@ Click the **eye** icon to see what your formatted Markdown will look like when y To learn more, see [Fun with Markdown in your dashboards](https://www.metabase.com/blog/markdown-in-dashboards/index.html). -### Arranging cards +## Arranging cards Each question on a dashboard is in its own card that you can move around or resize as you see fit. Just click the **pencil** icon in the top right of a dashboard to enter the dashboard's editing interface. @@ -75,13 +90,13 @@ Once you're in edit mode, you'll see a grid appear. You can move and resize the Metabase will automatically update a question's display to make sure your data looks great at any size you choose. -### Changing a question's visualization settings +## Changing a question's visualization settings You can change a question's visualization settings on a dashboard (to add a goal line, for example,) without affecting the original question. Click on the **pencil** icon to enter dashboard edit mode, hover over the question you want to edit, and click on the palette icon to edit the question's visualization's settings. ![Visualization settings](images/dashboards/visualization-settings.png). -### Finding dashboards +## Finding dashboards You can search for any dashboard (or question, collection, or pulse) by its title in the big search box at the top of Metabase. @@ -89,7 +104,7 @@ After a while, your team might accumulate a lot of dashboards. To make it easier ![Pinning a dashboard in a collection](images/dashboards/pinning-dashboard.png) -### Fullscreen dashboards +## Fullscreen dashboards After you've made your ideal dashboard, you may want to put the dashboard on a TV to help keep your team up to date throughout the day. 
@@ -99,7 +114,7 @@ Once you've entered fullscreen mode, you can also switch the dashboard into "Nig
 
 ![Night mode](images/dashboards/DashboardNightMode.png)
 
-### Auto refresh
+## Auto refresh
 
 If your data updates frequently, you can set up your dashboard to refresh automatically by clicking on the **clock** icon.
 
@@ -111,6 +126,14 @@ Enabling auto refresh will re-run all the queries on the dashboard at the interv
 
 Combining fullscreen mode and auto refresh is a great way to keep your team in sync with your data throughout the day.
 
+## Caching dashboards
+
+{% include plans-blockquote.html %}
+
+If your results don't change frequently, you may want to cache your results, that is: store your results in Metabase so that the next time you visit the dashboard, Metabase can retrieve the stored results rather than query the database(s) again. For example, if your data only updates once a day, there's no point in querying the database more than once a day, as the data won't have changed. Returning cached results can be significantly faster, as the database won't have to redo the work to answer your query.
+
+You can set cache duration for a dashboard by clicking on the _..._ > __Edit dashboard details__ > __More options__.
+
 ## Sharing dashboards with public links
 
 If your Metabase administrator has enabled [public sharing](../administration-guide/12-public-links.md) on a saved question or dashboard, you can go to that question or dashboard and click on the **sharing** icon to find its public links.
 
@@ -119,7 +142,7 @@ If your Metabase administrator has enabled [public sharing](../administration-gu
 
 Public links can be viewed by anyone, even if they don't have access to Metabase. You can also use the public embedding code to embed your question or dashboard in a simple web page or blog post. Check out examples of simple apps with embedded dashboards in our [embedding-reference-apps repository](https://github.com/metabase/embedding-reference-apps).
To learn more about [embedding](../administration-guide/13-embedding.md), check out our article on [How to use Metabase to deliver analytics to your customers](https://www.metabase.com/blog/external-facing-analytics/index.html), as well as an article on how to combine branding, Single Sign-On, full app embedding, and data sandboxing to deliver [multi-tenant, self-service analytics](https://www.metabase.com/blog/embedding/index.html). -### Configuring a dashboard through its URL +## Configuring a dashboard through its URL You can amend the URL of a dashboard to automatically enter fullscreen, enable night mode, or auto-refresh the dashboard. Customizing the dashboard's URL allows you to configure the dashboard - even when you do not have any input access to the device where the dashboard will be displayed, like scripted screens, for example. @@ -131,10 +154,10 @@ The part that says `refresh=60` sets the dashboard to automatically refresh ever There is one important limitation with the `fullscreen` option: for security reasons, many browsers require user interaction to initiate fullscreen. In those browsers, using the `fullscreen` option will enable the fullscreen UI in Metabase, but it won't expand the browser content to fill the screen. To ensure the dashboard occupies the entire screen, either activate fullscreen by clicking the button in the UI, or use the `fullscreen` URL option and launch the browser in fullscreen or kiosk mode. -### Archiving a dashboard +## Archiving a dashboard Archiving a dashboard removes the dashboard from searches and collections. Archiving a dashboard does not archive the individual saved questions on it — it just archives the dashboard. - + To archive a dashboard, click the **pencil** icon to enter edit mode, then click the **...** menu, and select **Archive**. To view all archived items, click the **menu** icon in the top-right of any collection page. 
You can **unarchive** a dashboard by clicking the icon of the box with the upward arrow next to that dashboard. @@ -145,7 +168,7 @@ To make a great dashboard, you first need to decide what you want the dashboard Some tips: -- **Emphasize the most important questions**. To draw people’s attention to what matters most, place the most important saved question cards near the top of the dashboard, and/or make them bigger than the other cards, +- **Emphasize the most important questions**. To draw people’s attention to what matters most, place the most important saved question cards near the top of the dashboard, and/or make them bigger than the other cards, - **Keep dashboards focused**. If you have more than 10 cards on a dashboard, think about breaking the dashboard into two separate ones. You don't want to overwhelm people with too much information, and each dashboard should revolve around one theme or topic. Remember — you can make as many dashboards as you want, so you don’t have to cram everything into just one. - **Add filters to your dashboard**. [Adding filters](08-dashboard-filters.md) to dashboards makes them more useful. For example, instead of your dashboard being full of questions that are restricted to a specific time span, you can make more general questions and use dashboard filters to change the time span you're looking at. - **Make your dashboards interactive.** [Customize what happens when users click on a chart or table in your dashboard](interactive-dashboards.md). diff --git a/docs/users-guide/collections.md b/docs/users-guide/collections.md new file mode 100644 index 000000000000..f19ac9e2aa5a --- /dev/null +++ b/docs/users-guide/collections.md @@ -0,0 +1,59 @@ +## Collections + + After your team has been using Metabase for a while, you’ll probably end up with lots of saved questions. + +![Our analytics](images/collections/our-analytics-page.png) + +Collections are the main way to organize questions, as well as dashboards and pulses. 
You can think of them like folders or directories. You can nest collections in other collections, and move collections around. One thing to note is that a single item, like a question or dashboard, can only be in one collection at a time (excluding parent collections). + +### Collection types + +- **Regular collections**. They're just basic collections. You can put stuff in them. +- **Official collections**. These are special collections, in that they have a badge to let people know that the items in this collection are the ones people should be looking at (or whatever "official" means to you). Questions and dashboards in official collections are also more likely to show up at the top of search results. + +![Official collections](images/collections/official-collection.png) + +### Collection permissions + +[Administrators can give you different kinds of access](../administration-guide/06-collections.md) to each collection: + +- **View access:** you can see the collection and its contents, but you can't modify anything or put anything new into the collection. +- **Curate access:** you can edit, move, or archive the collection and its contents. You can also move or save new things in it and create new collections inside of it, and can also pin items in the collection to the top of the screen. Only administrators can edit permissions for collections, however. +- **No access:** you can't see the collection or its contents. If you have access to a dashboard, but it contains questions that are saved in a collection you don't have access to, those questions will show a permissions notification instead of the chart or table. + +### Your personal collection + +In addition to the collections you and your teammates have made, you'll also always have your own personal collection that only you and administrators can see. To find it, click on the "browse all items" button on the homepage and click on "my personal collection" in the list of collections. 
+ +You can use your personal collection as a scratch space to put experiments and explorations that you don't think would be particularly interesting to the rest of your team, or as a work-in-progress space where you can work on things and then move them to a shared place once they're ready. + +### Pinned items + +![Pins](images/collections/pinned-items.png) + +In each collection, you can pin important or useful dashboards or questions to make them stick to the top of the screen. Pinned items will also be displayed as large cards to make them stand out well. If you have Curate permissions for a collection, you can pin and un-pin things, and drag and drop pins to change their order. + +Any dashboards that are pinned in the main "Our analytics" collection will also show up on the homepage. + +### Moving items from collection to collection + +To move a question, dashboard, or pulse into a collection, or from one collection to another, just click and drag it onto the collection where you want it to go. You can also click on the `…` menu to the right of the question and pick the Move action. If you're trying to move several things at once, click on the items' icons to select them, then click the Move action that pops up at the bottom of the screen. + +![Selecting questions](images/collections/question-checkbox.png) + +Note that you have to have Curate permission for the collection that you're moving a question into _and_ the collection you're moving the question out of. + +### Archiving items + +Sometimes questions outlive their usefulness and need to be sent to Question Heaven. To archive a question or dashboard, just click on the `…` menu that appears on the far right when you hover over a question and pick the Archive action. You'll only see that option if you have "curate" permission for the current collection. You can also archive multiple items at once, the same way as you move multiple items. 
Note that archiving a question removes it from all dashboards or Pulses where it appears, so be careful! + +You can also archive _collections_ if you have curate permissions for the collection you're trying to archive, the collection _it's_ inside of, as well as any and all collections inside of _it_. Archiving a collection archives all of its contents as well. + +If you have second thoughts and want to bring an archived item back, you can see all your archived questions from the archive; click the menu icon in the top-right of any collection page to get to the archive. To unarchive a question, hover over it and click the unarchive icon that appears on the far right. + +## Next: creating dashboards + +Next, we'll learn about [creating dashboards and adding questions to them][dashboards]. + +[dashboards]: 07-dashboards.md + diff --git a/docs/users-guide/custom-questions.md b/docs/users-guide/custom-questions.md index 67c92e141a43..511e7d11e4ad 100644 --- a/docs/users-guide/custom-questions.md +++ b/docs/users-guide/custom-questions.md @@ -1,8 +1,8 @@ -## Creating custom questions with the notebook editor +# Creating custom questions with the notebook editor If you have a question that's a bit more involved than a [simple question](04-asking-questions.md), you can create a custom question using the notebook editor. You can get there by clicking the Ask a Question button in the top nav bar and selecting Custom Question. If you started from a Simple question or a saved question, you can get back to the custom question notebook editor by clicking the icon in the top-right of the screen. -### The parts of the notebook +## The parts of the notebook ![The notebook](./images/notebook/notebook-ui.png) @@ -10,7 +10,7 @@ The notebook is made up of a sequence of individual steps. 
Under each step you'l ![Previewing results](./images/notebook/preview-table.png) -#### Picking your starting data +### Picking your starting data This first step is required, and is where you pick the data that you want to base your question on. In most cases you'll pick one of the tables in your database, but you can also choose a previously saved question's result as the starting point for your new question. What this means in practice is that you can do things like use complex SQL queries to create new tables that can be used as starting data in a question just like any other table in your database. @@ -24,7 +24,7 @@ There are some kinds of saved questions that can't be used as source data: - questions that use `Cumulative Sum` or `Cumulative Count` aggregations - questions that have columns that are named the same or similar thing, like `Count` and `Count 2` -#### Filtering +### Filtering ![Filtering](./images/notebook/filter-step.png) @@ -38,7 +38,7 @@ You can add subsequent filter steps after every Summarize step. This lets you do If you have a more complex filter you're trying to express, you can pick "Custom Expression" from the add-filter menu create a filter expression. You can use comparison operators like greater than (>) or less than (<), as well as spreadsheet-like functions. For example, `[Subtotal] > 100 OR median([Age]) < 40`. [Learn more about writing expressions](./expressions.md) -#### Summarizing +### Summarizing ![Summarizing](./images/notebook/summarize-step.png) @@ -48,13 +48,13 @@ If you summarize and add a grouping you can then summarize _again_. You can also ![Multiple summarize steps](./images/notebook/multiple-summarize-steps.png) -**Custom expressions** +### Custom expressions ![Custom expression](./images/expressions/aggregation-expression.png) Custom expressions allow you to use spreadsheet-like functions and simple arithmetic within or between aggregation functions. 
For example, you could do `Average(sqrt[FieldX]) + Sum([FieldY])` or `Max(floor([FieldX] - [FieldY]))`, where `FieldX` and `FieldY` are fields in the currently selected table. [Learn more about writing expressions](./expressions.md) -#### Creating custom columns +### Creating custom columns ![Custom column](./images/expressions/custom-column.png) @@ -62,17 +62,17 @@ Custom columns are helpful when you need to create a new column based on a calcu You can use the following math operators in your formulas: `+`, `–`, `*` (multiplication), and `/` (division), along with a whole host of spreadsheet-like functions. You can also use parentheses to clarify the order of operations. You can [learn more about writing expressions here](./expressions.md). -#### Sorting results +### Sorting results ![Sorting](./images/notebook/sort-step.png) The sorting step lets you pick one or more columns to sort your results by. For each column you pick, you can also choose whether to sort ascending or descending; just click the arrow to change from ascending (up arrow) to descending (down arrow). -#### Setting a row limit +### Setting a row limit The row limit step lets you limit how many rows you want from the previous results. When used in conjunction with sorting, this can let you do things like create a top-10 list, by first sorting by one of the columns in your result, then adding a row limit of 10. Unlike other steps, the row limit step can only be added at the end of your question. -#### Joining data +### Joining data ![Joining](./images/notebook/join-step.png) @@ -97,7 +97,7 @@ Here are the basic types of joins: **A left outer join example:** If Table A is Orders and Table B is Customers, and assuming you do a join where the `customer_id` column in Orders is equal to the `ID` column in Customers, when you do a left outer join your results will be a full list of all your orders, and each order row will also display the columns of the customer who placed that order. 
Since a single customer can place many orders, a given customer's information might be repeated many times for different order rows. If there isn't a corresponding customer for a given order, the order's information will be shown, but the customer columns will just be blank for that row. -##### Multiple stages of joins +#### Multiple stages of joins In many cases you might have tables A, B, and C, where A and B have a connection, and B and C have a connection, but A and C don't. If you want to join A to B to C, all you have to do is add multiple join steps. Click on Join Data, join table A to table B, then click the Join Data step below that completed join block to add a second join step, and join the results of your last join to table C. @@ -105,6 +105,12 @@ In many cases you might have tables A, B, and C, where A and B have a connection See [Joins in Metabase](https://www.metabase.com/blog/joining-tables/index.html) to learn more. +#### Joining on multiple conditions + +Your joins can also include multiple conditions to refine your results. Metabase will combine multiple conditions using the `AND` operator. + +![Joining tables on multiple columns](./images/notebook/joining-on-multiple-columns.png) + ### Viewing the SQL that powers your question Under the hood, all Metabase questions are SQL (gasp!). If you're curious to see the SQL that will get run when you ask your question, you can click the little console icon in the top-right of the notebook editor. In the modal that opens up, you'll also be given the option to start a new query in the SQL editor, using this generated SQL as a starting point. It's a nice little shortcut to have Metabase write some boilerplate SQL for you, but then allows you to tweak and customize the query. 
diff --git a/docs/users-guide/images/sharing-answers/search-results.gif b/docs/users-guide/images/basic-exploration/search-results.gif similarity index 100% rename from docs/users-guide/images/sharing-answers/search-results.gif rename to docs/users-guide/images/basic-exploration/search-results.gif diff --git a/docs/users-guide/images/sharing-answers/search-results.png b/docs/users-guide/images/basic-exploration/search-results.png similarity index 100% rename from docs/users-guide/images/sharing-answers/search-results.png rename to docs/users-guide/images/basic-exploration/search-results.png diff --git a/docs/users-guide/images/collections/official-collection.png b/docs/users-guide/images/collections/official-collection.png new file mode 100644 index 000000000000..4203643e894d Binary files /dev/null and b/docs/users-guide/images/collections/official-collection.png differ diff --git a/docs/users-guide/images/sharing-answers/our-analytics-page.png b/docs/users-guide/images/collections/our-analytics-page.png similarity index 100% rename from docs/users-guide/images/sharing-answers/our-analytics-page.png rename to docs/users-guide/images/collections/our-analytics-page.png diff --git a/docs/users-guide/images/sharing-answers/pinned-items.png b/docs/users-guide/images/collections/pinned-items.png similarity index 100% rename from docs/users-guide/images/sharing-answers/pinned-items.png rename to docs/users-guide/images/collections/pinned-items.png diff --git a/docs/users-guide/images/sharing-answers/question-checkbox.png b/docs/users-guide/images/collections/question-checkbox.png similarity index 100% rename from docs/users-guide/images/sharing-answers/question-checkbox.png rename to docs/users-guide/images/collections/question-checkbox.png diff --git a/docs/users-guide/images/download-button.png b/docs/users-guide/images/download-button.png deleted file mode 100644 index 20e449cf7ecf..000000000000 Binary files a/docs/users-guide/images/download-button.png and /dev/null 
differ diff --git a/docs/users-guide/images/notebook/duplicate-question.png b/docs/users-guide/images/notebook/duplicate-question.png deleted file mode 100644 index 3328760eefe3..000000000000 Binary files a/docs/users-guide/images/notebook/duplicate-question.png and /dev/null differ diff --git a/docs/users-guide/images/notebook/join-a-b-c.png b/docs/users-guide/images/notebook/join-a-b-c.png index 728273c982a9..b34d61f7173e 100644 Binary files a/docs/users-guide/images/notebook/join-a-b-c.png and b/docs/users-guide/images/notebook/join-a-b-c.png differ diff --git a/docs/users-guide/images/notebook/joining-on-multiple-columns.png b/docs/users-guide/images/notebook/joining-on-multiple-columns.png new file mode 100644 index 000000000000..217c0838a9b9 Binary files /dev/null and b/docs/users-guide/images/notebook/joining-on-multiple-columns.png differ diff --git a/docs/users-guide/images/notebook/notebook-ui.png b/docs/users-guide/images/notebook/notebook-ui.png index 03a94b8fb0d5..19547c84ce1a 100644 Binary files a/docs/users-guide/images/notebook/notebook-ui.png and b/docs/users-guide/images/notebook/notebook-ui.png differ diff --git a/docs/users-guide/images/sharing-answers/question-details-sidebar.png b/docs/users-guide/images/sharing-answers/question-details-sidebar.png new file mode 100644 index 000000000000..da8eccc711bf Binary files /dev/null and b/docs/users-guide/images/sharing-answers/question-details-sidebar.png differ diff --git a/docs/users-guide/images/sharing-answers/verified-icon.png b/docs/users-guide/images/sharing-answers/verified-icon.png new file mode 100644 index 000000000000..7ab54270f31a Binary files /dev/null and b/docs/users-guide/images/sharing-answers/verified-icon.png differ diff --git a/docs/users-guide/start.md b/docs/users-guide/start.md index f5a3368cdb02..5a75cb6c2092 100644 --- a/docs/users-guide/start.md +++ b/docs/users-guide/start.md @@ -17,6 +17,7 @@ **Sharing results** - [Sharing and organizing your saved 
questions](06-sharing-answers.md) +- [Collections](collections.md) - [Creating dashboards](07-dashboards.md) - [Adding filters to dashboards](08-dashboard-filters.md) - [Making dashboards interactive](interactive-dashboards.md) diff --git a/enterprise/README.md b/enterprise/README.md index 272bec096ade..6295a227d0ce 100644 --- a/enterprise/README.md +++ b/enterprise/README.md @@ -19,28 +19,15 @@ MB_EDITION=ee yarn build-hot ### Back-end -You need to add the `:ee` profile to the leiningen command to run Metabase Enterprise Edition. +You need to add the `:ee` alias to the Clojure CLI command to run Metabase Enterprise Edition. ```clj -lein with-profile +ee run -``` - -```clj -lein with-profile +ee uberjar -``` - -```clj -lein with-profile +ee repl -``` - -In Emacs/CIDER you can customize the `lein repl` command used to start the REPL by passing a prefix argument, e.g. - -```emacs-lisp -C-u M-x cider-jack-in -``` +# Start a local Metabase server that includes EE sources +clojure -M:ee:run -or, programatically: +# start a REPL that includes EE sources. +clojure -A:ee -```emacs-lisp -(cider-jack-in '(4)) +# start a REPL that includes EE sources & test namespaces. +clojure -A:dev:ee:ee-dev ``` diff --git a/enterprise/backend/README.md b/enterprise/backend/README.md new file mode 100644 index 000000000000..ae772308474d --- /dev/null +++ b/enterprise/backend/README.md @@ -0,0 +1,45 @@ +### EE Code Structure Notes + +EE namespaces follow a pattern that works like this: + +EE namespace = take the equivalent OSS namespace and replace `metabase.` with `metabase-enterprise.<feature>.` where +`<feature>` is the premium token feature that one must have to use this feature. + +For example, Sandboxing-related API endpoints for Tables go in `metabase-enterprise.sandboxes.api.table` and +Sandboxing-related models (e.g. GTAP) go in `metabase-enterprise.sandboxes.models`. Sandboxing-specific code for +existing models follows this same pattern, e.g.
Sandboxing-specific code for Tables goes in +`metabase-enterprise.sandboxes.models.table`. + +Groups of API routes should be defined in namespaces like we do in OSS, for example +`metabase-enterprise.content-management.api.review` for ModerationReview-related endpoints. All endpoints for a +specific feature are combined into a single `routes` handler in a `metabase-enterprise.<feature>.api.routes` namespace +similar to how OSS routes are combined in `metabase.api.routes`. Finally, all EE routes are combined into a single +handler in `metabase-enterprise.api.routes`; this handler is included in `metabase.api.routes/routes` if EE code is +available. + +Please keep these rules in mind when adding new EE namespaces. In general, new namespaces **SHOULD NOT** be added +directly under `metabase-enterprise` unless they apply to the Enterprise codebase as a whole; put them under the +appropriate `metabase-enterprise.<feature>` directory instead. + +### Naming EE API routes + +To make things consistent, EE-only API routes should follow the same pattern and be given route names that correspond +to their namespaces (i.e., are prefixed with `ee/<feature>/`). For example, an `:advanced-config`-only +route to delete User subscriptions should be named something like + +``` +DELETE /api/ee/advanced-config/user/:id/subscriptions +``` + +rather than + +``` +DELETE /api/user/:id/subscriptions +``` + +Not all EE endpoints follow this pattern yet, but they should; please feel free to fix stuff as you come across it if +I don't get to it first. + +### Questions :interrobang: + +Ping me (`@cam`) if you have any questions.
diff --git a/enterprise/backend/src/metabase_enterprise/advanced_config/models/pulse_channel.clj b/enterprise/backend/src/metabase_enterprise/advanced_config/models/pulse_channel.clj new file mode 100644 index 000000000000..58f544822b31 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/advanced_config/models/pulse_channel.clj @@ -0,0 +1,43 @@ +(ns metabase-enterprise.advanced-config.models.pulse-channel + (:require [clojure.string :as str] + [metabase.models.setting :as setting :refer [defsetting]] + [metabase.public-settings.premium-features :as premium-features] + [metabase.util :as u] + [metabase.util.i18n :refer [deferred-tru tru]])) + +(defsetting subscription-allowed-domains + (deferred-tru "Allowed email address domain(s) for new DashboardSubscriptions and Alerts. Does not affect existing subscriptions.") + :visibility :public + ;; this is a comma-separated string but we're not using `:csv` because it gets serialized to an array which makes it + ;; inconvenient to use on the frontend. + :type :string) + +(defn- allowed-domains-set + "Parse [[subscription-allowed-domains]] into a set. `nil` if the Setting is not set or empty." + [] + (some-> (subscription-allowed-domains) + (str/split #",") + set + not-empty)) + +(defn validate-email-domains + "Check that `email-addresses` associated with a [[metabase.models.pulse-channel]] are allowed based on the value of + the [[subscription-allowed-domains]] Setting, if set. This function no-ops if `subscription-allowed-domains` is + unset or if we do not have a premium token with the `:advanced-config` feature. + + This function is called by [[metabase.models.pulse-channel/validate-email-domains]] when Pulses are created and + updated." + [email-addresses] + (when (premium-features/enable-advanced-config?) + (when-let [allowed-domains (allowed-domains-set)] + (doseq [email email-addresses + :let [domain (u/email->domain email)]] + (assert (u/email? 
email) + (tru "Invalid email address: {0}" (pr-str email))) + (when-not (contains? allowed-domains domain) + (throw (ex-info (tru "You cannot create new subscriptions for the domain {0}. Allowed domains are: {1}" + (pr-str domain) + (str/join ", " allowed-domains)) + {:email email + :allowed-domains allowed-domains + :status-code 403}))))))) diff --git a/enterprise/backend/src/metabase_enterprise/api/routes.clj b/enterprise/backend/src/metabase_enterprise/api/routes.clj new file mode 100644 index 000000000000..836fde5ad86d --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/api/routes.clj @@ -0,0 +1,26 @@ +(ns metabase-enterprise.api.routes + "API routes that are only available when running Metabase® Enterprise Edition™. Even tho these routes are available, + not all routes might work unless we have a valid premium features token to enable those features. + + These routes should generally live under prefixes like `/api/ee/<feature>/` -- see the + `enterprise/backend/README.md` for more details." + (:require [compojure.core :as compojure] + [metabase-enterprise.api.routes.common :as ee.api.common] + [metabase-enterprise.audit-app.api.routes :as audit-app] + [metabase-enterprise.content-management.api.routes :as content-management] + [metabase-enterprise.sandbox.api.routes :as sandbox])) + +(compojure/defroutes ^{:doc "API routes only available when running Metabase® Enterprise Edition™."} routes + ;; The following routes are NAUGHTY and do not follow the naming convention (i.e., they do not start with + ;; `/ee/<feature>/`). + ;; + ;; TODO -- Please fix them! + content-management/routes + sandbox/routes + ;; The following routes are NICE and do follow the `/ee/<feature>/` naming convention. Please add new routes here + ;; and follow the convention.
+ (compojure/context + "/ee" [] + (compojure/context + "/audit-app" [] + (ee.api.common/+require-premium-feature :audit-app audit-app/routes)))) diff --git a/enterprise/backend/src/metabase_enterprise/api/routes/common.clj b/enterprise/backend/src/metabase_enterprise/api/routes/common.clj new file mode 100644 index 000000000000..436478e56633 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/api/routes/common.clj @@ -0,0 +1,43 @@ +(ns metabase-enterprise.api.routes.common + "Shared stuff used by various EE-only API routes." + (:require [metabase.public-settings.premium-features :as premium-features] + [metabase.util.i18n :refer [tru]])) + +(defn +require-premium-feature + "Wraps Ring `handler`. Check that we have a premium token with `feature` (a keyword; see [[metabase.public-settings.premium-features]] for a + current known features) or return a 401 if it is not. + + (context \"/whatever\" [] (+require-premium-feature :sandboxes whatever/routes)) + + Very important! Make sure you only wrap handlers inside [[compojure.core/context]] forms with this middleware (as in + example above). Otherwise it can end up causing requests the handler would not have handled anyway to fail. + Use [[when-premium-feature]] instead if you want the handler to apply if we have the premium feature but pass-thru + if we do not." + [feature handler] + (fn [request respond raise] + (if-not (premium-features/has-feature? feature) + (respond {:body (tru "This API endpoint is only enabled if you have a premium token with the {0} feature." + feature) + ;; 402 Payment Required + :status 402}) + (handler request respond raise)))) + +(defn ^:deprecated +when-premium-feature + "Wraps Ring `handler`. Only applies handler if we have a premium token with `feature`; if not, passes thru to the next + handler. + + (+when-premium-feature :sandboxes (+auth table/routes)) + + This is typically used to _replace_ OSS versions of API endpoints with special implementations that live in EE-land. 
+ If the endpoint **only** exists in EE you should use [[+require-premium-feature]] instead which will give the API + user a useful error message if the endpoint is not available because they do not have the token feature in + question, rather than a generic 'endpoint does not exist' 404 error. + + In general, it's probably better NOT to swap out API endpoints, because it's not obvious at all that it happened, + and it makes it hard for us to nicely structure our contexts in [[metabase-enterprise.api.routes/routes]]. So only + do this if there's absolutely no other way (which is probably not the case)." + [feature handler] + (fn [request respond raise] + (if-not (premium-features/has-feature? feature) + (respond nil) + (handler request respond raise)))) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/database_detail.clj b/enterprise/backend/src/metabase_enterprise/audit/pages/database_detail.clj deleted file mode 100644 index bed468bd5979..000000000000 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/database_detail.clj +++ /dev/null @@ -1,35 +0,0 @@ -(ns metabase-enterprise.audit.pages.database-detail - (:require [metabase-enterprise.audit.pages.common :as common] - [metabase.util.schema :as su] - [ring.util.codec :as codec] - [schema.core :as s])) - -(s/defn ^:internal-query-fn audit-log - [database-id :- su/IntGreaterThanZero] - {:metadata [[:started_at {:display_name "Viewed on", :base_type :type/DateTime}] - [:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :query}] - [:query_hash {:display_name "Query Hash", :base_type :type/Text}] - [:query {:display_name "Query", :base_type :type/Text, :remapped_from :card_id}] - [:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :user}] - [:user {:display_name "Queried by", :base_type :type/Text, :remapped_from :user_id}] - [:schema {:display_name "Schema", :base_type :type/Text}] - [:table_id {:display_name "Table ID", :base_type 
:type/Integer, :remapped_to :table}] - [:table {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}]] - :results (common/reducible-query - {:select [:qe.started_at - [:card.id :card_id] - [:qe.hash :query_hash] - [(common/card-name-or-ad-hoc :card) :query] - [:u.id :user_id] - [(common/user-full-name :u) :user] - :t.schema - [:t.id :table_id] - [:t.name :table]] - :from [[:query_execution :qe]] - :where [:= :qe.database_id database-id] - :join [[:metabase_database :db] [:= :db.id :qe.database_id] - [:core_user :u] [:= :qe.executor_id :u.id]] - :left-join [[:report_card :card] [:= :qe.card_id :card.id] - [:metabase_table :t] [:= :card.table_id :t.id]] - :order-by [[:qe.started_at :desc]]}) - :xform (map #(update (vec %) 2 codec/base64-encode))}) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/queries.clj b/enterprise/backend/src/metabase_enterprise/audit/pages/queries.clj deleted file mode 100644 index 9c4344ae2288..000000000000 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/queries.clj +++ /dev/null @@ -1,134 +0,0 @@ -(ns metabase-enterprise.audit.pages.queries - (:require [metabase-enterprise.audit.pages.common :as common] - [metabase-enterprise.audit.pages.common.cards :as cards] - [metabase.util.honeysql-extensions :as hx] - [schema.core :as s])) - -(defn ^:internal-query-fn ^:deprecated views-and-avg-execution-time-by-day - "Query that returns data for a two-series timeseries chart with number of queries ran and average query running time - broken out by day." - [] - {:metadata [[:day {:display_name "Date", :base_type :type/Date}] - [:views {:display_name "Views", :base_type :type/Integer}] - [:avg_running_time {:display_name "Avg. 
Running Time (ms)", :base_type :type/Decimal}]] - :results (common/reducible-query - {:select [[(hx/cast :date :started_at) :day] - [:%count.* :views] - [:%avg.running_time :avg_running_time]] - :from [:query_execution] - :group-by [(hx/cast :date :started_at)] - :order-by [[(hx/cast :date :started_at) :asc]]})}) - -(defn ^:internal-query-fn most-popular - "Query that returns the 10 most-popular Cards based on number of query executions, in descending order." - [] - {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] - [:card_name {:display_name "Card", :base_type :type/Title, :remapped_from :card_id}] - [:executions {:display_name "Executions", :base_type :type/Integer}]] - :results (common/reducible-query - {:select [[:c.id :card_id] - [:c.name :card_name] - [:%count.* :executions]] - :from [[:query_execution :qe]] - :join [[:report_card :c] [:= :qe.card_id :c.id]] - :group-by [:c.id] - :order-by [[:executions :desc]] - :limit 10})}) - -(defn ^:internal-query-fn ^:deprecated slowest - "Query that returns the 10 slowest-running Cards based on average query execution time, in descending order." - [] - {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] - [:card_name {:display_name "Card", :base_type :type/Title, :remapped_from :card_id}] - [:avg_running_time {:display_name "Avg. Running Time (ms)", :base_type :type/Decimal}]] - :results (common/reducible-query - {:select [[:c.id :card_id] - [:c.name :card_name] - [:%avg.running_time :avg_running_time]] - :from [[:query_execution :qe]] - :join [[:report_card :c] [:= :qe.card_id :c.id]] - :group-by [:c.id] - :order-by [[:avg_running_time :desc]] - :limit 10})}) - -(s/defn ^:internal-query-fn table - "A list of all questions. - - Three possible argument lists. All arguments are always nullable. 
- - [] : - Dump them all, sort by name ascending - - - [questionFilter] : - Dump all filtered by the questionFilter string, sort by name ascending. - questionFilter filters on the `name` column in `cards` table. - - - [questionFilter, collectionFilter, sortColumn, sortDirection] : - Dump all filtered by both questionFilter and collectionFilter, - sort by the given column and sort direction. - questionFilter filters on the `name` column in `cards` table. - collectionFilter filters on the `name` column in `collections` table. - - Sort column is given over in keyword form to honeysql. Default `card.name` - - Sort direction can be `asc` or `desc`, ascending and descending respectively. Default `asc`. - - All inputs have to be strings because that's how the magic middleware - that turns these functions into clojure-backed 'datasets' works." - ([] - (table nil nil nil nil)) - ([questionFilter :- (s/maybe s/Str)] - (table questionFilter nil nil nil)) - ([questionFilter :- (s/maybe s/Str) - collectionFilter :- (s/maybe s/Str) - sortColumn :- (s/maybe s/Str) - sortDirection :- (s/maybe (s/enum "asc" "desc"))] - {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] - [:card_name {:display_name "Name", :base_type :type/Name, :remapped_from :card_id}] - [:collection_id {:display_name "Collection ID", :base_type :type/Integer, :remapped_to :collection_name}] - [:collection_name {:display_name "Collection", :base_type :type/Text, :remapped_from :collection_id}] - [:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] - [:database_name {:display_name "Database", :base_type :type/Text, :remapped_from :database_id}] - [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table_name}] - [:table_name {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}] - [:user_id {:display_name "Created By ID", :base_type :type/Integer, :remapped_to :user_name}] - 
[:user_name {:display_name "Created By", :base_type :type/Text, :remapped_from :user_id}] - [:public_link {:display_name "Public Link", :base_type :type/URL}] - [:cache_ttl {:display_name "Cache Duration", :base_type :type/Number}] - [:avg_exec_time {:display_name "Average Runtime (ms)", :base_type :type/Integer}] - [:total_runtime {:display_name "Total Runtime (ms)", :base_type :type/Number}] - [:query_runs {:display_name "Query Runs", :base_type :type/Integer}] - ] - :results (common/reducible-query - (-> - {:with [cards/avg-exec-time-45 - cards/total-exec-time-45 - cards/query-runs-45] - :select [[:card.id :card_id] - [:card.name :card_name] - :collection_id - [:coll.name :collection_name] - :card.database_id - [:db.name :database_name] - :card.table_id - [:t.name :table_name] - [:card.creator_id :user_id] - [(common/user-full-name :u) :user_name] - [(common/card-public-url :card.public_uuid) :public_link] - :card.cache_ttl - [:avg_exec_time.avg_running_time_ms :avg_exec_time] - [:total_runtime.total_running_time_ms :total_runtime] - [:query_runs.count :query_runs]] - :from [[:report_card :card]] - :left-join [[:collection :coll] [:= :card.collection_id :coll.id] - [:metabase_database :db] [:= :card.database_id :db.id] - [:metabase_table :t] [:= :card.table_id :t.id] - [:core_user :u] [:= :card.creator_id :u.id] - :avg_exec_time [:= :card.id :avg_exec_time.card_id] - :total_runtime [:= :card.id :total_runtime.card_id] - :query_runs [:= :card.id :query_runs.card_id]] - :where [:= :card.archived false]} - (common/add-search-clause questionFilter :card.name) - (common/add-search-clause collectionFilter :coll.name) - (common/add-sort-clause - (or sortColumn "card.name") - (or sortDirection "asc"))))})) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/query_detail.clj b/enterprise/backend/src/metabase_enterprise/audit/pages/query_detail.clj deleted file mode 100644 index 46e190e068c4..000000000000 --- 
a/enterprise/backend/src/metabase_enterprise/audit/pages/query_detail.clj +++ /dev/null @@ -1,19 +0,0 @@ -(ns metabase-enterprise.audit.pages.query-detail - "Queries to show details about a (presumably ad-hoc) query." - (:require [cheshire.core :as json] - [metabase-enterprise.audit.pages.common :as common] - [metabase.util.schema :as su] - [ring.util.codec :as codec] - [schema.core :as s])) - -(s/defn ^:internal-query-fn details - [query-hash :- su/NonBlankString] - {:metadata [[:query {:display_name "Query", :base_type :type/Dictionary}] - [:average_execution_time {:display_name "Avg. Exec. Time (ms)", :base_type :type/Number}]] - :results (common/reducible-query - {:select [:query - :average_execution_time] - :from [:query] - :where [:= :query_hash (codec/base64-decode query-hash)] - :limit 1}) - :xform (map #(update (vec %) 0 json/parse-string))}) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/question_detail.clj b/enterprise/backend/src/metabase_enterprise/audit/pages/question_detail.clj deleted file mode 100644 index db1427cbcc42..000000000000 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/question_detail.clj +++ /dev/null @@ -1,32 +0,0 @@ -(ns metabase-enterprise.audit.pages.question-detail - "Detail page for a single Card (Question)." - (:require [metabase-enterprise.audit.pages.common :as common] - [metabase-enterprise.audit.pages.common.card-and-dashboard-detail :as card-and-dash-detail] - [metabase.models.card :refer [Card]] - [metabase.util.schema :as su] - [schema.core :as s])) - -(s/defn ^:internal-query-fn views-by-time - "Get views of a Card broken out by a time `unit`, e.g. `day` or `day-of-week`." - [card-id :- su/IntGreaterThanZero, datetime-unit :- common/DateTimeUnitStr] - (card-and-dash-detail/views-by-time "card" card-id datetime-unit)) - -(s/defn ^:internal-query-fn cached-views-by-time - "Get cached views of a Card broken out by a time `unit`, e.g. `day` or `day-of-week`." 
- [card-id :- su/IntGreaterThanZero, datetime-unit :- common/DateTimeUnitStr] - (card-and-dash-detail/cached-views-by-time card-id datetime-unit)) - -(s/defn ^:internal-query-fn revision-history - "Get the revision history for a Card." - [card-id :- su/IntGreaterThanZero] - (card-and-dash-detail/revision-history Card card-id)) - -(s/defn ^:internal-query-fn audit-log - "Get a view log for a Card." - [card-id :- su/IntGreaterThanZero] - (card-and-dash-detail/audit-log "card" card-id)) - -(s/defn ^:internal-query-fn avg-execution-time-by-time - "Average execution time broken out by period" - [card-id :- su/IntGreaterThanZero, datetime-unit :- common/DateTimeUnitStr] - (card-and-dash-detail/avg-execution-time-by-time card-id datetime-unit)) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/api/routes.clj b/enterprise/backend/src/metabase_enterprise/audit_app/api/routes.clj new file mode 100644 index 000000000000..575cc6ae8ff8 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/api/routes.clj @@ -0,0 +1,9 @@ +(ns metabase-enterprise.audit-app.api.routes + "API endpoints that are only enabled if we have a premium token with the `:audit-app` feature. These live under + `/api/ee/audit-app/`. Feature-flagging for these routes happens in [[metabase-enterprise.api.routes/routes]]." + (:require [compojure.core :as compojure] + [metabase-enterprise.audit-app.api.user :as user] + [metabase.api.routes.common :refer [+auth]])) + +(compojure/defroutes ^{:doc "Ring routes for mt API endpoints."} routes + (compojure/context "/user" [] (+auth user/routes))) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/api/user.clj b/enterprise/backend/src/metabase_enterprise/audit_app/api/user.clj new file mode 100644 index 000000000000..a47ffda1bff8 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/api/user.clj @@ -0,0 +1,22 @@ +(ns metabase-enterprise.audit-app.api.user + "`/api/ee/audit-app/user` endpoints. 
These only work if you have a premium token with the `:audit-app` feature." + (:require [compojure.core :refer [DELETE]] + [metabase.api.common :as api] + [metabase.api.user :as api.user] + [metabase.models.pulse :refer [Pulse]] + [metabase.models.pulse-channel-recipient :refer [PulseChannelRecipient]] + [toucan.db :as db])) + +(api/defendpoint DELETE "/:id/subscriptions" + "Delete all Alert and DashboardSubscription subscriptions for a User (i.e., so they will no longer receive them). + Archive all Alerts and DashboardSubscriptions created by the User. Only allowed for admins or for the current user." + [id] + (api.user/check-self-or-superuser id) + ;; delete all `PulseChannelRecipient` rows for this User, which means they will no longer receive any + ;; Alerts/DashboardSubscriptions + (db/delete! PulseChannelRecipient :user_id id) + ;; archive anything they created. + (db/update-where! Pulse {:creator_id id, :archived false} :archived true) + api/generic-204-no-content) + +(api/define-routes) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/interface.clj b/enterprise/backend/src/metabase_enterprise/audit_app/interface.clj new file mode 100644 index 000000000000..dbc53513c2c0 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/interface.clj @@ -0,0 +1,35 @@ +(ns metabase-enterprise.audit-app.interface + (:require [metabase.plugins.classloader :as classloader] + [metabase.util.i18n :refer [tru]] + [metabase.util.schema :as su] + [schema.core :as s])) + +(def ResultsMetadata + "Schema for the expected format for `:metadata` returned by an internal query function." + (su/non-empty + [[(s/one su/KeywordOrString "field name") + (s/one {:base_type su/FieldType, :display_name su/NonBlankString, s/Keyword s/Any} + "field metadata")]])) + +(defmulti internal-query + "Define a new internal query type. Conventionally `query-type` should be a namespaced keyword with the namespace in + which the method is defined. 
See docstring + for [[metabase-enterprise.audit-app.query-processor.middleware.handle-audit-queries]] for a description of what this + method should return." + {:arglists '([query-type & args])} + (fn [query-type & _] + (keyword query-type))) + +(defmethod internal-query :default + [query-type & _] + (throw (ex-info (str (tru "Unable to run internal query function: cannot resolve {0}" query-type)) + {:status-code 400}))) + +(defn resolve-internal-query + "Invoke the internal query with `query-type` (invokes the corresponding implementation of [[internal-query]])." + [query-type & args] + (let [query-type (keyword query-type) + ns-str (namespace query-type)] + (when ns-str + (classloader/require (symbol ns-str))) + (apply internal-query query-type args))) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/alerts.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/alerts.clj new file mode 100644 index 000000000000..7c92047e8502 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/alerts.clj @@ -0,0 +1,58 @@ +(ns metabase-enterprise.audit-app.pages.alerts + (:require [clojure.string :as str] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.pulses :as common.pulses])) + +(def ^:private table-metadata + (into + [[:card_id {:display_name "Question ID", :base_type :type/Integer, :remapped_to :card_name}] + [:card_name {:display_name "Question Name" :base_type :type/Text, :remapped_from :card_id}]] + common.pulses/table-metadata)) + +(def ^:private table-query-columns + (into + [:card_id + :card_name] + common.pulses/table-query-columns)) + +(defn- table-query [card-name] + (-> common.pulses/table-query + (update :select (partial into + [[:card.id :card_id] + [:card.name :card_name]])) + (update :left-join into [:pulse_card [:= :pulse.id :pulse_card.pulse_id] + [:report_card :card] [:= :pulse_card.card_id 
:card.id]]) + (update :where (fn [where] + (into + where + (filter some?) + ;; make sure the pulse_card actually exists. + [[:not= :pulse_card.card_id nil] + [:= :pulse.dashboard_id nil] + ;; if `pulse.alert_condition` is non-NULL then the Pulse is an Alert + [:not= :pulse.alert_condition nil] + (when-not (str/blank? card-name) + [:like :%lower.card.name (str \% (str/lower-case card-name) \%)])]))) + (assoc :order-by [[:%lower.card.name :asc] + ;; Newest first. ID instead of `created_at` because the column is currently only + ;; second-resolution for MySQL which busts our tests + [:channel.id :desc]]))) + +(def ^:private ^{:arglists '([row-map])} row-map->vec + (apply juxt (map first table-metadata))) + +(defn- post-process-row [row] + (-> (zipmap table-query-columns row) + common.pulses/post-process-row-map + row-map->vec)) + +;; with optional param `card-name`, only show subscriptions matching card name. +(defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + + ([_ card-name] + {:metadata table-metadata + :results (common/reducible-query (table-query card-name)) + :xform (map post-process-row)})) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/common.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common.clj similarity index 89% rename from enterprise/backend/src/metabase_enterprise/audit/pages/common.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/common.clj index 486fd248f5cb..04e74c7b7dfb 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/common.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common.clj @@ -1,4 +1,4 @@ -(ns metabase-enterprise.audit.pages.common +(ns metabase-enterprise.audit-app.pages.common "Shared functions used by audit internal queries across different namespaces." 
(:require [clojure.core.async :as a] [clojure.core.memoize :as memoize] @@ -10,7 +10,7 @@ [honeysql.helpers :as h] [java-time :as t] [medley.core :as m] - [metabase-enterprise.audit.query-processor.middleware.handle-audit-queries :as qp.middleware.audit] + [metabase-enterprise.audit-app.query-processor.middleware.handle-audit-queries :as qp.middleware.audit] [metabase.db :as mdb] [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute] [metabase.driver.sql-jdbc.sync :as sql-jdbc.sync] @@ -19,6 +19,7 @@ [metabase.query-processor.timezone :as qp.tz] [metabase.util :as u] [metabase.util.honeysql-extensions :as hx] + [metabase.util.i18n :refer [tru]] [metabase.util.urls :as urls] [schema.core :as s] [toucan.db :as db])) @@ -91,12 +92,20 @@ (fn [] (timezone (mdb/db-type) (db/connection))))) +(defn- compile-honeysql [driver honeysql-query] + (try + (let [honeysql-query (cond-> honeysql-query + ;; MySQL 5.x does not support CTEs, so convert them to subselects instead + (= driver :mysql) CTEs->subselects)] + (db/honeysql->sql (add-default-params honeysql-query))) + (catch Throwable e + (throw (ex-info (tru "Error compiling audit query: {0}" (ex-message e)) + {:driver driver, :honeysql-query honeysql-query} + e))))) + (defn- reduce-results* [honeysql-query context rff init] (let [driver (mdb/db-type) - honeysql-query (cond-> honeysql-query - ;; MySQL 5.x does not support CTEs, so convert them to subselects instead - (= driver :mysql) CTEs->subselects) - [sql & params] (db/honeysql->sql (add-default-params honeysql-query)) + [sql & params] (compile-honeysql driver honeysql-query) canceled-chan (context/canceled-chan context)] ;; MySQL driver normalizies timestamps. Setting `*results-timezone-id-override*` is a shortcut ;; instead of mocking up a chunk of regular QP pipeline. 
@@ -109,11 +118,17 @@ cols (sql-jdbc.execute/column-metadata driver rsmeta) metadata {:cols cols} rf (rff metadata)] - (reduce rf init (sql-jdbc.execute/reducible-rows driver rs rsmeta canceled-chan)))) (catch InterruptedException e (a/>!! canceled-chan :cancel) - (throw e)))))) + (throw e)) + (catch Throwable e + (throw (ex-info (tru "Error running audit query: {0}" (ex-message e)) + {:driver driver + :honeysql-query honeysql-query + :sql sql + :params params} + e))))))) (defn reducible-query "Return a function with the signature @@ -131,7 +146,7 @@ (defn query "Run a internal audit query, automatically including limits and offsets for paging. This function returns results directly as a series of maps (the 'legacy results' format as described in - `metabase-enterprise.audit.query-processor.middleware.handle-audit-queries.internal-queries`)" + `metabase-enterprise.audit-app.query-processor.middleware.handle-audit-queries.internal-queries`)" [honeysql-query] (let [context {:canceled-chan (a/promise-chan)} rff (fn [{:keys [cols]}] @@ -205,7 +220,7 @@ (defn lowercase-field "Lowercase a SQL field, to enter into honeysql query" [field] - (keyword (str "%lower." 
(name field)))) + (hsql/call :lower field)) (defn add-45-days-clause "Add an appropriate `WHERE` clause to limit query to 45 days" diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/common/card_and_dashboard_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/card_and_dashboard_detail.clj similarity index 94% rename from enterprise/backend/src/metabase_enterprise/audit/pages/common/card_and_dashboard_detail.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/common/card_and_dashboard_detail.clj index c06f7f5b84c4..894851fa9c9a 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/common/card_and_dashboard_detail.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/card_and_dashboard_detail.clj @@ -1,7 +1,7 @@ -(ns metabase-enterprise.audit.pages.common.card-and-dashboard-detail +(ns metabase-enterprise.audit-app.pages.common.card-and-dashboard-detail "Common queries used by both Card (Question) and Dashboard detail pages." 
(:require [honeysql.core :as hsql] - [metabase-enterprise.audit.pages.common :as common] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.models.card :refer [Card]] [metabase.models.dashboard :refer [Dashboard]] [metabase.models.revision :as revision] @@ -95,11 +95,13 @@ [model :- ModelName, model-id :- su/IntGreaterThanZero] {:metadata [[:when {:display_name "When", :base_type :type/DateTime}] [:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :who}] - [:who {:display_name "Who", :base_type :type/Name, :remapped_from :user_id}]] + [:who {:display_name "Who", :base_type :type/Name, :remapped_from :user_id}] + [:what {:display_name "What", :base_type :type/Text}]] :results (common/reducible-query {:select [[:vl.timestamp :when] :vl.user_id - [(common/user-full-name :u) :who]] + [(common/user-full-name :u) :who] + [:vl.metadata :what]] :from [[:view_log :vl]] :join [[:core_user :u] [:= :vl.user_id :u.id]] :where [:and diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/common/cards.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/cards.clj similarity index 51% rename from enterprise/backend/src/metabase_enterprise/audit/pages/common/cards.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/common/cards.clj index 31c2a2390483..fbfa1d21a0f7 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/common/cards.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/cards.clj @@ -1,5 +1,6 @@ -(ns metabase-enterprise.audit.pages.common.cards - (:require [metabase-enterprise.audit.pages.common :as common] +(ns metabase-enterprise.audit-app.pages.common.cards + (:require [metabase-enterprise.audit-app.pages.common :as common] + [metabase.db.connection :as mdb.connection] [metabase.util.honeysql-extensions :as hx])) (def avg-exec-time @@ -25,6 +26,23 @@ :group-by [:card_id]} (common/add-45-days-clause :started_at))]) +(def latest-qe + 
"HoneySQL for a CTE to get latest QueryExecution for a Card." + [:latest_qe {:select [:query_execution.card_id :error :query_execution.started_at] + :from [:query_execution] + :join [[{:select [:card_id [:%max.started_at :started_at]] + :from [:query_execution] + :group-by [:card_id]} :inner_qe] + [:= :query_execution.started_at :inner_qe.started_at]] + :limit 1}]) + +(def query-runs + "HoneySQL for a CTE to include the total number of queries for each Card forever." + [:query_runs {:select [:card_id + [:%count.* :count]] + :from [:query_execution] + :group-by [:card_id]}]) + (def query-runs-45 "HoneySQL for a CTE to include the total number of queries for each Card for 45 days." [:query_runs (-> {:select [:card_id @@ -33,6 +51,21 @@ :group-by [:card_id]} (common/add-45-days-clause :started_at))]) +(def dashboards-count + "HoneySQL for a CTE to enumerate the dashboards for a Card." + [:dash_card {:select [:card_id [:%count.* :count]] + :from [:report_dashboardcard] + :group-by [:card_id]}]) + +(def dashboards-ids + "HoneySQL for a CTE to enumerate the dashboards for a Card. We get the actual ID's" + [:dash_card {:select [:card_id [(common/group-concat (hx/cast + (if (= (mdb.connection/db-type) :mysql) :char :text) + :report_dashboard.name) "|") :name_str]] + :from [:report_dashboardcard] + :join [:report_dashboard [:= :report_dashboardcard.dashboard_id :report_dashboard.id]] + :group-by [:card_id]}]) + (def views "HoneySQL for a CTE to include the total view count for each Card." 
[:card_views {:select [[:model_id :card_id] diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/common/dashboards.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/dashboards.clj similarity index 93% rename from enterprise/backend/src/metabase_enterprise/audit/pages/common/dashboards.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/common/dashboards.clj index 1a1da7a51d26..3445908f2b92 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/common/dashboards.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/dashboards.clj @@ -1,7 +1,7 @@ -(ns metabase-enterprise.audit.pages.common.dashboards +(ns metabase-enterprise.audit-app.pages.common.dashboards (:require [honeysql.core :as hsql] [honeysql.helpers :as h] - [metabase-enterprise.audit.pages.common :as common] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.honeysql-extensions :as hx] [metabase.util.urls :as urls])) @@ -13,6 +13,7 @@ [:saved_by_id {:display_name "Saved by User ID", :base_type :type/Text, :remapped_to :saved_by}] [:saved_by {:display_name "Saved by", :base_type :type/Text, :remapped_from :saved_by_id}] [:saved_on {:display_name "Saved on", :base_type :type/DateTime}] + [:cache_ttl {:display_name "Cache Duration", :base_type :type/Integer}] [:last_edited_on {:display_name "Last edited on", :base_type :type/DateTime}] [:cards {:display_name "Cards", :base_type :type/Integer}] [:public_link {:display_name "Public Link", :base_type :type/URL}] @@ -44,6 +45,7 @@ [:u.id :saved_by_id] [(common/user-full-name :u) :saved_by] [:d.created_at :saved_on] + [:d.cache_ttl :cache_ttl] [:d.updated_at :last_edited_on] [:cc.card_count :cards] [(hsql/call :case diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/pulses.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/pulses.clj new file mode 100644 index 000000000000..3b8d59fce65b --- /dev/null
+++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/pulses.clj @@ -0,0 +1,130 @@ +(ns metabase-enterprise.audit-app.pages.common.pulses + "Shared code for [[metabase-enterprise.audit-app.pages.dashboard-subscriptions]] + and [[metabase-enterprise.audit-app.pages.alerts]]." + (:require [cheshire.core :as json] + [clojure.tools.logging :as log] + [metabase.models.collection :as collection] + [metabase.util.cron :as u.cron] + [metabase.util.honeysql-extensions :as hx] + [metabase.util.i18n :refer [trs tru]])) + +(def table-metadata + "Common Metadata for the columns returned by both the [[metabase-enterprise.audit-app.pages.dashboard-subscriptions]] + and [[metabase-enterprise.audit-app.pages.alerts]] audit queries. (These respective queries also return their own + additional columns.)" + [[:pulse_id {:display_name "Pulse ID", :base_type :type/Integer}] + [:recipients {:display_name "Recipients", :base_type :type/Integer}] + [:subscription_type {:display_name "Type", :base_type :type/Text}] + [:collection_id {:display_name "Collection ID", :base_type :type/Integer, :remapped_to :collection_name}] + [:collection_name {:display_name "Collection", :base_type :type/Text, :remapped_from :collection_id}] + [:frequency {:display_name "Frequency", :base_type :type/Text}] + [:creator_id {:display_name "Created By ID", :base_type :type/Integer, :remapped_to :creator_name}] + [:creator_name {:display_name "Created By", :base_type :type/Text, :remapped_from :creator_id}] + [:created_at {:display_name "Created At", :base_type :type/DateTimeWithTZ}] + [:num_filters {:display_name "Filters", :base_type :type/Integer}]]) + +(def table-query-columns + "Keyword names of columns returned by the queries by both + the [[metabase-enterprise.audit-app.pages.dashboard-subscriptions]] and [[metabase-enterprise.audit-app.pages.alerts]] audit + queries." 
+ [:pulse_id + :num_user_recipients + :channel_id + :channel_details + :subscription_type + :collection_id + :collection_name + :schedule_type + :schedule_hour + :schedule_day + :schedule_frame + :creator_id + :creator_name + :created_at + :pulse_parameters]) + +(def table-query + "Common HoneySQL base query for both the [[metabase-enterprise.audit-app.pages.dashboard-subscriptions]] + and [[metabase-enterprise.audit-app.pages.alerts]] audit queries. (The respective implementations tweak this query and + add additional columns, filters, and order-by clauses.)" + {:with [[:user_recipients {:select [[:recipient.pulse_channel_id :channel_id] + [:%count.* :count]] + :from [[:pulse_channel_recipient :recipient]] + :group-by [:channel_id]}]] + :select [[:pulse.id :pulse_id] + [:user_recipients.count :num_user_recipients] + [:channel.id :channel_id] + [:channel.details :channel_details] + [:channel.channel_type :subscription_type] + [:collection.id :collection_id] + [:collection.name :collection_name] + :channel.schedule_type + :channel.schedule_hour + :channel.schedule_day + :channel.schedule_frame + [:creator.id :creator_id] + [(hx/concat :creator.first_name (hx/literal " ") :creator.last_name) :creator_name] + [:channel.created_at :created_at] + [:pulse.parameters :pulse_parameters]] + :from [[:pulse_channel :channel]] + :left-join [:pulse [:= :channel.pulse_id :pulse.id] + :collection [:= :pulse.collection_id :collection.id] + [:core_user :creator] [:= :pulse.creator_id :creator.id] + :user_recipients [:= :channel.id :user_recipients.channel_id]] + :where [:and + [:not= :pulse.archived true] + [:= :channel.enabled true]]}) + +(defn- describe-frequency [row] + (-> (select-keys row [:schedule_type :schedule_hour :schedule_day :schedule_frame]) + u.cron/schedule-map->cron-string + u.cron/describe-cron-string)) + +(defn- describe-recipients + "Return the number of recipients for email `PulseChannel`s. 
Includes both User recipients (represented by + `PulseChannelRecipient` rows) and plain email recipients (stored directly in the `PulseChannel` `:details`). Returns + `nil` for Slack channels." + [{channel-id :channel_id + subscription-type :subscription_type + channel-details :channel_details + num-recipients :num_user_recipients}] + (let [details (json/parse-string channel-details true)] + (when (= (keyword subscription-type) :email) + ((fnil + 0 0) num-recipients (count (:emails details)))))) + +(defn- pulse-parameter-count [{pulse-parameters :pulse_parameters}] + (if-let [params (try + (some-> pulse-parameters (json/parse-string true)) + (catch Throwable e + (log/error e (trs "Error parsing Pulse parameters: {0}" (ex-message e))) + nil))] + (count params) + 0)) + +(defn- root-collection-name [] + (:name (collection/root-collection-with-ui-details nil))) + +(defn post-process-row-map + "Post-process a `row` **map** for the subscription and alert audit page tables. Get this map by doing something like + this: + + (zipmap table-query-columns row-vector) + + This map should contain at least the keys in [[table-query-columns]] (provided by the common [[table-query]]). After + calling this function, you'll need to convert the row map back to a vector; something like + + (apply juxt (map first table-metadata)) + + should do the trick." + [row] + {:pre [(map? 
row)]} + (-> row + (assoc :frequency (describe-frequency row) + :recipients (describe-recipients row) + :num_filters (pulse-parameter-count row)) + (update :subscription_type (fn [subscription-type] + (case (keyword subscription-type) + :email (tru "Email") + :slack (tru "Slack") + subscription-type))) + (update :collection_name #(or % (root-collection-name))))) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/dashboard_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_detail.clj similarity index 73% rename from enterprise/backend/src/metabase_enterprise/audit/pages/dashboard_detail.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_detail.clj index e36d59784cbd..afdf66bef117 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/dashboard_detail.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_detail.clj @@ -1,39 +1,42 @@ -(ns metabase-enterprise.audit.pages.dashboard-detail +(ns metabase-enterprise.audit-app.pages.dashboard-detail "Detail page for a single dashboard." - (:require [metabase-enterprise.audit.pages.common :as common] - [metabase-enterprise.audit.pages.common.card-and-dashboard-detail :as card-and-dash-detail] - [metabase-enterprise.audit.pages.common.cards :as cards] + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.card-and-dashboard-detail :as card-and-dash-detail] + [metabase-enterprise.audit-app.pages.common.cards :as cards] [metabase.models.dashboard :refer [Dashboard]] [metabase.util.schema :as su] [schema.core :as s])) -(s/defn ^:internal-query-fn views-by-time - "Get views of a Dashboard broken out by a time `unit`, e.g. `day` or `day-of-week`." - [dashboard-id :- su/IntGreaterThanZero, datetime-unit :- common/DateTimeUnitStr] +;; Get views of a Dashboard broken out by a time `unit`, e.g. 
`day` or `day-of-week`. +(s/defmethod audit.i/internal-query ::views-by-time + [_ dashboard-id :- su/IntGreaterThanZero datetime-unit :- common/DateTimeUnitStr] + (card-and-dash-detail/views-by-time "dashboard" dashboard-id datetime-unit)) -(s/defn ^:internal-query-fn revision-history - [dashboard-id :- su/IntGreaterThanZero] +;; Revision history for a specific Dashboard. +(s/defmethod audit.i/internal-query ::revision-history + [_ dashboard-id :- su/IntGreaterThanZero] (card-and-dash-detail/revision-history Dashboard dashboard-id)) -(s/defn ^:internal-query-fn audit-log - [dashboard-id :- su/IntGreaterThanZero] +;; View log for a specific Dashboard. +(s/defmethod audit.i/internal-query ::audit-log + [_ dashboard-id :- su/IntGreaterThanZero] (card-and-dash-detail/audit-log "dashboard" dashboard-id)) - -(s/defn ^:internal-query-fn cards - [dashboard-id :- su/IntGreaterThanZero] +;; Information about the Saved Questions (Cards) on this Dashboard. +(s/defmethod audit.i/internal-query ::cards + [_ dashboard-id :- su/IntGreaterThanZero] {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Title", :base_type :type/Name, :remapped_from :card_id}] [:collection_id {:display_name "Collection ID", :base_type :type/Integer, :remapped_to :collection_name}] [:collection_name {:display_name "Collection", :base_type :type/Text, :remapped_from :collection_id}] - [:created_at {:display_name "Created At", :base_type :type/DateTime}] + [:created_at {:display_name "Created At", :base_type :type/DateTime}] [:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] [:database_name {:display_name "Database", :base_type :type/Text, :remapped_from :database_id}] [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table_name}] [:table_name {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}] [:avg_running_time_ms {:display_name "Avg.
exec. time (ms)", :base_type :type/Number}] - [:cache_ttl {:display_name "Cache TTL", :base_type :type/Number}] + [:cache_ttl {:display_name "Cache Duration", :base_type :type/Number}] [:public_link {:display_name "Public Link", :base_type :type/URL}] [:total_views {:display_name "Total Views", :base_type :type/Integer}]] :results (common/reducible-query diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_subscriptions.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_subscriptions.clj new file mode 100644 index 000000000000..b625c4a451fa --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_subscriptions.clj @@ -0,0 +1,53 @@ +(ns metabase-enterprise.audit-app.pages.dashboard-subscriptions + (:require [clojure.string :as str] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.pulses :as common.pulses])) + +(def ^:private table-metadata + (into + [[:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] + [:dashboard_name {:display_name "Dashboard Name" :base_type :type/Text, :remapped_from :dashboard_id}]] + common.pulses/table-metadata)) + +(def ^:private table-query-columns + (into + [:dashboard_id + :dashboard_name] + common.pulses/table-query-columns)) + +(defn- table-query [dashboard-name] + (-> common.pulses/table-query + (update :select (partial into + [[:dashboard.id :dashboard_id] + [:dashboard.name :dashboard_name]])) + (update :left-join into [[:report_dashboard :dashboard] [:= :pulse.dashboard_id :dashboard.id]]) + (update :where (fn [where] + (into + where + (filter some?) + [[:not= :pulse.dashboard_id nil] + (when-not (str/blank? dashboard-name) + [:like :%lower.dashboard.name (str \% (str/lower-case dashboard-name) \%)])]))) + (assoc :order-by [[:%lower.dashboard.name :asc] + ;; Newest first. 
ID instead of `created_at` because the column is currently only + ;; second-resolution for MySQL which busts our tests + [:channel.id :desc]]))) + +(def ^:private ^{:arglists '([row-map])} row-map->vec + (apply juxt (map first table-metadata))) + +(defn- post-process-row [row] + (-> (zipmap table-query-columns row) + common.pulses/post-process-row-map + row-map->vec)) + +;; with optional param `dashboard-name`, only show subscriptions matching dashboard name. +(defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + + ([_ dashboard-name] + {:metadata table-metadata + :results (common/reducible-query (table-query dashboard-name)) + :xform (map post-process-row)})) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/dashboards.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboards.clj similarity index 50% rename from enterprise/backend/src/metabase_enterprise/audit/pages/dashboards.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboards.clj index 9ace5fdc09a2..11806bd6667a 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/dashboards.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboards.clj @@ -1,44 +1,31 @@ -(ns metabase-enterprise.audit.pages.dashboards +(ns metabase-enterprise.audit-app.pages.dashboards "Dashboards overview page." - (:require [metabase-enterprise.audit.pages.common :as common] - [metabase-enterprise.audit.pages.common.dashboards :as dashboards] + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.dashboards :as dashboards] [metabase.util.honeysql-extensions :as hx] [schema.core :as s])) -(defn ^:deprecated ^:internal-query-fn views-per-day - "DEPRECATED: use `views-and-saves-by-time ` instead." 
- [] - {:metadata [[:day {:display_name "Date", :base_type :type/Date}] - [:views {:display_name "Views", :base_type :type/Integer}]] - :results (common/reducible-query - {:select [[(hx/cast :date :timestamp) :day] - [:%count.* :views]] - :from [:view_log] - :where [:= :model (hx/literal "dashboard")] - :group-by [(hx/cast :date :timestamp)] - :order-by [(hx/cast :date :timestamp)]})}) - - -(s/defn ^:internal-query-fn views-and-saves-by-time - "Two-series timeseries that includes total number of Dashboard views and saves broken out by a `datetime-unit`." - [datetime-unit :- common/DateTimeUnitStr] +;; Two-series timeseries that includes total number of Dashboard views and saves broken out by a `datetime-unit`. +(s/defmethod audit.i/internal-query ::views-and-saves-by-time + [_ datetime-unit :- common/DateTimeUnitStr] {:metadata [[:date {:display_name "Date", :base_type (common/datetime-unit-str->base-type datetime-unit)}] [:views {:display_name "Views", :base_type :type/Integer}] [:saves {:display_name "Saves", :base_type :type/Integer}]] ;; this is so nice and easy to implement in a single query with FULL OUTER JOINS but unfortunately only pg supports ;; them(!) 
:results (let [views (common/query - {:select [[(common/grouped-datetime datetime-unit :timestamp) :date] - [:%count.* :views]] - :from [:view_log] - :where [:= :model (hx/literal "dashboard")] - :group-by [(common/grouped-datetime datetime-unit :timestamp)]}) + {:select [[(common/grouped-datetime datetime-unit :timestamp) :date] + [:%count.* :views]] + :from [:view_log] + :where [:= :model (hx/literal "dashboard")] + :group-by [(common/grouped-datetime datetime-unit :timestamp)]}) date->views (zipmap (map :date views) (map :views views)) saves (common/query - {:select [[(common/grouped-datetime datetime-unit :created_at) :date] - [:%count.* :saves]] - :from [:report_dashboard] - :group-by [(common/grouped-datetime datetime-unit :created_at)]}) + {:select [[(common/grouped-datetime datetime-unit :created_at) :date] + [:%count.* :saves]] + :from [:report_dashboard] + :group-by [(common/grouped-datetime datetime-unit :created_at)]}) date->saves (zipmap (map :date saves) (map :saves saves)) all-dates (sort (keep identity (distinct (concat (keys date->views) (keys date->saves)))))] @@ -47,10 +34,9 @@ :views (date->views date 0) :saves (date->saves date 0)}))}) - -(defn ^:internal-query-fn ^:deprecated most-popular - "Deprecated: use `most-popular-with-avg-speed` instead." - [] +;; DEPRECATED Use `most-popular-with-avg-speed` instead. +(defmethod audit.i/internal-query ::most-popular + [_] {:metadata [[:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Title, :remapped_from :dashboard_id}] [:views {:display_name "Views", :base_type :type/Integer}]] @@ -65,49 +51,48 @@ :order-by [[:%count.* :desc]] :limit 10})}) -(defn ^:internal-query-fn most-popular-with-avg-speed - "10 most popular dashboards with their average speed." - [] +;; Ten most popular dashboards with their average speed. 
+(defmethod audit.i/internal-query ::most-popular-with-avg-speed + [_] {:metadata [[:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Title, :remapped_from :dashboard_id}] [:views {:display_name "Views", :base_type :type/Integer}] [:avg_running_time {:display_name "Avg. Question Load Time (ms)", :base_type :type/Decimal}]] :results (common/reducible-query - {:with [[:most_popular {:select [[:d.id :dashboard_id] - [:d.name :dashboard_name] - [:%count.* :views]] - :from [[:view_log :vl]] - :left-join [[:report_dashboard :d] [:= :vl.model_id :d.id]] - :where [:= :vl.model (hx/literal "dashboard")] - :group-by [:d.id] - :order-by [[:%count.* :desc]] - :limit 10}] - [:card_running_time {:select [:qe.card_id - [:%avg.qe.running_time :avg_running_time]] - :from [[:query_execution :qe]] - :where [:not= :qe.card_id nil] - :group-by [:qe.card_id]}] - [:dash_avg_running_time {:select [[:d.id :dashboard_id] - [:%avg.rt.avg_running_time :avg_running_time]] - :from [[:report_dashboardcard :dc]] - :left-join [[:card_running_time :rt] [:= :dc.card_id :rt.card_id] - [:report_dashboard :d] [:= :dc.dashboard_id :d.id]] - :group-by [:d.id] - :where [:in :d.id {:select [:dashboard_id] - :from [:most_popular]}]}]] - :select [:mp.dashboard_id - :mp.dashboard_name - :mp.views - :rt.avg_running_time] - :from [[:most_popular :mp]] - :left-join [[:dash_avg_running_time :rt] [:= :mp.dashboard_id :rt.dashboard_id]] - :order-by [[:mp.views :desc]] - :limit 10})}) - + {:with [[:most_popular {:select [[:d.id :dashboard_id] + [:d.name :dashboard_name] + [:%count.* :views]] + :from [[:view_log :vl]] + :left-join [[:report_dashboard :d] [:= :vl.model_id :d.id]] + :where [:= :vl.model (hx/literal "dashboard")] + :group-by [:d.id] + :order-by [[:%count.* :desc]] + :limit 10}] + [:card_running_time {:select [:qe.card_id + [:%avg.qe.running_time :avg_running_time]] + :from [[:query_execution 
:qe]] + :where [:not= :qe.card_id nil] + :group-by [:qe.card_id]}] + [:dash_avg_running_time {:select [[:d.id :dashboard_id] + [:%avg.rt.avg_running_time :avg_running_time]] + :from [[:report_dashboardcard :dc]] + :left-join [[:card_running_time :rt] [:= :dc.card_id :rt.card_id] + [:report_dashboard :d] [:= :dc.dashboard_id :d.id]] + :group-by [:d.id] + :where [:in :d.id {:select [:dashboard_id] + :from [:most_popular]}]}]] + :select [:mp.dashboard_id + :mp.dashboard_name + :mp.views + :rt.avg_running_time] + :from [[:most_popular :mp]] + :left-join [[:dash_avg_running_time :rt] [:= :mp.dashboard_id :rt.dashboard_id]] + :order-by [[:mp.views :desc]] + :limit 10})}) -(defn ^:internal-query-fn ^:deprecated slowest - "Query that returns the 10 Dashboards that have the slowest average execution times, in descending order." - [] +;; DEPRECATED Query that returns the 10 Dashboards that have the slowest average execution times, in descending order. +(defmethod audit.i/internal-query ::slowest + [_] {:metadata [[:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Title, :remapped_from :dashboard_id}] [:avg_running_time {:display_name "Avg. Question Load Time (ms)", :base_type :type/Decimal}]] @@ -127,10 +112,9 @@ :order-by [[:avg_running_time :desc]] :limit 10})}) - -(defn ^:internal-query-fn ^:deprecated most-common-questions - "Query that returns the 10 Cards that appear most often in Dashboards, in descending order." - [] +;; DEPRECATED Query that returns the 10 Cards that appear most often in Dashboards, in descending order. 
+(defmethod audit.i/internal-query ::most-common-questions + [_] {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Card", :base_type :type/Title, :remapped_from :card_id}] [:count {:display_name "Count", :base_type :type/Integer}]] @@ -144,10 +128,9 @@ :order-by [[:%count.* :desc]] :limit 10})}) - -(s/defn ^:internal-query-fn table - "Internal audit app query powering a table of different Dashboards with lots of extra info about them." - ([] - (table nil)) - ([query-string :- (s/maybe s/Str)] +;; Internal audit app query powering a table of different Dashboards with lots of extra info about them. +(s/defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + ([_ query-string :- (s/maybe s/Str)] (dashboards/table query-string))) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/database_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/database_detail.clj new file mode 100644 index 000000000000..0e3cfe4cc529 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/database_detail.clj @@ -0,0 +1,37 @@ +(ns metabase-enterprise.audit-app.pages.database-detail + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase.util.schema :as su] + [ring.util.codec :as codec] + [schema.core :as s])) + +;; Query execution history for queries against this Database. 
+(s/defmethod audit.i/internal-query ::audit-log + [_ database-id :- su/IntGreaterThanZero] + {:metadata [[:started_at {:display_name "Viewed on", :base_type :type/DateTime}] + [:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :query}] + [:query_hash {:display_name "Query Hash", :base_type :type/Text}] + [:query {:display_name "Query", :base_type :type/Text, :remapped_from :card_id}] + [:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :user}] + [:user {:display_name "Queried by", :base_type :type/Text, :remapped_from :user_id}] + [:schema {:display_name "Schema", :base_type :type/Text}] + [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table}] + [:table {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}]] + :results (common/reducible-query + {:select [:qe.started_at + [:card.id :card_id] + [:qe.hash :query_hash] + [(common/card-name-or-ad-hoc :card) :query] + [:u.id :user_id] + [(common/user-full-name :u) :user] + :t.schema + [:t.id :table_id] + [:t.name :table]] + :from [[:query_execution :qe]] + :where [:= :qe.database_id database-id] + :join [[:metabase_database :db] [:= :db.id :qe.database_id] + [:core_user :u] [:= :qe.executor_id :u.id]] + :left-join [[:report_card :card] [:= :qe.card_id :card.id] + [:metabase_table :t] [:= :card.table_id :t.id]] + :order-by [[:qe.started_at :desc]]}) + :xform (map #(update (vec %) 2 codec/base64-encode))}) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/databases.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/databases.clj similarity index 79% rename from enterprise/backend/src/metabase_enterprise/audit/pages/databases.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/databases.clj index e58632c0a184..daa137866bd0 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/databases.clj +++ 
b/enterprise/backend/src/metabase_enterprise/audit_app/pages/databases.clj @@ -1,6 +1,7 @@ -(ns metabase-enterprise.audit.pages.databases +(ns metabase-enterprise.audit-app.pages.databases (:require [honeysql.core :as hsql] - [metabase-enterprise.audit.pages.common :as common] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.cron :as cron] [schema.core :as s])) @@ -15,9 +16,11 @@ ;; JOIN metabase_database db ON t.db_id = db.id ;; GROUP BY db.id ;; ORDER BY lower(db.name) ASC -(defn ^:internal-query-fn ^:deprecated total-query-executions-by-db - "Return Databases with the total number of queries ran against them and the average running time for all queries." - [] +;; +;; DEPRECATED Return Databases with the total number of queries ran against them and the average running time for all +;; queries. +(defmethod audit.i/internal-query ::total-query-executions-by-db + [_] {:metadata [[:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] [:database_name {:display_name "Database", :base_type :type/Text, :remapped_from :database_id}] [:queries {:display_name "Queries", :base_type :type/Integer}] @@ -34,9 +37,9 @@ :group-by [:db.id] :order-by [[:%lower.db.name :asc]]})}) -(s/defn ^:internal-query-fn query-executions-by-time - "Query that returns count of query executions grouped by Database and a `datetime-unit`." - [datetime-unit :- common/DateTimeUnitStr] +;; Query that returns count of query executions grouped by Database and a `datetime-unit`. 
+(s/defmethod audit.i/internal-query ::query-executions-by-time + [_ datetime-unit :- common/DateTimeUnitStr] {:metadata [[:date {:display_name "Date", :base_type (common/datetime-unit-str->base-type datetime-unit)}] [:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] [:database_name {:display_name "Database Name", :base_type :type/Name, :remapped_from :database_id}] @@ -63,16 +66,17 @@ [:%lower.db.name :asc] [:qx.database_id :asc]]})}) -(defn ^:deprecated ^:internal-query-fn query-executions-per-db-per-day - "Query that returns count of query executions grouped by Database and day." - [] - (query-executions-by-time "day")) +;; DEPRECATED Use `::query-executions-by-time` instead. Query that returns count of query executions grouped by +;; Database and day. +(defmethod audit.i/internal-query ::query-executions-per-db-per-day + [_] + (audit.i/internal-query ::query-executions-by-time "day")) - -(s/defn ^:internal-query-fn table - ([] - (table nil)) - ([query-string :- (s/maybe s/Str)] +;; Table with information and statistics about all the data warehouse Databases in this Metabase instance. +(s/defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + ([_ query-string :- (s/maybe s/Str)] ;; TODO - Should we convert sync_schedule from a cron string into English? 
Not sure that's going to be feasible for ;; really complicated schedules {:metadata [[:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :title}] @@ -80,7 +84,8 @@ [:added_on {:display_name "Added On", :base_type :type/DateTime}] [:sync_schedule {:display_name "Sync Schedule", :base_type :type/Text}] [:schemas {:display_name "Schemas", :base_type :type/Integer}] - [:tables {:display_name "Tables", :base_type :type/Integer}]] + [:tables {:display_name "Tables", :base_type :type/Integer}] + [:cache_ttl {:display_name "Cache Duration", :base_type :type/Integer}]] :results (common/reducible-query (-> {:with [[:counts {:select [[:db_id :id] @@ -93,7 +98,8 @@ [:db.created_at :added_on] [:db.metadata_sync_schedule :sync_schedule] [:counts.schemas :schemas] - [:counts.tables :tables]] + [:counts.tables :tables] + [:db.cache_ttl :cache_ttl]] :from [[:metabase_database :db]] :left-join [:counts [:= :db.id :counts.id]] :order-by [[:%lower.db.name :asc] diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/downloads.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/downloads.clj similarity index 62% rename from enterprise/backend/src/metabase_enterprise/audit/pages/downloads.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/downloads.clj index 0d01a99e2504..2691d2a8c175 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/downloads.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/downloads.clj @@ -1,63 +1,55 @@ -(ns metabase-enterprise.audit.pages.downloads +(ns metabase-enterprise.audit-app.pages.downloads "Audit queries returning info about query downloads. Query downloads are any query executions whose results are returned as CSV/JSON/XLS." 
(:require [honeysql.core :as hsql] - [metabase-enterprise.audit.pages.common :as common] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.db :as mdb] [metabase.driver.sql.query-processor :as sql.qp] - [metabase.util.honeysql-extensions :as hx] - [schema.core :as s])) + [metabase.util.honeysql-extensions :as hx])) -;;; ------------------------------------------------ per-day-by-size ------------------------------------------------- - -(s/defn ^:internal-query-fn per-day-by-size - "Pairs of count of rows downloaded and date downloaded for the 1000 largest (in terms of row count) queries over the - past 30 days. Intended to power scatter plot." - [] +;; Pairs of count of rows downloaded and date downloaded for the 1000 largest (in terms of row count) queries over the +;; past 30 days. Intended to power scatter plot. +(defmethod audit.i/internal-query ::per-day-by-size + [_] {:metadata [[:date {:display_name "Day", :base_type :type/DateTime}] [:rows {:display_name "Rows in Query", :base_type :type/Integer}] [:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :user_name}] [:user_name {:display_name "User", :base_type :type/Text, :remapped_from :user_id}]] :results (common/reducible-query - {:select [[:qe.started_at :date] - [:qe.result_rows :rows] - [:qe.executor_id :user_id] - [(common/user-full-name :u) :user_name]] - :from [[:query_execution :qe]] - :left-join [[:core_user :u] [:= :qe.executor_id :u.id]] - :where [:and - [:> :qe.started_at (sql.qp/add-interval-honeysql-form (mdb/db-type) :%now -30 :day)] - (common/query-execution-is-download :qe)] - :order-by [[:qe.result_rows :desc]] - :limit 1000})}) - - -;;; ---------------------------------------------------- per-user ---------------------------------------------------- - -(s/defn ^:internal-query-fn per-user - "Total count of query downloads broken out by user, ordered by highest total, for the top 10 users." 
- [] + {:select [[:qe.started_at :date] + [:qe.result_rows :rows] + [:qe.executor_id :user_id] + [(common/user-full-name :u) :user_name]] + :from [[:query_execution :qe]] + :left-join [[:core_user :u] [:= :qe.executor_id :u.id]] + :where [:and + [:> :qe.started_at (sql.qp/add-interval-honeysql-form (mdb/db-type) :%now -30 :day)] + (common/query-execution-is-download :qe)] + :order-by [[:qe.result_rows :desc]] + :limit 1000})}) + +;; Total count of query downloads broken out by user, ordered by highest total, for the top 10 users. +(defmethod audit.i/internal-query ::per-user + [_] {:metadata [[:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :user_name}] [:user_name {:display_name "User", :base_type :type/Text, :remapped_from :user_id}] [:downloads {:display_name "Downloads", :base_type :type/Integer}]] :results (common/reducible-query - {:with [[:downloads_by_user - {:select [[:qe.executor_id :user_id] - [:%count.* :downloads]] - :from [[:query_execution :qe]] - :where (common/query-execution-is-download :qe) - :group-by [:qe.executor_id] - :order-by [[:%count.* :desc]] - :limit 10}]] - :select [[:d.user_id :user_id] - [(common/user-full-name :u) :user_name] - [:d.downloads :downloads]] - :from [[:downloads_by_user :d]] - :join [[:core_user :u] [:= :d.user_id :u.id]] - :order-by [[:d.downloads :desc]]})}) - - -;;; ---------------------------------------------------- by-size ----------------------------------------------------- + {:with [[:downloads_by_user + {:select [[:qe.executor_id :user_id] + [:%count.* :downloads]] + :from [[:query_execution :qe]] + :where (common/query-execution-is-download :qe) + :group-by [:qe.executor_id] + :order-by [[:%count.* :desc]] + :limit 10}]] + :select [[:d.user_id :user_id] + [(common/user-full-name :u) :user_name] + [:d.downloads :downloads]] + :from [[:downloads_by_user :d]] + :join [[:core_user :u] [:= :d.user_id :u.id]] + :order-by [[:d.downloads :desc]]})}) (def ^:private bucket-maxes "Add/remove 
numbers here to adjust buckets returned by the `by-size` query." @@ -111,31 +103,28 @@ [[:= :rows_bucket_max -1] (hx/literal (format "> %s" (format-number-add-commas (last bucket-maxes))))]))) -(s/defn ^:internal-query-fn by-size - "Query download count broken out by bucketed number of rows of query. E.g. 10 downloads of queries with 0-10 rows, 15 - downloads of queries with 11-100, etc. Intended to power bar chart." - [] +;; Query download count broken out by bucketed number of rows of query. E.g. 10 downloads of queries with 0-10 rows, +;; 15 downloads of queries with 11-100, etc. Intended to power bar chart. +(defmethod audit.i/internal-query ::by-size + [_] {:metadata [[:rows {:display_name "Rows Downloaded", :base_type :type/Text}] [:downloads {:display_name "Downloads", :base_type :type/Integer}]] :results (common/reducible-query - {:with [[:bucketed_downloads - {:select [[rows->bucket-case-expression :rows_bucket_max]] - :from [:query_execution] - :where [:and - (common/query-execution-is-download :query_execution) - [:not= :result_rows nil]]}]] - :select [[bucket->range-str-case-expression :rows] - [:%count.* :downloads]] - :from [:bucketed_downloads] - :group-by [:rows_bucket_max] - :order-by [[:rows_bucket_max :asc]]})}) - - -;;; ----------------------------------------------------- table ------------------------------------------------------ - -(s/defn ^:internal-query-fn table - "Table showing all query downloads ordered by most recent." - [] + {:with [[:bucketed_downloads + {:select [[rows->bucket-case-expression :rows_bucket_max]] + :from [:query_execution] + :where [:and + (common/query-execution-is-download :query_execution) + [:not= :result_rows nil]]}]] + :select [[bucket->range-str-case-expression :rows] + [:%count.* :downloads]] + :from [:bucketed_downloads] + :group-by [:rows_bucket_max] + :order-by [[:rows_bucket_max :asc]]})}) + +;; Table showing all query downloads ordered by most recent. 
+(defmethod audit.i/internal-query ::table + [_] {:metadata [[:downloaded_at {:display_name "Downloaded At", :base_type :type/DateTime}] [:rows_downloaded {:display_name "Rows Downloaded", :base_type :type/Integer}] [:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/queries.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/queries.clj new file mode 100644 index 000000000000..dfb2e91dae58 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/queries.clj @@ -0,0 +1,213 @@ +(ns metabase-enterprise.audit-app.pages.queries + (:require [honeysql.core :as hsql] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.cards :as cards] + [metabase.db.connection :as mdb.connection] + [metabase.util.honeysql-extensions :as hx])) + +;; DEPRECATED Query that returns data for a two-series timeseries chart with number of queries ran and average query +;; running time broken out by day. +(defmethod audit.i/internal-query ::views-and-avg-execution-time-by-day + [_] + {:metadata [[:day {:display_name "Date", :base_type :type/Date}] + [:views {:display_name "Views", :base_type :type/Integer}] + [:avg_running_time {:display_name "Avg. Running Time (ms)", :base_type :type/Decimal}]] + :results (common/reducible-query + {:select [[(hx/cast :date :started_at) :day] + [:%count.* :views] + [:%avg.running_time :avg_running_time]] + :from [:query_execution] + :group-by [(hx/cast :date :started_at)] + :order-by [[(hx/cast :date :started_at) :asc]]})}) + +;; Query that returns the 10 most-popular Cards based on number of query executions, in descending order. 
+(defmethod audit.i/internal-query ::most-popular + [_] + {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] + [:card_name {:display_name "Card", :base_type :type/Title, :remapped_from :card_id}] + [:executions {:display_name "Executions", :base_type :type/Integer}]] + :results (common/reducible-query + {:select [[:c.id :card_id] + [:c.name :card_name] + [:%count.* :executions]] + :from [[:query_execution :qe]] + :join [[:report_card :c] [:= :qe.card_id :c.id]] + :group-by [:c.id] + :order-by [[:executions :desc]] + :limit 10})}) + +;; DEPRECATED Query that returns the 10 slowest-running Cards based on average query execution time, in descending +;; order. +(defmethod audit.i/internal-query ::slowest + [_] + {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] + [:card_name {:display_name "Card", :base_type :type/Title, :remapped_from :card_id}] + [:avg_running_time {:display_name "Avg. Running Time (ms)", :base_type :type/Decimal}]] + :results (common/reducible-query + {:select [[:c.id :card_id] + [:c.name :card_name] + [:%avg.running_time :avg_running_time]] + :from [[:query_execution :qe]] + :join [[:report_card :c] [:= :qe.card_id :c.id]] + :group-by [:c.id] + :order-by [[:avg_running_time :desc]] + :limit 10})}) + +;; List of all failing questions +(defmethod audit.i/internal-query ::bad-table + ([_] + (audit.i/internal-query ::bad-table nil nil nil nil nil)) + ([_ + error-filter + db-filter + collection-filter + sort-column + sort-direction] + {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer :remapped_to :card_name}] + [:card_name {:display_name "Question", :base_type :type/Text :remapped_from :card_id}] + [:error_substr {:display_name "Error", :base_type :type/Text :code true}] + [:collection_id {:display_name "Collection ID", :base_type :type/Integer :remapped_to :collection_name}] + [:collection_name {:display_name "Collection", 
:base_type :type/Text :remapped_from :collection_id}] + [:database_id {:display_name "Database ID", :base_type :type/Integer :remapped_to :database_name}] + [:database_name {:display_name "Database", :base_type :type/Text :remapped_from :database_id}] + [:schema_name {:display_name "Schema", :base_type :type/Text}] + [:table_id {:display_name "Table ID", :base_type :type/Integer :remapped_to :table_name}] + [:table_name {:display_name "Table", :base_type :type/Text :remapped_from :table_id}] + [:last_run_at {:display_name "Last run at", :base_type :type/DateTime}] + [:total_runs {:display_name "Total runs", :base_type :type/Integer}] + ;; if it appears a billion times each in 2 dashboards, that's 2 billion appearances + [:num_dashboards {:display_name "Dashboards it's in", :base_type :type/Integer}] + [:user_id {:display_name "Created By ID", :base_type :type/Integer :remapped_to :user_name}] + [:user_name {:display_name "Created By", :base_type :type/Text :remapped_from :user_id}] + [:updated_at {:display_name "Updated At", :base_type :type/DateTime}]] + :results (common/reducible-query + (let [coll-name (hsql/call :coalesce :coll.name "Our Analytics") + error-substr (hsql/call :concat + (hsql/call :substring :latest_qe.error + (if (= (mdb.connection/db-type) :mysql) 1 0) + 60) + "...") + dash-count (hsql/call :coalesce :dash_card.count 0)] + (-> + {:with [cards/query-runs + cards/latest-qe + cards/dashboards-count] + :select [[:card.id :card_id] + [:card.name :card_name] + [error-substr :error_substr] + :collection_id + [coll-name :collection_name] + :card.database_id + [:db.name :database_name] + [:t.schema :schema_name] + :card.table_id + [:t.name :table_name] + [:latest_qe.started_at :last_run_at] + [:query_runs.count :total_runs] + [dash-count :num_dashboards] + [:card.creator_id :user_id] + [(common/user-full-name :u) :user_name] + [:card.updated_at :updated_at]] + :from [[:report_card :card]] + :left-join [[:collection :coll] [:= :card.collection_id 
:coll.id] + [:metabase_database :db] [:= :card.database_id :db.id] + [:metabase_table :t] [:= :card.table_id :t.id] + [:core_user :u] [:= :card.creator_id :u.id] + :latest_qe [:= :card.id :latest_qe.card_id] + :query_runs [:= :card.id :query_runs.card_id] + :dash_card [:= :card.id :dash_card.card_id]] + :where [:and + [:= :card.archived false] + [:<> :latest_qe.error nil]]} + (common/add-search-clause error-filter :latest_qe.error) + (common/add-search-clause db-filter :db.name) + (common/add-search-clause collection-filter coll-name) + (common/add-sort-clause + (or sort-column "card.name") + (or sort-direction "asc")))))})) + +;; A list of all questions. +;; +;; Three possible argument lists. All arguments are always nullable. +;; +;; - [] : +;; Dump them all, sort by name ascending +;; +;; - [question-filter] : +;; Dump all filtered by the question-filter string, sort by name ascending. +;; question-filter filters on the `name` column in `cards` table. +;; +;; - [question-filter, collection-filter, sort-column, sort-direction] : +;; Dump all filtered by both question-filter and collection-filter, +;; sort by the given column and sort direction. +;; question-filter filters on the `name` column in `cards` table. +;; collection-filter filters on the `name` column in `collections` table. +;; +;; Sort column is given over in keyword form to honeysql. Default `card.name` +;; +;; Sort direction can be `asc` or `desc`, ascending and descending respectively. Default `asc`. +;; +;; All inputs have to be strings because that's how the magic middleware +;; that turns these functions into clojure-backed 'datasets' works. 
+(defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil nil nil nil)) + + ([query-type question-filter] + (audit.i/internal-query query-type question-filter nil nil nil)) + + ([_ + question-filter + collection-filter + sort-column + sort-direction] + {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] + [:card_name {:display_name "Name", :base_type :type/Name, :remapped_from :card_id}] + [:collection_id {:display_name "Collection ID", :base_type :type/Integer, :remapped_to :collection_name}] + [:collection_name {:display_name "Collection", :base_type :type/Text, :remapped_from :collection_id}] + [:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] + [:database_name {:display_name "Database", :base_type :type/Text, :remapped_from :database_id}] + [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table_name}] + [:table_name {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}] + [:user_id {:display_name "Created By ID", :base_type :type/Integer, :remapped_to :user_name}] + [:user_name {:display_name "Created By", :base_type :type/Text, :remapped_from :user_id}] + [:public_link {:display_name "Public Link", :base_type :type/URL}] + [:cache_ttl {:display_name "Cache Duration", :base_type :type/Number}] + [:avg_exec_time {:display_name "Average Runtime (ms)", :base_type :type/Integer}] + [:total_runtime {:display_name "Total Runtime (ms)", :base_type :type/Number}] + [:query_runs {:display_name "Query Runs", :base_type :type/Integer}]] + :results (common/reducible-query + (-> + {:with [cards/avg-exec-time-45 + cards/total-exec-time-45 + cards/query-runs-45] + :select [[:card.id :card_id] + [:card.name :card_name] + :collection_id + [:coll.name :collection_name] + :card.database_id + [:db.name :database_name] + :card.table_id + [:t.name :table_name] + [:card.creator_id :user_id] + 
[(common/user-full-name :u) :user_name] + [(common/card-public-url :card.public_uuid) :public_link] + :card.cache_ttl + [:avg_exec_time.avg_running_time_ms :avg_exec_time] + [:total_runtime.total_running_time_ms :total_runtime] + [:query_runs.count :query_runs]] + :from [[:report_card :card]] + :left-join [[:collection :coll] [:= :card.collection_id :coll.id] + [:metabase_database :db] [:= :card.database_id :db.id] + [:metabase_table :t] [:= :card.table_id :t.id] + [:core_user :u] [:= :card.creator_id :u.id] + :avg_exec_time [:= :card.id :avg_exec_time.card_id] + :total_runtime [:= :card.id :total_runtime.card_id] + :query_runs [:= :card.id :query_runs.card_id]] + :where [:= :card.archived false]} + (common/add-search-clause question-filter :card.name) + (common/add-search-clause collection-filter :coll.name) + (common/add-sort-clause + (or sort-column "card.name") + (or sort-direction "asc"))))})) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/query_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/query_detail.clj new file mode 100644 index 000000000000..18b57abb27bb --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/query_detail.clj @@ -0,0 +1,72 @@ +(ns metabase-enterprise.audit-app.pages.query-detail + "Queries to show details about a (presumably ad-hoc) query." 
+ (:require [cheshire.core :as json] + [honeysql.core :as hsql] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.cards :as cards] + [metabase.util.schema :as su] + [ring.util.codec :as codec] + [schema.core :as s])) + +(defmethod audit.i/internal-query ::bad-card + [_ card-id] + {:metadata [[:card_id {:display_name "Question ID", :base_type :type/Integer :remapped_from :card_name}] + [:card_name {:display_name "Question", :base_type :type/Text :remapped_from :card_id}] + [:error_str {:display_name "Error", :base_type :type/Text :code true}] + [:collection_id {:display_name "Collection ID", :base_type :type/Integer :remapped_to :collection_name}] + [:collection_name {:display_name "Collection", :base_type :type/Text :remapped_from :collection_id}] + [:database_id {:display_name "Database ID", :base_type :type/Integer :remapped_to :database_name}] + [:database_name {:display_name "Database", :base_type :type/Text :remapped_from :database_id}] + [:schema_name {:display_name "Schema", :base_type :type/Text}] + [:table_id {:display_name "Table ID", :base_type :type/Integer :remapped_to :table_name}] + [:table_name {:display_name "Table", :base_type :type/Text :remapped_from :table_id}] + [:last_run_at {:display_name "Last run at", :base_type :type/DateTime}] + [:total_runs {:display_name "Total runs", :base_type :type/Integer}] + ;; Denormalize by string_agg in order to avoid having to deal with complicated left join + [:dash_name_str {:display_name "Dashboards it's in", :base_type :type/Text}] + [:user_id {:display_name "Created By ID", :base_type :type/Integer :remapped_to :user_name}] + [:user_name {:display_name "Created By", :base_type :type/Text :remapped_from :user_id}] + [:updated_at {:display_name "Updated At", :base_type :type/DateTime}]] + :results (common/reducible-query + {:with [cards/query-runs + cards/latest-qe + cards/dashboards-ids] + :select 
[[:card.id :card_id] + [:card.name :card_name] + [:latest_qe.error :error_str] + :collection_id + [(hsql/call :coalesce :coll.name "Our Analytics") :collection_name] + :card.database_id + [:db.name :database_name] + [:t.schema :schema_name] + :card.table_id + [:t.name :table_name] + [:latest_qe.started_at :last_run_at] + [:query_runs.count :total_runs] + [:dash_card.name_str :dash_name_str] + [:card.creator_id :user_id] + [(common/user-full-name :u) :user_name] + [:card.updated_at :updated_at]] + :from [[:report_card :card]] + :left-join [[:collection :coll] [:= :card.collection_id :coll.id] + [:metabase_database :db] [:= :card.database_id :db.id] + [:metabase_table :t] [:= :card.table_id :t.id] + [:core_user :u] [:= :card.creator_id :u.id] + :latest_qe [:= :card.id :latest_qe.card_id] + :query_runs [:= :card.id :query_runs.card_id] + :dash_card [:= :card.id :dash_card.card_id]] + :where [:= :card.id card-id] })}) + +;; Details about a specific query (currently just average execution time). +(s/defmethod audit.i/internal-query ::details + [_ query-hash :- su/NonBlankString] + {:metadata [[:query {:display_name "Query", :base_type :type/Dictionary}] + [:average_execution_time {:display_name "Avg. Exec. Time (ms)", :base_type :type/Number}]] + :results (common/reducible-query + {:select [:query + :average_execution_time] + :from [:query] + :where [:= :query_hash (codec/base64-decode query-hash)] + :limit 1}) + :xform (map #(update (vec %) 0 json/parse-string))}) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/question_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/question_detail.clj new file mode 100644 index 000000000000..e9561abdde5a --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/question_detail.clj @@ -0,0 +1,33 @@ +(ns metabase-enterprise.audit-app.pages.question-detail + "Detail page for a single Card (Question)." 
+ (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.card-and-dashboard-detail :as card-and-dash-detail] + [metabase.models.card :refer [Card]] + [metabase.util.schema :as su] + [schema.core :as s])) + +;; Get views of a Card broken out by a time `unit`, e.g. `day` or `day-of-week`. +(s/defmethod audit.i/internal-query ::views-by-time + [_ card-id :- su/IntGreaterThanZero datetime-unit :- common/DateTimeUnitStr] + (card-and-dash-detail/views-by-time "card" card-id datetime-unit)) + +;; Get cached views of a Card broken out by a time `unit`, e.g. `day` or `day-of-week`. +(s/defmethod audit.i/internal-query ::cached-views-by-time + [_ card-id :- su/IntGreaterThanZero, datetime-unit :- common/DateTimeUnitStr] + (card-and-dash-detail/cached-views-by-time card-id datetime-unit)) + +;; Get the revision history for a Card. +(s/defmethod audit.i/internal-query ::revision-history + [_ card-id :- su/IntGreaterThanZero] + (card-and-dash-detail/revision-history Card card-id)) + +;; Get a view log for a Card. 
+(s/defmethod audit.i/internal-query ::audit-log + [_ card-id :- su/IntGreaterThanZero] + (card-and-dash-detail/audit-log "card" card-id)) + +;; Average execution time broken out by period +(s/defmethod audit.i/internal-query ::avg-execution-time-by-time + [_ card-id :- su/IntGreaterThanZero datetime-unit :- common/DateTimeUnitStr] + (card-and-dash-detail/avg-execution-time-by-time card-id datetime-unit)) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/schemas.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/schemas.clj similarity index 67% rename from enterprise/backend/src/metabase_enterprise/audit/pages/schemas.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/schemas.clj index 276b06964a38..1704e6df11c2 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/schemas.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/schemas.clj @@ -1,5 +1,6 @@ -(ns metabase-enterprise.audit.pages.schemas - (:require [metabase-enterprise.audit.pages.common :as common] +(ns metabase-enterprise.audit-app.pages.schemas + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.honeysql-extensions :as hx] [schema.core :as s])) @@ -22,9 +23,10 @@ ;; GROUP BY db_name, db_schema ;; ORDER BY count(*) DESC ;; LIMIT 10 -(defn ^:internal-query-fn ^:deprecated most-queried - "Query that returns the top 10 most-queried schemas, in descending order." - [] +;; +;; DEPRECATED Query that returns the top 10 most-queried schemas, in descending order. 
+(defmethod audit.i/internal-query ::most-queried + [_] {:metadata [[:schema {:display_name "Schema", :base_type :type/Title}] [:executions {:display_name "Executions", :base_type :type/Integer}]] :results (common/reducible-query @@ -64,9 +66,10 @@ ;; GROUP BY db_name, db_schema ;; ORDER BY avg_running_time DESC ;; LIMIT 10 -(defn ^:internal-query-fn ^:deprecated slowest-schemas - "Query that returns the top 10 schemas with the slowest average query execution time in descending order." - [] +;; +;; DEPRECATED Query that returns the top 10 schemas with the slowest average query execution time in descending order. +(defmethod audit.i/internal-query ::slowest-schemas + [_] {:metadata [[:schema {:display_name "Schema", :base_type :type/Title}] [:avg_running_time {:display_name "Average Running Time (ms)", :base_type :type/Decimal}]] :results (common/reducible-query @@ -109,13 +112,15 @@ ;; SELECT s.database_name AS "database", s."schema", s.tables, c.saved_count AS saved_queries ;; FROM schemas ;; LEFT JOIN cards c -;; ON s.database_id = c.database_id AND s."schema" = c."schema" -(s/defn ^:internal-query-fn ^:deprecated table - "Query that returns a data for a table full of fascinating information about the different schemas in use in our - application." - ([] - (table nil)) - ([query-string :- (s/maybe s/Str)] +;; ON s.database_id = c.database_id +;; AND s."schema" = c."schema" +;; +;; DEPRECATED Query that returns data for a table full of fascinating information about the different schemas in use +;; in our application. 
+(s/defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + ([_ query-string :- (s/maybe s/Str)] {:metadata [[:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database}] [:database {:display_name "Database", :base_type :type/Title, :remapped_from :database_id}] [:schema_id {:display_name "Schema ID", :base_type :type/Text, :remapped_to :schema}] @@ -123,30 +128,30 @@ [:tables {:display_name "Tables", :base_type :type/Integer}] [:saved_queries {:display_name "Saved Queries", :base_type :type/Integer}]] :results (common/reducible-query - (-> - {:with [[:cards {:select [[:t.db_id :database_id] - :t.schema - [:%count.* :saved_count]] - :from [[:report_card :c]] - :left-join [[:metabase_table :t] [:= :c.table_id :t.id]] - :where [:not= :c.table_id nil] - :group-by [:t.db_id :t.schema]}] - [:schemas {:select [[:db.id :database_id] - [:db.name :database_name] - :t.schema - [:%count.* :tables]] - :from [[:metabase_table :t]] - :left-join [[:metabase_database :db] [:= :t.db_id :db.id]] - :group-by [:db.id :t.schema] - :order-by [[:db.id :asc] [:t.schema :asc]]}]] - :select [:s.database_id - [:s.database_name :database] - [(hx/concat :s.database_id (hx/literal ".") :s.schema) :schema_id] - :s.schema - :s.tables - [:c.saved_count :saved_queries]] - :from [[:schemas :s]] - :left-join [[:cards :c] [:and - [:= :s.database_id :c.database_id] - [:= :s.schema :c.schema]]]} - (common/add-search-clause query-string :s.schema)))})) + (-> + {:with [[:cards {:select [[:t.db_id :database_id] + :t.schema + [:%count.* :saved_count]] + :from [[:report_card :c]] + :left-join [[:metabase_table :t] [:= :c.table_id :t.id]] + :where [:not= :c.table_id nil] + :group-by [:t.db_id :t.schema]}] + [:schemas {:select [[:db.id :database_id] + [:db.name :database_name] + :t.schema + [:%count.* :tables]] + :from [[:metabase_table :t]] + :left-join [[:metabase_database :db] [:= :t.db_id :db.id]] + :group-by [:db.id :t.schema] 
+ :order-by [[:db.id :asc] [:t.schema :asc]]}]] + :select [:s.database_id + [:s.database_name :database] + [(hx/concat :s.database_id (hx/literal ".") :s.schema) :schema_id] + :s.schema + :s.tables + [:c.saved_count :saved_queries]] + :from [[:schemas :s]] + :left-join [[:cards :c] [:and + [:= :s.database_id :c.database_id] + [:= :s.schema :c.schema]]]} + (common/add-search-clause query-string :s.schema)))})) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/table_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/table_detail.clj similarity index 82% rename from enterprise/backend/src/metabase_enterprise/audit/pages/table_detail.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/table_detail.clj index 2309d9eb4ae2..4dd12773928e 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/table_detail.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/table_detail.clj @@ -1,11 +1,13 @@ -(ns metabase-enterprise.audit.pages.table-detail - (:require [metabase-enterprise.audit.pages.common :as common] +(ns metabase-enterprise.audit-app.pages.table-detail + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.schema :as su] [ring.util.codec :as codec] [schema.core :as s])) -(s/defn ^:internal-query-fn audit-log - [table-id :- su/IntGreaterThanZero] +;; View log for a specific Table. 
+(s/defmethod audit.i/internal-query ::audit-log + [_ table-id :- su/IntGreaterThanZero] {:metadata [[:started_at {:display_name "Viewed on", :base_type :type/DateTime}] [:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :query}] [:query {:display_name "Query", :base_type :type/Text, :remapped_from :card_id}] diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/tables.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/tables.clj similarity index 67% rename from enterprise/backend/src/metabase_enterprise/audit/pages/tables.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/tables.clj index e852de402976..99c0b8f593d1 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/tables.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/tables.clj @@ -1,5 +1,6 @@ -(ns metabase-enterprise.audit.pages.tables - (:require [metabase-enterprise.audit.pages.common :as common] +(ns metabase-enterprise.audit-app.pages.tables + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.honeysql-extensions :as hx] [schema.core :as s])) @@ -39,23 +40,21 @@ [:metabase_database :db] [:= :t.db_id :db.id]] :order-by [[:executions asc-or-desc]]})}) -(defn ^:internal-query-fn most-queried - "Query that returns the top-10 most-queried Tables, in descending order." - [] +;; Query that returns the top-10 most-queried Tables, in descending order. +(defmethod audit.i/internal-query ::most-queried + [_] (query-counts :desc)) -(defn ^:internal-query-fn least-queried - "Query that returns the top-10 least-queried Tables (with at least one query execution), in ascending order." - [] +;; Query that returns the top-10 least-queried Tables (with at least one query execution), in ascending order. 
+(defmethod audit.i/internal-query ::least-queried + [_] (query-counts :asc)) - - -(s/defn ^:internal-query-fn table - "A table of Tables." - ([] - (table nil)) - ([query-string :- (s/maybe s/Str)] +;; A table of Tables. +(s/defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + ([_ query-string :- (s/maybe s/Str)] {:metadata [[:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] [:database_name {:display_name "Database", :base_type :type/Text, :remapped_from :database_id}] [:schema_id {:display_name "Schema ID", :base_type :type/Text, :remapped_to :schema_name}] @@ -64,18 +63,18 @@ [:table_name {:display_name "Table Name in DB", :base_type :type/Name, :remapped_from :table_id}] [:table_display_name {:display_name "Table Display Name", :base_type :type/Text}]] :results (common/reducible-query - (-> - {:select [[:db.id :database_id] - [:db.name :database_name] - [(hx/concat :db.id (hx/literal ".") :t.schema) :schema_id] - [:t.schema :table_schema] - [:t.id :table_id] - [:t.name :table_name] - [:t.display_name :table_display_name]] - :from [[:metabase_table :t]] - :join [[:metabase_database :db] [:= :t.db_id :db.id]] - :order-by [[:%lower.db.name :asc] - [:%lower.t.schema :asc] - [:%lower.t.name :asc]] - :where [:= :t.active true]} - (common/add-search-clause query-string :db.name :t.schema :t.name :t.display_name)))})) + (-> + {:select [[:db.id :database_id] + [:db.name :database_name] + [(hx/concat :db.id (hx/literal ".") :t.schema) :schema_id] + [:t.schema :table_schema] + [:t.id :table_id] + [:t.name :table_name] + [:t.display_name :table_display_name]] + :from [[:metabase_table :t]] + :join [[:metabase_database :db] [:= :t.db_id :db.id]] + :order-by [[:%lower.db.name :asc] + [:%lower.t.schema :asc] + [:%lower.t.name :asc]] + :where [:= :t.active true]} + (common/add-search-clause query-string :db.name :t.schema :t.name :t.display_name)))})) diff --git 
a/enterprise/backend/src/metabase_enterprise/audit/pages/user_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/user_detail.clj similarity index 79% rename from enterprise/backend/src/metabase_enterprise/audit/pages/user_detail.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/user_detail.clj index 652e0a915d3a..1fedecc936b1 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/user_detail.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/user_detail.clj @@ -1,19 +1,21 @@ -(ns metabase-enterprise.audit.pages.user-detail +(ns metabase-enterprise.audit-app.pages.user-detail (:require [honeysql.core :as hsql] - [metabase-enterprise.audit.pages.common :as common] - [metabase-enterprise.audit.pages.common.cards :as cards] - [metabase-enterprise.audit.pages.common.dashboards :as dashboards] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.cards :as cards] + [metabase-enterprise.audit-app.pages.common.dashboards :as dashboards] [metabase.util.honeysql-extensions :as hx] [metabase.util.schema :as su] [metabase.util.urls :as urls] [ring.util.codec :as codec] [schema.core :as s])) -(s/defn ^:internal-query-fn table - "Query that probides a single row of information about a given User, similar to the `users/table` query but restricted - to a single result. - (TODO - in the designs, this is pivoted; should we do that here in Clojure-land?)" - [user-id :- su/IntGreaterThanZero] +;; Query that provides a single row of information about a given User, similar to the `users/table` query but +;; restricted to a single result. +;; +;; (TODO - in the designs, this is pivoted; should we do that here in Clojure-land?)
+(s/defmethod audit.i/internal-query ::table + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:name {:display_name "Name", :base_type :type/Name}] [:role {:display_name "Role", :base_type :type/Text}] [:groups {:display_name "Groups", :base_type :type/Text}] @@ -42,18 +44,18 @@ :where [:= :creator_id user-id]}] [:users {:select [[(common/user-full-name :u) :name] [(hsql/call :case - [:= :u.is_superuser true] - (hx/literal "Admin") - :else - (hx/literal "User")) + [:= :u.is_superuser true] + (hx/literal "Admin") + :else + (hx/literal "User")) :role] :id :date_joined [(hsql/call :case - [:= nil :u.sso_source] - (hx/literal "Email") - :else - :u.sso_source) + [:= nil :u.sso_source] + (hx/literal "Email") + :else + :u.sso_source) :signup_method] :last_name] :from [[:core_user :u]] @@ -74,9 +76,9 @@ :dashboards_saved :pulses_saved]})}) -(s/defn ^:internal-query-fn most-viewed-dashboards - "Return the 10 most-viewed Dashboards for a given User, in descending order." - [user-id :- su/IntGreaterThanZero] +;; Return the 10 most-viewed Dashboards for a given User, in descending order. +(s/defmethod audit.i/internal-query ::most-viewed-dashboards + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Name, :remapped_from :dashboard_id}] [:count {:display_name "Views", :base_type :type/Integer}]] @@ -93,9 +95,9 @@ :order-by [[:%count.* :desc]] :limit 10})}) -(s/defn ^:internal-query-fn most-viewed-questions - "Return the 10 most-viewed Questions for a given User, in descending order." - [user-id :- su/IntGreaterThanZero] +;; Return the 10 most-viewed Questions for a given User, in descending order. 
+(s/defmethod audit.i/internal-query ::most-viewed-questions + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Query", :base_type :type/Name, :remapped_from :card_id}] [:count {:display_name "Views", :base_type :type/Integer}]] @@ -112,8 +114,9 @@ :order-by [[:%count.* :desc]] :limit 10})}) -(s/defn ^:internal-query-fn query-views - [user-id :- su/IntGreaterThanZero] +;; Query views by a specific User. +(s/defmethod audit.i/internal-query ::query-views + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:viewed_on {:display_name "Viewed On", :base_type :type/DateTime}] [:card_id {:display_name "Card ID" :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Query", :base_type :type/Text, :remapped_from :card_id}] @@ -151,8 +154,9 @@ :order-by [[:qe.started_at :desc]]}) :xform (map #(update (vec %) 3 codec/base64-encode))}) -(s/defn ^:internal-query-fn dashboard-views - [user-id :- su/IntGreaterThanZero] +;; Dashboard views by a specific User. +(s/defmethod audit.i/internal-query ::dashboard-views + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:timestamp {:display_name "Viewed on", :base_type :type/DateTime}] [:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Text, :remapped_from :dashboard_id}] @@ -172,9 +176,12 @@ :left-join [[:collection :coll] [:= :dash.collection_id :coll.id]] :order-by [[:vl.timestamp :desc]]})}) -(s/defn ^:internal-query-fn object-views-by-time - "Timeseries chart that shows the number of Question or Dashboard views for a User, broken out by `datetime-unit`." 
- [user-id :- su/IntGreaterThanZero, model :- (s/enum "card" "dashboard"), datetime-unit :- common/DateTimeUnitStr] +;; Timeseries chart that shows the number of Question or Dashboard views for a User, broken out by `datetime-unit`. +(s/defmethod audit.i/internal-query ::object-views-by-time + [_ + user-id :- su/IntGreaterThanZero + model :- (s/enum "card" "dashboard") + datetime-unit :- common/DateTimeUnitStr] {:metadata [[:date {:display_name "Date", :base_type (common/datetime-unit-str->base-type datetime-unit)}] [:views {:display_name "Views", :base_type :type/Integer}]] :results (common/reducible-query @@ -187,14 +194,16 @@ :group-by [(common/grouped-datetime datetime-unit :timestamp)] :order-by [[(common/grouped-datetime datetime-unit :timestamp) :asc]]})}) -(s/defn ^:internal-query-fn created-dashboards - ([user-id] - (created-dashboards user-id nil)) - ([user-id :- su/IntGreaterThanZero, query-string :- (s/maybe s/Str)] +;; Dashboards created by a specific User. +(s/defmethod audit.i/internal-query ::created-dashboards + ([query-type user-id] + (audit.i/internal-query query-type user-id nil)) + ([_ user-id :- su/IntGreaterThanZero query-string :- (s/maybe s/Str)] (dashboards/table query-string [:= :u.id user-id]))) -(s/defn ^:internal-query-fn created-questions - [user-id :- su/IntGreaterThanZero] +;; Questions created by a specific User. +(s/defmethod audit.i/internal-query ::created-questions + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Title", :base_type :type/Name, :remapped_from :card_id}] [:collection_id {:display_name "Collection ID", :base_type :type/Integer, :remapped_to :collection_name}] @@ -205,7 +214,7 @@ [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table_name}] [:table_name {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}] [:avg_running_time_ms {:display_name "Avg. 
exec. time (ms)", :base_type :type/Number}] - [:cache_ttl {:display_name "Cache TTL", :base_type :type/Number}] + [:cache_ttl {:display_name "Cache Duration", :base_type :type/Number}] [:public_link {:display_name "Public Link", :base_type :type/URL}] [:total_views {:display_name "Total Views", :base_type :type/Integer}]] :results (common/reducible-query @@ -236,10 +245,10 @@ :where [:= :card.creator_id user-id] :order-by [[:%lower.card.name :asc]]})}) -(s/defn ^:internal-query-fn downloads - "Table of query downloads (i.e., queries whose results are returned as CSV/JSON/XLS) done by this user, ordered by - most recent." - [user-id :- su/IntGreaterThanZero] +;; Table of query downloads (i.e., queries whose results are returned as CSV/JSON/XLS) done by this user, ordered by +;; most recent. +(s/defmethod audit.i/internal-query ::downloads + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:downloaded_at {:display_name "Downloaded At", :base_type :type/DateTime}] [:rows_downloaded {:display_name "Rows Downloaded", :base_type :type/Integer}] [:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] @@ -250,20 +259,20 @@ [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :source_table}] [:source_table {:display_name "Source Table", :base_type :type/Text, :remapped_from :table_id}]] :results (common/reducible-query - {:select [[:qe.started_at :downloaded_at] - [:qe.result_rows :rows_downloaded] - [:card.id :card_id] - [(common/card-name-or-ad-hoc :card) :card_name] - [(common/native-or-gui :qe) :query_type] - [:db.id :database_id] - [:db.name :database] - [:t.id :table_id] - [:t.name :source_table]] - :from [[:query_execution :qe]] - :left-join [[:report_card :card] [:= :card.id :qe.card_id] - [:metabase_database :db] [:= :qe.database_id :db.id] - [:metabase_table :t] [:= :card.table_id :t.id]] - :where [:and - [:= :executor_id user-id] - (common/query-execution-is-download :qe)] - :order-by [[:qe.started_at 
:desc]]})}) + {:select [[:qe.started_at :downloaded_at] + [:qe.result_rows :rows_downloaded] + [:card.id :card_id] + [(common/card-name-or-ad-hoc :card) :card_name] + [(common/native-or-gui :qe) :query_type] + [:db.id :database_id] + [:db.name :database] + [:t.id :table_id] + [:t.name :source_table]] + :from [[:query_execution :qe]] + :left-join [[:report_card :card] [:= :card.id :qe.card_id] + [:metabase_database :db] [:= :qe.database_id :db.id] + [:metabase_table :t] [:= :card.table_id :t.id]] + :where [:and + [:= :executor_id user-id] + (common/query-execution-is-download :qe)] + :order-by [[:qe.started_at :desc]]})}) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/users.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/users.clj similarity index 74% rename from enterprise/backend/src/metabase_enterprise/audit/pages/users.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/users.clj index c7b18a63741c..00e7b00c9d0a 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/users.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/users.clj @@ -1,14 +1,16 @@ -(ns metabase-enterprise.audit.pages.users +(ns metabase-enterprise.audit-app.pages.users (:require [honeysql.core :as hsql] - [metabase-enterprise.audit.pages.common :as common] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.honeysql-extensions :as hx] [ring.util.codec :as codec] [schema.core :as s])) -(defn ^:internal-query-fn ^:deprecated active-users-and-queries-by-day - "Query that returns data for a two-series timeseries: the number of DAU (a User is considered active for purposes of - this query if they ran at least one query that day), and total number of queries ran. Broken out by day." 
- [] +;; DEPRECATED Query that returns data for a two-series timeseries: the number of DAU (a User is considered active for +;; purposes of this query if they ran at least one query that day), and total number of queries ran. Broken out by +;; day. +(defmethod audit.i/internal-query ::active-users-and-queries-by-day + [_] {:metadata [[:users {:display_name "Users", :base_type :type/Integer}] [:queries {:display_name "Queries", :base_type :type/Integer}] [:day {:display_name "Date", :base_type :type/Date}]] @@ -25,27 +27,26 @@ :group-by [:day] :order-by [[:day :asc]]})}) - -(s/defn ^:internal-query-fn active-and-new-by-time - "Two-series timeseries that returns number of active Users (Users who ran at least one query) and number of new Users, - broken out by `datetime-unit`." - [datetime-unit :- common/DateTimeUnitStr] +;; Two-series timeseries that returns number of active Users (Users who ran at least one query) and number of new +;; Users, broken out by `datetime-unit`. +(s/defmethod audit.i/internal-query ::active-and-new-by-time + [_ datetime-unit :- common/DateTimeUnitStr] {:metadata [[:date {:display_name "Date", :base_type (common/datetime-unit-str->base-type datetime-unit)}] [:active_users {:display_name "Active Users", :base_type :type/Integer}] [:new_users {:display_name "New Users", :base_type :type/Integer}]] ;; this is so nice and easy to implement in a single query with FULL OUTER JOINS but unfortunately only pg supports ;; them(!) 
:results (let [active (common/query - {:select [[(common/grouped-datetime datetime-unit :started_at) :date] - [:%distinct-count.executor_id :count]] - :from [:query_execution] - :group-by [(common/grouped-datetime datetime-unit :started_at)]}) + {:select [[(common/grouped-datetime datetime-unit :started_at) :date] + [:%distinct-count.executor_id :count]] + :from [:query_execution] + :group-by [(common/grouped-datetime datetime-unit :started_at)]}) date->active (zipmap (map :date active) (map :count active)) new (common/query - {:select [[(common/grouped-datetime datetime-unit :date_joined) :date] - [:%count.* :count]] - :from [:core_user] - :group-by [(common/grouped-datetime datetime-unit :date_joined)]}) + {:select [[(common/grouped-datetime datetime-unit :date_joined) :date] + [:%count.* :count]] + :from [:core_user] + :group-by [(common/grouped-datetime datetime-unit :date_joined)]}) date->new (zipmap (map :date new) (map :count new)) all-dates (sort (keep identity (distinct (concat (keys date->active) (keys date->new)))))] @@ -54,10 +55,9 @@ :active_users (date->active date 0) :new_users (date->new date 0)}))}) - -(defn ^:internal-query-fn most-active - "Query that returns the 10 most active Users (by number of query executions) in descending order." - [] +;; Query that returns the 10 most active Users (by number of query executions) in descending order. +(defmethod audit.i/internal-query ::most-active + [_] {:metadata [[:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :name}] [:name {:display_name "Name", :base_type :type/Name, :remapped_from :user_id}] [:count {:display_name "Query Executions", :base_type :type/Integer}]] @@ -79,73 +79,72 @@ [:%lower.u.first_name :asc]] :limit 10})}) - -(defn ^:internal-query-fn most-saves - "Query that returns the 10 Users with the most saved objects in descending order." - [] +;; Query that returns the 10 Users with the most saved objects in descending order. 
+(defmethod audit.i/internal-query ::most-saves + [_] {:metadata [[:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :user_name}] [:user_name {:display_name "Name", :base_type :type/Name, :remapped_from :user_id}] [:saves {:display_name "Saved Objects", :base_type :type/Integer}]] :results (common/reducible-query - {:with [[:card_saves {:select [:creator_id - [:%count.* :count]] - :from [:report_card] - :group-by [:creator_id]}] - [:dashboard_saves {:select [:creator_id + {:with [[:card_saves {:select [:creator_id [:%count.* :count]] - :from [:report_dashboard] + :from [:report_card] :group-by [:creator_id]}] - [:pulse_saves {:select [:creator_id - [:%count.* :count]] - :from [:pulse] - :group-by [:creator_id]}]] - :select [[:u.id :user_id] - [(common/user-full-name :u) :user_name] - [(hx/+ (common/zero-if-null :card_saves.count) - (common/zero-if-null :dashboard_saves.count) - (common/zero-if-null :pulse_saves.count)) - :saves]] - :from [[:core_user :u]] - :left-join [:card_saves [:= :u.id :card_saves.creator_id] - :dashboard_saves [:= :u.id :dashboard_saves.creator_id] - :pulse_saves [:= :u.id :pulse_saves.creator_id]] - :order-by [[:saves :desc] - [:u.last_name :asc] - [:u.first_name :asc]] - :limit 10})}) - + [:dashboard_saves {:select [:creator_id + [:%count.* :count]] + :from [:report_dashboard] + :group-by [:creator_id]}] + [:pulse_saves {:select [:creator_id + [:%count.* :count]] + :from [:pulse] + :group-by [:creator_id]}]] + :select [[:u.id :user_id] + [(common/user-full-name :u) :user_name] + [(hx/+ (common/zero-if-null :card_saves.count) + (common/zero-if-null :dashboard_saves.count) + (common/zero-if-null :pulse_saves.count)) + :saves]] + :from [[:core_user :u]] + :left-join [:card_saves [:= :u.id :card_saves.creator_id] + :dashboard_saves [:= :u.id :dashboard_saves.creator_id] + :pulse_saves [:= :u.id :pulse_saves.creator_id]] + :order-by [[:saves :desc] + [:u.last_name :asc] + [:u.first_name :asc]] + :limit 10})}) -(defn 
^:internal-query-fn query-execution-time-per-user - "Query that returns the total time spent executing queries, broken out by User, for the top 10 Users." - [] +;; Query that returns the total time spent executing queries, broken out by User, for the top 10 Users. +(defmethod audit.i/internal-query ::query-execution-time-per-user + [_] {:metadata [[:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :name}] [:name {:display_name "Name", :base_type :type/Name, :remapped_from :user_id}] [:execution_time_ms {:display_name "Total Execution Time (ms)", :base_type :type/Decimal}]] :results (common/reducible-query - {:with [[:exec_time {:select [[:%sum.running_time :execution_time_ms] - :qe.executor_id] - :from [[:query_execution :qe]] - :where [:not= nil :qe.executor_id] - :group-by [:qe.executor_id] - :order-by [[:%sum.running_time :desc]] - :limit 10}]] - :select [[:u.id :user_id] - [(common/user-full-name :u) :name] - [(hsql/call :case [:not= :exec_time.execution_time_ms nil] :exec_time.execution_time_ms - :else 0) - :execution_time_ms]] - :from [[:core_user :u]] - :left-join [:exec_time [:= :exec_time.executor_id :u.id]] - :order-by [[:execution_time_ms :desc] - [:%lower.u.last_name :asc] - [:%lower.u.first_name :asc]] - :limit 10})}) + {:with [[:exec_time {:select [[:%sum.running_time :execution_time_ms] + :qe.executor_id] + :from [[:query_execution :qe]] + :where [:not= nil :qe.executor_id] + :group-by [:qe.executor_id] + :order-by [[:%sum.running_time :desc]] + :limit 10}]] + :select [[:u.id :user_id] + [(common/user-full-name :u) :name] + [(hsql/call :case [:not= :exec_time.execution_time_ms nil] :exec_time.execution_time_ms + :else 0) + :execution_time_ms]] + :from [[:core_user :u]] + :left-join [:exec_time [:= :exec_time.executor_id :u.id]] + :order-by [[:execution_time_ms :desc] + [:%lower.u.last_name :asc] + [:%lower.u.first_name :asc]] + :limit 10})}) -(s/defn ^:internal-query-fn table - ([] - (table nil)) +;; A table of all the Users 
for this instance, and various statistics about them (see metadata below). +(s/defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) - ([query-string :- (s/maybe s/Str)] + ([_ query-string :- (s/maybe s/Str)] {:metadata [[:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :name}] [:name {:display_name "Name", :base_type :type/Name, :remapped_from :user_id}] [:role {:display_name "Role", :base_type :type/Text}] @@ -221,11 +220,10 @@ [:%lower.u.first_name :asc]]} (common/add-search-clause query-string :u.first_name :u.last_name)))})) - -(defn ^:internal-query-fn query-views - "Return a log of all query executions, including information about the Card associated with the query and the - Collection it is in (both, if applicable) and Database/Table referenced by the query." - [] +;; Return a log of all query executions, including information about the Card associated with the query and the +;; Collection it is in (both, if applicable) and Database/Table referenced by the query. +(defmethod audit.i/internal-query ::query-views + [_] {:metadata [[:viewed_on {:display_name "Viewed On", :base_type :type/DateTime}] [:card_id {:display_name "Card ID" :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Query", :base_type :type/Text, :remapped_from :card_id}] @@ -267,9 +265,9 @@ :order-by [[:qe.started_at :desc]]}) :xform (map #(update (vec %) 3 codec/base64-encode))}) -(defn ^:internal-query-fn dashboard-views - "Return a log of when all Dashboard views, including the Collection the Dashboard belongs to." - [] +;; Return a log of when all Dashboard views, including the Collection the Dashboard belongs to. 
+(defmethod audit.i/internal-query ::dashboard-views + [_] {:metadata [[:timestamp {:display_name "Viewed on", :base_type :type/DateTime}] [:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Text, :remapped_from :dashboard_id}] diff --git a/enterprise/backend/src/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries.clj b/enterprise/backend/src/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries.clj similarity index 66% rename from enterprise/backend/src/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries.clj index 489e38f478cf..4c9c5b59d32e 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries.clj @@ -1,17 +1,18 @@ -(ns metabase-enterprise.audit.query-processor.middleware.handle-audit-queries - "Middleware that handles special `internal` type queries. `internal` queries are implementeed directly by Clojure - functions, and do not neccesarily need to query a database to provide results; by default, they completely skip - the rest of the normal QP pipeline. `internal` queries should look like the following: +(ns metabase-enterprise.audit-app.query-processor.middleware.handle-audit-queries + "Middleware that handles special `internal` type queries. `internal` queries are implemented directly by various + implementations of the [[metabase-enterprise.audit-app.interface/internal-query]] multimethod, and do not necessarily + need to query a database to provide results; by default, they completely skip the rest of the normal QP pipeline. 
+ `internal` queries as passed to the Query Processor should look like the following: {:type :internal - :fn \"metabase-enterprise.audit.pages.dashboards/table\" + :fn \"metabase-enterprise.audit-app.pages.dashboards/table\" :args []} ; optional vector of args to pass to the fn above - To run an `internal` query, you must have superuser permissions, and the function itself must be tagged as an - `:internal-query-fn`. This middleware will automatically resolve the function as appropriate, loading its namespace - if needed. + To run an `internal` query, you must have superuser permissions. This middleware will automatically resolve the + function as appropriate, loading its namespace if needed. - (defn ^:internal-query-fn table [] + (defmethod audit.i/internal-query ::table + [_] {:metadata ..., :results ...}) The function should return a map with two keys, `:metadata` and `:results`, in either the 'legacy' or 'reducible' @@ -19,7 +20,7 @@ LEGACY FORMAT: - * `:metadata` is a series of [col-name metadata-map] pairs. + * `:metadata` is a series of [col-name metadata-map] pairs. See [[metabase-enterprise.audit-app.interface/ResultsMetadata]] * `:results` is a series of maps. {:metadata [[:title {:display_name \"Title\", :base_type :type/Text}] @@ -37,22 +38,15 @@ :results (fn [context] ...) :xform ...}" (:require [clojure.data :as data] - [clojure.string :as str] + [metabase-enterprise.audit-app.interface :as audit.i] [metabase.api.common :as api] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] [metabase.query-processor.context :as context] [metabase.query-processor.error-type :as error-type] [metabase.util.i18n :refer [tru]] [metabase.util.schema :as su] [schema.core :as s])) -(def ^:private ResultsMetadata - "Schema for the expected format for `:metadata` returned by an internal query function." 
- (su/non-empty - [[(s/one su/KeywordOrString "field name") - (s/one {:base_type su/FieldType, :display_name su/NonBlankString, s/Keyword s/Any} - "field metadata")]])) - (defn- check-results-and-metadata-keys-match "Primarily for dev and debugging purposes. We can probably take this out when shipping the finished product." [results metadata] @@ -74,7 +68,7 @@ (assoc v :name (name k)))) (s/defn ^:private format-results [{:keys [results metadata]} :- {:results [su/Map] - :metadata ResultsMetadata}] + :metadata audit.i/ResultsMetadata}] (check-results-and-metadata-keys-match results metadata) {:cols (metadata->cols metadata) :rows (for [row results] @@ -94,26 +88,6 @@ to implement paging for all audit app queries automatically." nil) -(def ^:private resolve-internal-query-fn-lock (Object.)) - -(defn- resolve-internal-query-fn - "Returns the varr for the internal query fn." - [qualified-fn-str] - (let [[ns-str] (str/split qualified-fn-str #"/")] - (or - ;; resolve if already available... - (locking resolve-internal-query-fn-lock - (resolve (symbol qualified-fn-str)) - ;; if not, load the namespace... - (require (symbol ns-str)) - ;; ...then try resolving again - (resolve (symbol qualified-fn-str))) - ;; failing that, throw an Exception - (throw - (Exception. - (str (tru "Unable to run internal query function: cannot resolve {0}" - qualified-fn-str))))))) - (defn- reduce-reducible-results [rff context {:keys [metadata results xform], :or {xform identity}}] (let [cols (metadata->cols metadata) reducible-rows (results context) @@ -140,18 +114,12 @@ (api/check-superuser) ;; Make sure audit app is enabled (currently the only use case for internal queries). We can figure out a way to ;; allow non-audit-app queries if and when we add some - (when-not (metastore/enable-audit-app?) + (when-not (premium-features/enable-audit-app?) 
(throw (ex-info (tru "Audit App queries are not enabled on this instance.") {:type error-type/invalid-query}))) - ;;now resolve the query - (let [fn-varr (resolve-internal-query-fn qualified-fn-str)] - ;; Make sure this is actually allowed to be a internal query fn & has the results metadata we'll need - (when-not (:internal-query-fn (meta fn-varr)) - (throw (Exception. (str (tru "Invalid internal query function: {0} is not marked as an ^:internal-query-fn" - qualified-fn-str))))) - (binding [*additional-query-params* (dissoc query :fn :args)] - (let [results (apply @fn-varr args)] - (reduce-results rff context results))))) + (binding [*additional-query-params* (dissoc query :fn :args)] + (let [resolved (apply audit.i/resolve-internal-query qualified-fn-str args)] + (reduce-results rff context resolved)))) (defn handle-internal-queries "Middleware that handles `internal` type queries." diff --git a/enterprise/backend/src/metabase_enterprise/content_management/api/review.clj b/enterprise/backend/src/metabase_enterprise/content_management/api/review.clj new file mode 100644 index 000000000000..3e83095e1108 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/content_management/api/review.clj @@ -0,0 +1,25 @@ +(ns metabase-enterprise.content-management.api.review + (:require [compojure.core :refer [POST]] + [metabase.api.common :as api] + [metabase.models.moderation-review :as moderation-review] + [metabase.moderation :as moderation] + [metabase.util.schema :as su] + [schema.core :as s])) + +(api/defendpoint POST "/" + "Create a new `ModerationReview`." 
+ [:as {{:keys [text moderated_item_id moderated_item_type status]} :body}] + {text (s/maybe s/Str) + moderated_item_id su/IntGreaterThanZero + moderated_item_type moderation/moderated-item-types + status moderation-review/Statuses} + (api/check-superuser) + (let [review-data {:text text + :moderated_item_id moderated_item_id + :moderated_item_type moderated_item_type + :moderator_id api/*current-user-id* + :status status}] + (api/check-404 (moderation/moderated-item review-data)) + (moderation-review/create-review! review-data))) + +(api/define-routes) diff --git a/enterprise/backend/src/metabase_enterprise/content_management/api/routes.clj b/enterprise/backend/src/metabase_enterprise/content_management/api/routes.clj new file mode 100644 index 000000000000..24744bf74d51 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/content_management/api/routes.clj @@ -0,0 +1,12 @@ +(ns metabase-enterprise.content-management.api.routes + (:require [compojure.core :as compojure :refer [context]] + [metabase-enterprise.api.routes.common :as ee.api.common] + [metabase-enterprise.content-management.api.review :as review] + [metabase.api.routes.common :refer [+auth]])) + +(defn- +require-content-management [handler] + (ee.api.common/+require-premium-feature :content-management handler)) + +(compojure/defroutes ^{:doc "API routes only available if we have a premium token with the `:content-management` feature."} + routes + (context "/moderation-review" [] (+require-content-management (+auth review/routes)))) diff --git a/enterprise/backend/src/metabase_enterprise/enhancements/ee_strategy_impl.clj b/enterprise/backend/src/metabase_enterprise/enhancements/ee_strategy_impl.clj index 524cf82745ca..9c0c7eea62bd 100644 --- a/enterprise/backend/src/metabase_enterprise/enhancements/ee_strategy_impl.clj +++ b/enterprise/backend/src/metabase_enterprise/enhancements/ee_strategy_impl.clj @@ -65,7 +65,7 @@ ;; For `MyProtocol` methods: invoke `ee-impl` if EE enhancements are 
enabled, otherwise invoke `oss-impl` (def impl - (reify-ee-strategy-impl #'settings.metastore/enable-enhancements? ee-impl oss-impl + (reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? ee-impl oss-impl MyProtocol)) At the time of this writing, this only works with first-class Clojure Protocols (as opposed to plain Java diff --git a/enterprise/backend/src/metabase_enterprise/enhancements/integrations/ldap.clj b/enterprise/backend/src/metabase_enterprise/enhancements/integrations/ldap.clj index ca1a4d995706..5b79cd6f45c3 100644 --- a/enterprise/backend/src/metabase_enterprise/enhancements/integrations/ldap.clj +++ b/enterprise/backend/src/metabase_enterprise/enhancements/integrations/ldap.clj @@ -6,7 +6,7 @@ [metabase.integrations.ldap.interface :as i] [metabase.models.setting :as setting :refer [defsetting]] [metabase.models.user :as user :refer [User]] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.util :as u] [metabase.util.i18n :refer [deferred-tru trs]] [metabase.util.schema :as su] @@ -113,5 +113,5 @@ forwards method invocations to `impl`; if EE features *are not* enabled, forwards method invocations to the default OSS impl." ;; TODO -- should we require `:sso` token features for using the LDAP enhancements? - (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-enhancements? impl default-impl/impl + (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? 
impl default-impl/impl LDAPIntegration)) diff --git a/enterprise/backend/src/metabase_enterprise/enhancements/models/native_query_snippet/permissions.clj b/enterprise/backend/src/metabase_enterprise/enhancements/models/native_query_snippet/permissions.clj index 53a6ec4ed69b..ff2b077c73c0 100644 --- a/enterprise/backend/src/metabase_enterprise/enhancements/models/native_query_snippet/permissions.clj +++ b/enterprise/backend/src/metabase_enterprise/enhancements/models/native_query_snippet/permissions.clj @@ -4,7 +4,7 @@ [metabase.models.interface :as i] [metabase.models.native-query-snippet.permissions :as snippet.perms] [metabase.models.permissions :as perms] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.util.schema :as su] [pretty.core :refer [PrettyPrintable]] [schema.core :as s] @@ -46,7 +46,7 @@ "EE implementation of NativeQuerySnippet permissions. Uses Collection permissions instead allowing anyone to view or edit all Snippets. (Only when a valid Enterprise Edition token is present. Otherwise, this forwards method invocations to the default impl)." - (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-enhancements? ee-impl* snippet.perms/default-impl + (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? ee-impl* snippet.perms/default-impl snippet.perms/PermissionsImpl)) (snippet.perms/set-impl! 
ee-impl) diff --git a/enterprise/backend/src/metabase_enterprise/enhancements/models/permissions/block_permissions.clj b/enterprise/backend/src/metabase_enterprise/enhancements/models/permissions/block_permissions.clj new file mode 100644 index 000000000000..9d7c399c889f --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/enhancements/models/permissions/block_permissions.clj @@ -0,0 +1,35 @@ +(ns metabase-enterprise.enhancements.models.permissions.block-permissions + (:require [metabase.api.common :as api] + [metabase.models.permissions :as perms] + [metabase.public-settings.premium-features :as settings.premium-features] + [metabase.query-processor.error-type :as qp.error-type] + [metabase.util.i18n :refer [tru]])) + +(defn- current-user-has-block-permissions-for-database? + [database-or-id] + (contains? @api/*current-user-permissions-set* (perms/database-block-perms-path database-or-id))) + +(defn check-block-permissions + "Assert that block permissions are not in effect for Database for a query that's only allowed to run because of + Collection perms; throw an Exception if they are. Otherwise returns a keyword explaining why the check + succeeded (this is mostly for test/debug purposes). The query is still allowed to run if the current User has + appropriate data permissions from another Group. See the namespace documentation for [[metabase.models.collection]] + for more details. + + Note that this feature is Metabase© Enterprise Edition™ only and only enabled if we have a valid Enterprise Edition™ + token. [[metabase.query-processor.middleware.permissions/check-block-permissions]] invokes this function if it + exists." + [{database-id :database, :as query}] + (cond + (not (settings.premium-features/enable-enhancements?)) + ::enhancements-not-enabled + + (not (current-user-has-block-permissions-for-database? database-id)) + ::no-block-permissions-for-db + + :else + ;; TODO -- come up with a better error message. 
+ (throw (ex-info (tru "Blocked: you are not allowed to run queries against Database {0}." database-id) + {:type qp.error-type/missing-required-permissions + :actual-permissions @api/*current-user-permissions-set* + :permissions-error? true})))) diff --git a/enterprise/backend/src/metabase_enterprise/pulse.clj b/enterprise/backend/src/metabase_enterprise/pulse.clj index 2352b06f9d1b..f364b531f5ac 100644 --- a/enterprise/backend/src/metabase_enterprise/pulse.clj +++ b/enterprise/backend/src/metabase_enterprise/pulse.clj @@ -1,6 +1,8 @@ (ns metabase-enterprise.pulse + "TODO -- this should be moved to `metabase-enterprise..pulse` once we figure out which feature this belongs + to." (:require [metabase-enterprise.enhancements.ee-strategy-impl :as ee-strategy-impl] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.pulse.interface :as i]) (:import metabase.pulse.interface.SubscriptionParameters)) @@ -20,5 +22,5 @@ (def ee-strategy-parameters-impl "Enterprise way of getting dashboard filter parameters" - (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-enhancements? parameters-impl i/default-parameters-impl + (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? 
parameters-impl i/default-parameters-impl i/SubscriptionParameters)) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/api/gtap.clj b/enterprise/backend/src/metabase_enterprise/sandbox/api/gtap.clj index 882157a71597..b26f54d32cdf 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/api/gtap.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/api/gtap.clj @@ -3,7 +3,7 @@ (:require [compojure.core :refer [DELETE GET POST PUT]] [metabase-enterprise.sandbox.models.group-table-access-policy :refer [GroupTableAccessPolicy]] [metabase.api.common :as api] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] [metabase.util :as u] [metabase.util.i18n :refer [tru]] [metabase.util.schema :as su] @@ -67,7 +67,7 @@ "Wrap the Ring handler to make sure sandboxes are enabled before allowing access to the API endpoints." [handler] (fn [request respond raise] - (if-not (metastore/enable-sandboxes?) + (if-not (premium-features/enable-sandboxes?) (raise (ex-info (str (tru "Error: sandboxing is not enabled for this instance.") " " (tru "Please check you have set a valid Enterprise token and try again.")) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/api/routes.clj b/enterprise/backend/src/metabase_enterprise/sandbox/api/routes.clj index 52e4f343801c..ca59d2997330 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/api/routes.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/api/routes.clj @@ -1,23 +1,23 @@ (ns metabase-enterprise.sandbox.api.routes - "Multi-tenant API routes." + "API routes that are only enabled if we have a premium token with the `:sandboxes` feature." 
(:require [compojure.core :as compojure] + [metabase-enterprise.api.routes.common :as ee.api.common] [metabase-enterprise.sandbox.api.gtap :as gtap] [metabase-enterprise.sandbox.api.table :as table] [metabase-enterprise.sandbox.api.user :as user] - [metabase.server.middleware.auth :as middleware.auth])) - -;; this is copied from `metabase.api.routes` because if we require that above we will destroy startup times for `lein -;; ring server` -(def ^:private +auth - "Wrap `routes` so they may only be accessed with proper authentiaction credentials." - middleware.auth/enforce-authentication) + [metabase.api.routes.common :refer [+auth]])) (compojure/defroutes ^{:doc "Ring routes for mt API endpoints."} routes + ;; EE-only sandboxing routes live under `/mt` for historical reasons. `/mt` is for multi-tenant. + ;; + ;; TODO - We should change this to `/sandboxes` or something like that. (compojure/context - "/mt" - [] - - (compojure/routes - (compojure/context "/gtap" [] (+auth gtap/routes)) - (compojure/context "/user" [] (+auth user/routes)))) - (compojure/context "/table" [] (+auth table/routes))) + "/mt" [] + (ee.api.common/+require-premium-feature + :sandboxes + (compojure/routes + (compojure/context "/gtap" [] (+auth gtap/routes)) + (compojure/context "/user" [] (+auth user/routes))))) + ;; when sandboxing is enabled we *replace* GET /api/table/:id/query_metadata with a special EE version. If + ;; sandboxing is not enabled, this passes thru to the OSS implementation of the endpoint. + (compojure/context "/table" [] (ee.api.common/+when-premium-feature :sandboxes (+auth table/routes)))) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/api/util.clj b/enterprise/backend/src/metabase_enterprise/sandbox/api/util.clj index 3802cf4394cd..6561060ebdb7 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/api/util.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/api/util.clj @@ -15,4 +15,4 @@ ;; access they shouldn't have. 
If we don't have permissions, we can't determine whether they are segmented, so ;; throw. (throw (ex-info (str (tru "No permissions found for current user")) - {:status-code 403})))))) + {:status-code 403})))))) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/models/group_table_access_policy.clj b/enterprise/backend/src/metabase_enterprise/sandbox/models/group_table_access_policy.clj index 111fafcd2dd8..a8939352b11c 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/models/group_table_access_policy.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/models/group_table_access_policy.clj @@ -1,7 +1,9 @@ (ns metabase-enterprise.sandbox.models.group-table-access-policy "Model definition for Group Table Access Policy, aka GTAP. A GTAP is useed to control access to a certain Table for a certain PermissionsGroup. Whenever a member of that group attempts to query the Table in question, a Saved Question - specified by the GTAP is instead used as the source of the query." + specified by the GTAP is instead used as the source of the query. + + See documentation in [[metabase.models.permissions]] for more information about the Metabase permissions system." (:require [clojure.tools.logging :as log] [medley.core :as m] [metabase.mbql.normalize :as normalize] @@ -20,6 +22,19 @@ (models/defmodel GroupTableAccessPolicy :group_table_access_policy) +;; This guard is to make sure this file doesn't get compiled twice when building the uberjar -- that will totally +;; screw things up because Toucan models use Potemkin `defrecord+` under the hood. 
+(when *compile-files* + (defonce previous-compilation-trace (atom nil)) + (when @previous-compilation-trace + (println "THIS FILE HAS ALREADY BEEN COMPILED!!!!!") + (println "This compilation trace:") + ((requiring-resolve 'clojure.pprint/pprint) (vec (.getStackTrace (Thread/currentThread)))) + (println "Previous compilation trace:") + ((requiring-resolve 'clojure.pprint/pprint) @previous-compilation-trace) + (throw (ex-info "THIS FILE HAS ALREADY BEEN COMPILED!!!!!" {}))) + (reset! previous-compilation-trace (vec (.getStackTrace (Thread/currentThread))))) + (defn- normalize-attribute-remapping-targets [attribute-remappings] (m/map-vals normalize/normalize diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/models/params/field_values.clj b/enterprise/backend/src/metabase_enterprise/sandbox/models/params/field_values.clj index 8ee2cffcae42..59c80750a311 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/models/params/field_values.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/models/params/field_values.clj @@ -6,7 +6,7 @@ [metabase.models.field :as field :refer [Field]] [metabase.models.field-values :as field-values :refer [FieldValues]] [metabase.models.params.field-values :as params.field-values] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.util :as u] [pretty.core :as pretty] [toucan.db :as db] @@ -83,5 +83,5 @@ "Enterprise version of the fetch FieldValues for current User logic. Uses our EE strategy pattern adapter: if EE features *are* enabled, forwards method invocations to `impl`; if EE features *are not* enabled, forwards method invocations to the default OSS impl." - (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-sandboxes? impl params.field-values/default-impl + (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-sandboxes? 
impl params.field-values/default-impl params.field-values/FieldValuesForCurrentUser)) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/models/permissions/delete_sandboxes.clj b/enterprise/backend/src/metabase_enterprise/sandbox/models/permissions/delete_sandboxes.clj index 9aa41048fe41..a3a005b11d07 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/models/permissions/delete_sandboxes.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/models/permissions/delete_sandboxes.clj @@ -4,7 +4,7 @@ [metabase-enterprise.sandbox.models.group-table-access-policy :refer [GroupTableAccessPolicy]] [metabase.models.permissions.delete-sandboxes :as delete-sandboxes] [metabase.models.table :refer [Table]] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.util :as u] [metabase.util.i18n :refer [tru]] [pretty.core :as pretty] @@ -90,19 +90,17 @@ (defn- delete-gtaps-for-group-database! [{:keys [group-id database-id], :as context} changes] (log/debugf "Deleting unneeded GTAPs for Group %d for Database %d. Graph changes: %s" - group-id database-id (pr-str changes)) - (cond - (= changes :none) - (do - (log/debugf "Group %d no longer has any perms for Database %d, deleting all GTAPs for this DB" group-id database-id) - (delete-gtaps-with-condition! group-id [:= :table.db_id database-id])) - - (= changes :all) + group-id database-id (pr-str changes)) + (if (#{:none :all :block} changes) (do - (log/debugf "Group %d now has full data perms for Database %d, deleting all GTAPs for this DB" group-id database-id) + (log/debugf "Group %d %s for Database %d, deleting all GTAPs for this DB" + group-id + (case changes + :none "no longer has any perms" + :all "now has full data perms" + :block "is now BLOCKED from all non-data-perms access") + database-id) (delete-gtaps-with-condition! 
group-id [:= :table.db_id database-id])) - - :else (doseq [schema-name (set (keys changes))] (delete-gtaps-for-group-schema! (assoc context :schema-name schema-name) @@ -134,7 +132,7 @@ (def ee-strategy-impl "EE impl for Sandbox (GTAP) deletion behavior. Don't use this directly." (ee-strategy-impl/reify-ee-strategy-impl - #'settings.metastore/enable-sandboxes? + #'settings.premium-features/enable-sandboxes? impl delete-sandboxes/oss-default-impl delete-sandboxes/DeleteSandboxes)) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions.clj b/enterprise/backend/src/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions.clj index d5635d779248..d93d35e38a5c 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions.clj @@ -1,4 +1,7 @@ (ns metabase-enterprise.sandbox.query-processor.middleware.row-level-restrictions + "Apply segmented a.k.a. sandboxing anti-permissions to the query, i.e. replace sandboxed Tables with the + appropriate [[metabase-enterprise.sandbox.models.group-table-access-policy]]s (GTAPs). See dox + for [[metabase.models.permissions]] for a high-level overview of the Metabase permissions system." (:require [clojure.core.memoize :as memoize] [clojure.tools.logging :as log] [metabase-enterprise.sandbox.models.group-table-access-policy :as gtap :refer [GroupTableAccessPolicy]] @@ -236,7 +239,7 @@ preprocess-source-query (source-query-form-ensure-metadata table-id card-id))) -(s/defn ^:private gtap->perms-set :- #{perms/ObjectPath} +(s/defn ^:private gtap->perms-set :- #{perms/Path} "Calculate the set of permissions needed to run the query associated with a GTAP; this set of permissions is excluded during the normal QP perms check. 
diff --git a/enterprise/backend/src/metabase_enterprise/search/scoring.clj b/enterprise/backend/src/metabase_enterprise/search/scoring.clj index 585726190de0..7e6e0cef865b 100644 --- a/enterprise/backend/src/metabase_enterprise/search/scoring.clj +++ b/enterprise/backend/src/metabase_enterprise/search/scoring.clj @@ -1,6 +1,7 @@ (ns metabase-enterprise.search.scoring + ;; TODO -- move to `metabase-enterprise..*` (:require [metabase-enterprise.enhancements.ee-strategy-impl :as ee-strategy-impl] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.search.scoring :as scoring])) (defn- official-collection-score @@ -10,6 +11,13 @@ 1 0)) +(defn- verified-score + "A scorer for verified items." + [{:keys [moderated_status]}] + (if (contains? #{"verified"} moderated_status) + 1 + 0)) + (def scoring-impl "Scoring implementation that adds score for items in official collections." (reify scoring/ResultScore @@ -17,10 +25,13 @@ (conj (scoring/score-result scoring/oss-score-impl result) {:weight 2 :score (official-collection-score result) - :name "official collection score"})))) + :name "official collection score"} + {:weight 2 + :score (verified-score result) + :name "verified"})))) (def ee-scoring "Enterprise scoring of results, falling back to the open source version if enterprise is not enabled." - (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-enhancements? + (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? 
scoring-impl scoring/oss-score-impl scoring/ResultScore)) diff --git a/enterprise/backend/src/metabase_enterprise/serialization/cmd.clj b/enterprise/backend/src/metabase_enterprise/serialization/cmd.clj index d66ef4b37969..cf9a946a969a 100644 --- a/enterprise/backend/src/metabase_enterprise/serialization/cmd.clj +++ b/enterprise/backend/src/metabase_enterprise/serialization/cmd.clj @@ -55,10 +55,10 @@ (load/load-settings path context) (load/load-dependencies path context)] reload-fns (filter fn? all-res)] - (if-not (empty? reload-fns) - (do (log/info (trs "Finished first pass of load; now performing second pass")) - (doseq [reload-fn reload-fns] - (reload-fn)))) + (when (seq reload-fns) + (log/info (trs "Finished first pass of load; now performing second pass")) + (doseq [reload-fn reload-fns] + (reload-fn))) (log/info (trs "END LOAD from {0} with context {1}" path context)))) (catch Throwable e (log/error e (trs "ERROR LOAD from {0}: {1}" path (.getMessage e))) diff --git a/enterprise/backend/src/metabase_enterprise/serialization/load.clj b/enterprise/backend/src/metabase_enterprise/serialization/load.clj index 188e99c76ce0..2295bf2bfe82 100644 --- a/enterprise/backend/src/metabase_enterprise/serialization/load.clj +++ b/enterprise/backend/src/metabase_enterprise/serialization/load.clj @@ -194,8 +194,8 @@ (unresolved-names->string entity nil)) ([entity insert-id] (str - (if-let [nm (:name entity)] (str "\"" nm "\"")) - (if insert-id (format " (inserted as ID %d) " insert-id)) + (when-let [nm (:name entity)] (str "\"" nm "\"")) + (when insert-id (format " (inserted as ID %d) " insert-id)) "missing:\n " (str/join "\n " @@ -347,17 +347,17 @@ (-> (if-let [link-type (::mb.viz/link-type click-behavior)] (case link-type ::mb.viz/card (let [card-id (::mb.viz/link-target-id click-behavior)] - (if (string? card-id) + (when (string? 
card-id) (update-existing-in-capture-missing click-behavior [::mb.viz/link-target-id] (comp :card fully-qualified-name->context)))) ::mb.viz/dashboard (let [dashboard-id (::mb.viz/link-target-id click-behavior)] - (if (string? dashboard-id) - (update-existing-in-capture-missing - click-behavior - [::mb.viz/link-target-id] - (comp :dashboard fully-qualified-name->context)))) + (when (string? dashboard-id) + (update-existing-in-capture-missing + click-behavior + [::mb.viz/link-target-id] + (comp :dashboard fully-qualified-name->context)))) click-behavior) click-behavior) (m/update-existing ::mb.viz/parameter-mapping resolve-click-behavior-parameter-mapping))) @@ -434,11 +434,11 @@ {:added "0.40.0"} [context dashboards] (let [dashboard-ids (maybe-upsert-many! context Dashboard - (for [dashboard dashboards] - (-> dashboard - (dissoc :dashboard_cards) - (assoc :collection_id (:collection context) - :creator_id @default-user)))) + (for [dashboard dashboards] + (-> dashboard + (dissoc :dashboard_cards) + (assoc :collection_id (:collection context) + :creator_id @default-user)))) dashboard-cards (map :dashboard_cards dashboards) ;; a function that prepares a dash card for insertion, while also validating to ensure the underlying ;; card_id could be resolved from the fully qualified name @@ -455,12 +455,12 @@ (let [add-keys [:dashboard_cards card-idx :visualization_settings] fixed-names (m/map-vals #(concat add-keys %) unresolved) with-fixed-names (assoc with-viz ::unresolved-names fixed-names)] - (-> acc - (update ::revisit (fn [revisit-map] - (update revisit-map dash-idx #(cons with-fixed-names %)))) - ;; index means something different here than in the Card case (it's actually the index - ;; of the dashboard) - (update ::revisit-index #(conj % dash-idx)))) + (-> acc + (update ::revisit (fn [revisit-map] + (update revisit-map dash-idx #(cons with-fixed-names %)))) + ;; index means something different here than in the Card case (it's actually the index + ;; of the 
dashboard) + (update ::revisit-index #(conj % dash-idx)))) (update acc ::process #(conj % with-viz))))) prep-init-acc {::process [] ::revisit-index #{} ::revisit {}} filtered-cards (reduce-kv @@ -476,14 +476,14 @@ dashcard-ids (maybe-upsert-many! context DashboardCard (map #(dissoc % :series) proceed-cards)) series-pairs (map vector (map :series proceed-cards) dashcard-ids)] (maybe-upsert-many! context DashboardCardSeries - (for [[series dashboard-card-id] series-pairs - dashboard-card-series series - :when (and dashboard-card-series dashboard-card-id)] - (-> dashboard-card-series - (assoc :dashboardcard_id dashboard-card-id) - (update :card_id fully-qualified-name->card-id)))) + (for [[series dashboard-card-id] series-pairs + dashboard-card-series series + :when (and dashboard-card-series dashboard-card-id)] + (-> dashboard-card-series + (assoc :dashboardcard_id dashboard-card-id) + (update :card_id fully-qualified-name->card-id)))) (let [revisit-dashboards (map (partial nth dashboards) revisit-indexes)] - (if-not (empty? revisit-dashboards) + (when (seq revisit-dashboards) (let [revisit-map (::revisit filtered-cards) revisit-inf-fn (fn [[dash-idx dashcards]] (format @@ -533,7 +533,7 @@ channel channels :when pulse-id] (assoc channel :pulse_id pulse-id))) - (if-not (empty? revisit) + (when (seq revisit) (let [revisit-info-map (group-by ::pulse-name revisit)] (log/infof "Unresolved references for pulses in collection %s; will reload after first pass complete:%n%s%n" (or (:collection context) "root") @@ -642,7 +642,11 @@ dummy-insert-cards (not-empty (::revisit grouped-cards)) process-cards (::process grouped-cards)] (maybe-upsert-many! context Card process-cards) +<<<<<<< HEAD (if dummy-insert-cards +======= + (when dummy-insert-cards +>>>>>>> tags/v0.41.0 (let [dummy-inserted-ids (maybe-upsert-many! context Card @@ -708,7 +712,7 @@ (defn- make-reload-fn [all-results] (let [all-fns (filter fn? all-results)] - (if-not (empty? 
all-fns) + (when (seq all-fns) (let [new-fns (doall all-fns)] (fn [] (make-reload-fn (for [reload-fn new-fns] diff --git a/enterprise/backend/src/metabase_enterprise/serialization/names.clj b/enterprise/backend/src/metabase_enterprise/serialization/names.clj index 0cb368e869cf..e4eaca955cc3 100644 --- a/enterprise/backend/src/metabase_enterprise/serialization/names.clj +++ b/enterprise/backend/src/metabase_enterprise/serialization/names.clj @@ -72,7 +72,7 @@ (str (->> segment :table_id (fully-qualified-name Table)) "/segments/" (safe-name segment))) (defn- local-collection-name [collection] - (let [ns-part (if-let [coll-ns (:namespace collection)] + (let [ns-part (when-let [coll-ns (:namespace collection)] (str ":" (if (keyword? coll-ns) (name coll-ns) coll-ns) "/"))] (str "/collections/" ns-part (safe-name collection)))) diff --git a/enterprise/backend/src/metabase_enterprise/sso/api/interface.clj b/enterprise/backend/src/metabase_enterprise/sso/api/interface.clj new file mode 100644 index 000000000000..76e70a5eccfa --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/sso/api/interface.clj @@ -0,0 +1,34 @@ +(ns metabase-enterprise.sso.api.interface + (:require [metabase-enterprise.sso.integrations.sso-settings :as sso-settings] + [metabase.util.i18n :refer [tru]])) + +(defn- sso-backend + "Function that powers the defmulti in figuring out which SSO backend to use. It might be that we need to have more + complex logic around this, but now it's just a simple priority. If SAML is configured use that otherwise JWT" + [_] + (cond + (sso-settings/saml-configured?) :saml + (sso-settings/jwt-enabled) :jwt + :else nil)) + +(defmulti sso-get + "Multi-method for supporting the first part of an SSO signin request. An implementation of this method will usually + result in a redirect to an SSO backend" + sso-backend) + +(defmulti sso-post + "Multi-method for supporting a POST-back from an SSO signin request. 
An implementation of this method will need to + validate the POST from the SSO backend and successfully log the user into Metabase." + sso-backend) + +(defn- throw-not-configured-error [] + (throw (ex-info (str (tru "SSO has not been enabled and/or configured")) + {:status-code 400}))) + +(defmethod sso-get :default + [_] + (throw-not-configured-error)) + +(defmethod sso-post :default + [_] + (throw-not-configured-error)) diff --git a/enterprise/backend/src/metabase_enterprise/sso/api/routes.clj b/enterprise/backend/src/metabase_enterprise/sso/api/routes.clj index c2ee1580ef62..d5484ad96967 100644 --- a/enterprise/backend/src/metabase_enterprise/sso/api/routes.clj +++ b/enterprise/backend/src/metabase_enterprise/sso/api/routes.clj @@ -2,7 +2,12 @@ (:require [compojure.core :as compojure] [metabase-enterprise.sso.api.sso :as sso])) -;; This needs to be installed in the `metabase.server.routes/routes` -- not `metabase.api.routes/routes` !!! +;; This needs to be injected into [[metabase.server.routes/routes]] -- not [[metabase.api.routes/routes]] !!! +;; +;; TODO -- should we make a `metabase-enterprise.routes` namespace where this can live instead of injecting it +;; directly? 
+;; +;; TODO -- we need to feature-flag this based on the `:sso` feature (compojure/defroutes ^{:doc "Ring routes for auth (SAML) API endpoints."} routes (compojure/context "/auth" diff --git a/enterprise/backend/src/metabase_enterprise/sso/api/sso.clj b/enterprise/backend/src/metabase_enterprise/sso/api/sso.clj index f6bc02576fac..39f7cad3f011 100644 --- a/enterprise/backend/src/metabase_enterprise/sso/api/sso.clj +++ b/enterprise/backend/src/metabase_enterprise/sso/api/sso.clj @@ -5,60 +5,30 @@ we can have a uniform interface both via the API and code" (:require [clojure.tools.logging :as log] [compojure.core :refer [GET POST]] - [metabase-enterprise.sso.integrations.sso-settings :as sso-settings] + [metabase-enterprise.sso.api.interface :as sso.i] + metabase-enterprise.sso.integrations.jwt + metabase-enterprise.sso.integrations.saml [metabase.api.common :as api] - [metabase.plugins.classloader :as classloader] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] [metabase.util :as u] [metabase.util.i18n :refer [trs tru]] [stencil.core :as stencil])) -(defn- sso-backend - "Function that powers the defmulti in figuring out which SSO backend to use. It might be that we need to have more - complex logic around this, but now it's just a simple priority. If SAML is configured use that otherwise JWT" - [_] - ;; load the SSO integrations so their implementations for the multimethods below are available. Can't load in - ;; `:require` because it would cause a circular ref / those namespaces aren't used here at any rate - ;; (`cljr-clean-namespace` would remove them) - (classloader/require '[metabase-enterprise.sso.integrations jwt saml]) - (cond - (sso-settings/saml-configured?) :saml - (sso-settings/jwt-enabled) :jwt - :else nil)) +;; load the SSO integrations so their implementations for the multimethods below are available. 
+(comment metabase-enterprise.sso.integrations.jwt/keep-me + metabase-enterprise.sso.integrations.saml/keep-me) -(defmulti sso-get - "Multi-method for supporting the first part of an SSO signin request. An implementation of this method will usually - result in a redirect to an SSO backend" - sso-backend) - -(defmulti sso-post - "Multi-method for supporting a POST-back from an SSO signin request. An implementation of this method will need to - validate the POST from the SSO backend and successfully log the user into Metabase." - sso-backend) - -(defn- throw-not-configured-error [] - (throw (ex-info (str (tru "SSO has not been enabled and/or configured")) - {:status-code 400}))) - -(defmethod sso-get :default - [_] - (throw-not-configured-error)) - -(defmethod sso-post :default - [_] - (throw-not-configured-error)) - -(defn- throw-if-no-metastore-token [] - (when-not (metastore/enable-sso?) +(defn- throw-if-no-premium-features-token [] + (when-not (premium-features/enable-sso?) (throw (ex-info (str (tru "SSO requires a valid token")) {:status-code 403})))) (api/defendpoint GET "/" "SSO entry-point for an SSO user that has not logged in yet" {:as req} - (throw-if-no-metastore-token) + (throw-if-no-premium-features-token) (try - (sso-get req) + (sso.i/sso-get req) (catch Throwable e (log/error #_e (trs "Error returning SSO entry point")) (throw e)))) @@ -76,9 +46,9 @@ (api/defendpoint POST "/" "Route the SSO backends call with successful login details" {:as req} - (throw-if-no-metastore-token) + (throw-if-no-premium-features-token) (try - (sso-post req) + (sso.i/sso-post req) (catch Throwable e (log/error e (trs "Error logging in")) (sso-error-page e)))) diff --git a/enterprise/backend/src/metabase_enterprise/sso/integrations/jwt.clj b/enterprise/backend/src/metabase_enterprise/sso/integrations/jwt.clj index cd5d07e1f76c..0b5848cf57a8 100644 --- a/enterprise/backend/src/metabase_enterprise/sso/integrations/jwt.clj +++ 
b/enterprise/backend/src/metabase_enterprise/sso/integrations/jwt.clj @@ -1,7 +1,7 @@ (ns metabase-enterprise.sso.integrations.jwt "Implementation of the JWT backend for sso" (:require [buddy.sign.jwt :as jwt] - [metabase-enterprise.sso.api.sso :as sso] + [metabase-enterprise.sso.api.interface :as sso.i] [metabase-enterprise.sso.integrations.sso-settings :as sso-settings] [metabase-enterprise.sso.integrations.sso-utils :as sso-utils] [metabase.api.common :as api] @@ -90,7 +90,7 @@ (api/check (sso-settings/jwt-configured?) [400 (tru "JWT SSO has not been enabled and/or configured")])) -(defmethod sso/sso-get :jwt +(defmethod sso.i/sso-get :jwt [{{:keys [jwt redirect]} :params, :as request}] (check-jwt-enabled) (if jwt @@ -99,6 +99,6 @@ (when redirect (str "?return_to=" redirect)))))) -(defmethod sso/sso-post :jwt +(defmethod sso.i/sso-post :jwt [req] (throw (ex-info "POST not valid for JWT SSO requests" {:status-code 400}))) diff --git a/enterprise/backend/src/metabase_enterprise/sso/integrations/saml.clj b/enterprise/backend/src/metabase_enterprise/sso/integrations/saml.clj index d9fbd0935c24..89d22dc27f52 100644 --- a/enterprise/backend/src/metabase_enterprise/sso/integrations/saml.clj +++ b/enterprise/backend/src/metabase_enterprise/sso/integrations/saml.clj @@ -20,7 +20,7 @@ [clojure.string :as str] [clojure.tools.logging :as log] [medley.core :as m] - [metabase-enterprise.sso.api.sso :as sso] + [metabase-enterprise.sso.api.interface :as sso.i] [metabase-enterprise.sso.integrations.sso-settings :as sso-settings] [metabase-enterprise.sso.integrations.sso-utils :as sso-utils] [metabase.api.common :as api] @@ -107,7 +107,7 @@ (api/check (sso-settings/saml-configured?) 
[400 (tru "SAML has not been enabled and/or configured")])) -(defmethod sso/sso-get :saml +(defmethod sso.i/sso-get :saml ;; Initial call that will result in a redirect to the IDP along with information about how the IDP can authenticate ;; and redirect them back to us [req] @@ -170,7 +170,7 @@ (when (u/base64-string? s) (codecs/bytes->str (codec/base64-decode s)))) -(defmethod sso/sso-post :saml +(defmethod sso.i/sso-post :saml ;; Does the verification of the IDP's response and 'logs the user in'. The attributes are available in the response: ;; `(get-in saml-info [:assertions :attrs]) [{:keys [params], :as request}] diff --git a/enterprise/backend/test/metabase_enterprise/advanced_config/api/pulse_test.clj b/enterprise/backend/test/metabase_enterprise/advanced_config/api/pulse_test.clj new file mode 100644 index 000000000000..47b0c02c02cb --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/advanced_config/api/pulse_test.clj @@ -0,0 +1,61 @@ +(ns metabase-enterprise.advanced-config.api.pulse-test + (:require [clojure.test :refer :all] + [metabase.models :refer [Card]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.test :as mt] + [metabase.util :as u])) + +(deftest test-pulse-endpoint-should-respect-email-domain-allow-list-test + (testing "POST /api/pulse/test" + (mt/with-temp Card [card {:dataset_query (mt/mbql-query venues)}] + ;; make sure we validate raw emails whether they're part of `:details` or part of `:recipients` -- we + ;; technically allow either right now + (doseq [channel [{:details {:emails ["test@metabase.com"]}} + {:recipients [{:email "test@metabase.com"}] + :details {}}]] + (testing (format "\nChannel = %s\n" (u/pprint-to-str channel)) + (letfn [(send! 
[expected-status-code] + (let [pulse-name (mt/random-name)] + (mt/with-fake-inbox + {:response (mt/user-http-request + :rasta :post expected-status-code "pulse/test" + {:name pulse-name + :cards [{:id (u/the-id card) + :include_csv false + :include_xls false + :dashboard_card_id nil}] + :channels [(merge {:enabled true + :channel_type "email" + :schedule_type "daily" + :schedule_hour 12 + :schedule_day nil} + channel)] + :skip_if_empty false}) + :recipients (set (keys (mt/regex-email-bodies (re-pattern pulse-name))))})))] + (testing "allowed email -- should pass" + (mt/with-temporary-setting-values [subscription-allowed-domains "metabase.com"] + (premium-features-test/with-premium-features #{:advanced-config} + (let [{:keys [response recipients]} (send! 200)] + (is (= {:ok true} + response)) + (is (contains? recipients "test@metabase.com")))) + (testing "No :advanced-config token" + (premium-features-test/with-premium-features #{} + (let [{:keys [response recipients]} (send! 200)] + (is (= {:ok true} + response)) + (is (contains? recipients "test@metabase.com"))))))) + (testing "disallowed email" + (mt/with-temporary-setting-values [subscription-allowed-domains "example.com"] + (testing "should fail when :advanced-config is enabled" + (premium-features-test/with-premium-features #{:advanced-config} + (let [{:keys [response recipients]} (send! 403)] + (is (= "You cannot create new subscriptions for the domain \"metabase.com\". Allowed domains are: example.com" + (:message response))) + (is (not (contains? recipients "test@metabase.com")))))) + (testing "No :advanced-config token -- should still pass" + (premium-features-test/with-premium-features #{} + (let [{:keys [response recipients]} (send! 200)] + (is (= {:ok true} + response)) + (is (contains? 
recipients "test@metabase.com"))))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/advanced_config/models/pulse_channel_test.clj b/enterprise/backend/test/metabase_enterprise/advanced_config/models/pulse_channel_test.clj new file mode 100644 index 000000000000..bd52cdc1e3cd --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/advanced_config/models/pulse_channel_test.clj @@ -0,0 +1,51 @@ +(ns metabase-enterprise.advanced-config.models.pulse-channel-test + (:require [clojure.string :as str] + [clojure.test :refer :all] + [metabase.models :refer [Pulse PulseChannel]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.test :as mt] + [metabase.util :as u] + [toucan.db :as db] + [toucan.util.test :as tt])) + +(deftest validate-email-domains-test + (mt/with-temp Pulse [{pulse-id :id}] + (doseq [operation [:create :update] + enable-advanced-config? [true false] + allowed-domains [nil + #{"metabase.com"} + #{"metabase.com" "toucan.farm"}] + emails [nil + ["cam@metabase.com"] + ["cam@metabase.com" "cam@toucan.farm"] + ["cam@metabase.com" "cam@disallowed-domain.com"]] + :let [fail? (and enable-advanced-config? + allowed-domains + (not (every? (fn [email] + (contains? allowed-domains (u/email->domain email))) + emails)))]] + (premium-features-test/with-premium-features (if enable-advanced-config? + #{:advanced-config} + #{}) + (mt/with-temporary-setting-values [subscription-allowed-domains (str/join "," allowed-domains)] + ;; `with-premium-features` and `with-temporary-setting-values` will add `testing` context for the other + ;; stuff. + (testing (str (format "\nOperation = %s" operation) + (format "\nEmails = %s" (pr-str emails))) + (let [thunk (case operation + :create + #(db/insert! PulseChannel + (merge (tt/with-temp-defaults PulseChannel) + {:pulse_id pulse-id, :details {:emails emails}})) + + :update + #(mt/with-temp PulseChannel [{pulse-channel-id :id} {:pulse_id pulse-id}] + (db/update! 
PulseChannel pulse-channel-id, :details {:emails emails})))] + (if fail? + (testing "should fail" + (is (thrown-with-msg? + clojure.lang.ExceptionInfo + #"You cannot create new subscriptions for the domain \"[\w@\.-]+\". Allowed domains are: .+" + (thunk)))) + (testing "should succeed" + (is (thunk))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit/pages_test.clj b/enterprise/backend/test/metabase_enterprise/audit/pages_test.clj deleted file mode 100644 index 16fd594c2487..000000000000 --- a/enterprise/backend/test/metabase_enterprise/audit/pages_test.clj +++ /dev/null @@ -1,91 +0,0 @@ -(ns metabase-enterprise.audit.pages-test - (:require [clojure.java.classpath :as classpath] - [clojure.string :as str] - [clojure.test :refer :all] - [clojure.tools.namespace.find :as ns-find] - [metabase.models :refer [Card Dashboard DashboardCard Database Table]] - [metabase.plugins.classloader :as classloader] - [metabase.public-settings.metastore-test :as metastore-test] - [metabase.query-processor :as qp] - [metabase.query-processor.util :as qp-util] - [metabase.test :as mt] - [metabase.test.fixtures :as fixtures] - [metabase.util :as u] - [ring.util.codec :as codec] - [schema.core :as s])) - -(use-fixtures :once (fixtures/initialize :db)) - -(deftest preconditions-test - (classloader/require 'metabase-enterprise.audit.pages.dashboards) - (testing "the query should exist" - (is (some? (resolve (symbol "metabase-enterprise.audit.pages.dashboards/most-popular-with-avg-speed"))))) - - (testing "test that a query will fail if not ran by an admin" - (metastore-test/with-metastore-token-features #{:audit-app} - (is (= {:status "failed", :error "You don't have permissions to do that."} - (-> (mt/user-http-request :lucky :post 202 "dataset" - {:type :internal - :fn "metabase-enterprise.audit.pages.dashboards/most-popular-with-avg-speed"}) - (select-keys [:status :error])))))) - - (testing "ok, now try to run it. 
Should fail because we don't have audit-app enabled" - (metastore-test/with-metastore-token-features nil - (is (= {:status "failed", :error "Audit App queries are not enabled on this instance."} - (-> (mt/user-http-request :crowberto :post 202 "dataset" - {:type :internal - :fn "metabase-enterprise.audit.pages.dashboards/most-popular-with-avg-speed"}) - (select-keys [:status :error]))))))) - -(defn- all-queries [] - (for [ns-symb (ns-find/find-namespaces (classpath/system-classpath)) - :when (and (str/starts-with? (name ns-symb) "metabase-enterprise.audit.pages") - (not (str/ends-with? (name ns-symb) "-test"))) - [_ varr] (do (classloader/require ns-symb) - (ns-interns ns-symb)) - :when (:internal-query-fn (meta varr))] - varr)) - -(defn- varr->query [varr {:keys [database table card dash]}] - (let [mta (meta varr) - fn-str (str (ns-name (:ns mta)) "/" (:name mta)) - arglist (mapv keyword (first (:arglists mta)))] - {:type :internal - :fn fn-str - :args (for [arg arglist] - (case arg - :datetime-unit "day" - :dashboard-id (u/the-id dash) - :card-id (u/the-id card) - :user-id (mt/user->id :crowberto) - :database-id (u/the-id database) - :table-id (u/the-id table) - :model "card" - :query-hash (codec/base64-encode (qp-util/query-hash {:database 1, :type :native}))))})) - -(defn- test-varr - [varr objects] - (testing (format "%s %s:%d" varr (ns-name (:ns (meta varr))) (:line (meta varr))) - (let [query (varr->query varr objects)] - (testing (format "\nquery =\n%s" (u/pprint-to-str query)) - (is (schema= {:status (s/eq :completed) - s/Keyword s/Any} - (qp/process-query query))))))) - -(defn- do-with-temp-objects [f] - (mt/with-temp* [Database [database] - Table [table {:db_id (u/the-id database)}] - Card [card {:table_id (u/the-id table), :database_id (u/the-id database)}] - Dashboard [dash] - DashboardCard [_ {:card_id (u/the-id card), :dashboard_id (u/the-id dash)}]] - (f {:database database, :table table, :card card, :dash dash}))) - -(defmacro ^:private 
with-temp-objects [[objects-binding] & body] - `(do-with-temp-objects (fn [~objects-binding] ~@body))) - -(deftest all-queries-test - (mt/with-test-user :crowberto - (with-temp-objects [objects] - (metastore-test/with-metastore-token-features #{:audit-app} - (doseq [varr (all-queries)] - (test-varr varr objects)))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit_app/api/user_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/api/user_test.clj new file mode 100644 index 000000000000..4ea38ed8df0b --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/audit_app/api/user_test.clj @@ -0,0 +1,84 @@ +(ns metabase-enterprise.audit-app.api.user-test + (:require [clojure.test :refer :all] + [metabase.models :refer [Card Dashboard DashboardCard Pulse PulseCard PulseChannel PulseChannelRecipient User]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.test :as mt] + [toucan.db :as db])) + +(deftest delete-subscriptions-test + (testing "DELETE /api/ee/audit-app/user/:id/subscriptions" + (testing "Should require a token with `:audit-app`" + (premium-features-test/with-premium-features #{} + (mt/with-temp User [{user-id :id}] + (is (= "This API endpoint is only enabled if you have a premium token with the :audit-app feature." 
+ (mt/user-http-request user-id + :delete 402 + (format "ee/audit-app/user/%d/subscriptions" user-id))))))) + + (premium-features-test/with-premium-features #{:audit-app} + (doseq [run-type [:admin :non-admin]] + (mt/with-temp* [User [{user-id :id}] + Card [{card-id :id}] + ;; Alert, created by a different User + Pulse [{alert-id :id} {:alert_condition "rows" + :alert_first_only false + :name nil}] + PulseCard [_ {:pulse_id alert-id + :card_id card-id}] + PulseChannel [{alert-chan-id :id} {:pulse_id alert-id}] + PulseChannelRecipient [_ {:user_id user-id + :pulse_channel_id alert-chan-id}] + ;; DashboardSubscription, created by this User; multiple recipients + Dashboard [{dashboard-id :id}] + DashboardCard [{dashcard-id :id} {:dashboard_id dashboard-id + :card_id card-id}] + Pulse [{dash-sub-id :id} {:dashboard_id dashboard-id + :creator_id user-id}] + PulseCard [_ {:pulse_id dash-sub-id + :card_id card-id + :dashboard_card_id dashcard-id}] + PulseChannel [{dash-sub-chan-id :id} {:pulse_id dash-sub-id}] + PulseChannelRecipient [_ {:user_id user-id + :pulse_channel_id dash-sub-chan-id}] + PulseChannelRecipient [_ {:user_id (mt/user->id :rasta) + :pulse_channel_id dash-sub-chan-id}]] + (letfn [(describe-objects [] + {:num-subscriptions (db/count PulseChannelRecipient :user_id user-id) + :alert-archived? (db/select-one-field :archived Pulse :id alert-id) + :dashboard-subscription-archived? (db/select-one-field :archived Pulse :id dash-sub-id)}) + (api-delete-subscriptions! [request-user-name-or-id expected-status-code] + (mt/user-http-request request-user-name-or-id + :delete expected-status-code + (format "ee/audit-app/user/%d/subscriptions" user-id)))] + (testing "Sanity check: User should have 2 subscriptions (1 Alert, 1 DashboardSubscription)" + (is (= {:num-subscriptions 2 + :alert-archived? false + :dashboard-subscription-archived? 
false} + (describe-objects)))) + (case run-type + :non-admin + (testing "Non-admin" + (testing "should not be allowed to delete all subscriptions for another User" + (is (= "You don't have permissions to do that." + (api-delete-subscriptions! :rasta 403))) + (is (= {:num-subscriptions 2 + :alert-archived? false + :dashboard-subscription-archived? false} + (describe-objects)))) + (testing "should be allowed to delete all subscriptions for themselves." + (is (nil? (api-delete-subscriptions! user-id 204))) + (testing (str "\nAlert should get archived because this User was the last subscriber." + "\nDashboardSubscription should get archived because this User created it.") + (is (= {:num-subscriptions 0 + :alert-archived? true + :dashboard-subscription-archived? true} + (describe-objects)))))) + + :admin + (testing "Admin should be allowed to delete all subscriptions for another User" + (is (nil? (api-delete-subscriptions! :crowberto 204))) + (testing "\nAlert and DashboardSubscription should have gotten archived as well" + (is (= {:num-subscriptions 0 + :alert-archived? true + :dashboard-subscription-archived? 
true} + (describe-objects)))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit_app/pages/alerts_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/pages/alerts_test.clj new file mode 100644 index 000000000000..ebb4a759bfa0 --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/audit_app/pages/alerts_test.clj @@ -0,0 +1,88 @@ +(ns metabase-enterprise.audit-app.pages.alerts-test + (:require [clojure.string :as str] + [clojure.test :refer :all] + [metabase-enterprise.audit-app.pages.alerts :as audit.alerts] + [metabase.models :refer [Card Collection Pulse PulseCard PulseChannel PulseChannelRecipient]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.query-processor :as qp] + [metabase.test :as mt] + [metabase.util :as u] + [toucan.db :as db])) + +(defn- alerts [card-name] + (mt/with-test-user :crowberto + (premium-features-test/with-premium-features #{:audit-app} + (qp/process-query + {:type :internal + :fn (u/qualified-name ::audit.alerts/table) + :args [card-name]})))) + +(deftest table-test + (is (= [] + (mt/rows (alerts (mt/random-name))))) + (let [card-name (mt/random-name)] + (mt/with-temp Collection [{collection-id :id, collection-name :name}] + ;; test with both the Root Collection and a non-Root Collection + (doseq [{:keys [collection-id collection-name]} [{:collection-id collection-id + :collection-name collection-name} + {:collection-id nil + :collection-name "Our analytics"}]] + (testing (format "Collection = %d %s" collection-id collection-name) + (mt/with-temp* [Card [{card-id :id} {:name card-name + :collection_id collection-id}] + Pulse [{pulse-id :id} {:collection_id collection-id + :alert_condition "rows"}] + PulseCard [_ {:card_id card-id + :pulse_id pulse-id}] + PulseChannel [{channel-id :id} {:pulse_id pulse-id + :channel_type "email" + :details {:emails ["amazing@fake.com"]} + :schedule_type "monthly" + :schedule_frame "first" + :schedule_day "mon" + 
:schedule_hour 8}] + PulseChannelRecipient [_ {:pulse_channel_id channel-id + :user_id (mt/user->id :rasta)}] + PulseChannel [{channel-2-id :id} {:pulse_id pulse-id + :channel_type "slack" + :details {:channel "#wow"} + :schedule_type "hourly"}]] + (is (= {:columns ["card_id" + "card_name" + "pulse_id" + "recipients" + "subscription_type" + "collection_id" + "collection_name" + "frequency" + "creator_id" + "creator_name" + "created_at" + "num_filters"] + ;; sort by newest first. + :rows [[card-id + card-name + pulse-id + nil + "Slack" + collection-id + collection-name + "Every hour" + (mt/user->id :rasta) + "Rasta Toucan" + (db/select-one-field :created_at PulseChannel :id channel-2-id) + 0] + [card-id + card-name + pulse-id + 2 + "Email" + collection-id + collection-name + "At 8:00 AM, on the first Tuesday of the month" + (mt/user->id :rasta) + "Rasta Toucan" + (db/select-one-field :created_at PulseChannel :id channel-id) + 0]]} + (mt/rows+column-names + (alerts (str/join (rest (butlast card-name))))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit/pages/common_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/pages/common_test.clj similarity index 70% rename from enterprise/backend/test/metabase_enterprise/audit/pages/common_test.clj rename to enterprise/backend/test/metabase_enterprise/audit_app/pages/common_test.clj index be846a696ec0..27d41f816f87 100644 --- a/enterprise/backend/test/metabase_enterprise/audit/pages/common_test.clj +++ b/enterprise/backend/test/metabase_enterprise/audit_app/pages/common_test.clj @@ -1,22 +1,25 @@ -(ns metabase-enterprise.audit.pages.common-test +(ns metabase-enterprise.audit-app.pages.common-test (:require [clojure.test :refer :all] - [metabase-enterprise.audit.pages.common :as pages.common] + [honeysql.core :as hsql] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as pages.common] [metabase.db :as mdb] - 
[metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.query-processor :as qp] - [metabase.test :as mt])) + [metabase.test :as mt] + [metabase.util :as u] + [metabase.util.honeysql-extensions :as hx])) (defn- run-query - [varr & {:as additional-query-params}] + [query-type & {:as additional-query-params}] (mt/with-test-user :crowberto - (metastore-test/with-metastore-token-features #{:audit-app} + (premium-features-test/with-premium-features #{:audit-app} (qp/process-query (merge {:type :internal - :fn (let [mta (meta varr)] - (format "%s/%s" (ns-name (:ns mta)) (:name mta)))} + :fn (u/qualified-name query-type)} additional-query-params))))) -(defn- ^:private ^:internal-query-fn legacy-format-query-fn - [a1] +(defmethod audit.i/internal-query ::legacy-format-query-fn + [_ a1] (let [h2? (= (mdb/db-type) :h2)] {:metadata [[:A {:display_name "A", :base_type :type/DateTime}] [:B {:display_name "B", :base_type :type/Integer}]] @@ -24,8 +27,8 @@ {:union-all [{:select [[a1 :A] [2 :B]]} {:select [[3 :A] [4 :B]]}]})})) -(defn- ^:private ^:internal-query-fn reducible-format-query-fn - [a1] +(defmethod audit.i/internal-query ::reducible-format-query-fn + [_ a1] {:metadata [[:A {:display_name "A", :base_type :type/DateTime}] [:B {:display_name "B", :base_type :type/Integer}]] :results (pages.common/reducible-query @@ -35,13 +38,13 @@ (deftest transform-results-test (testing "Make sure query function result are transformed to QP results correctly" - (metastore-test/with-metastore-token-features #{:audit-app} - (doseq [[format-name {:keys [varr expected-rows]}] {"legacy" {:varr #'legacy-format-query-fn - :expected-rows [[100 2] [3 4]]} - "reducible" {:varr #'reducible-format-query-fn - :expected-rows [[101 2] [4 4]]}}] + (premium-features-test/with-premium-features #{:audit-app} + (doseq [[format-name {:keys [query-type expected-rows]}] {"legacy" {:query-type ::legacy-format-query-fn + 
:expected-rows [[100 2] [3 4]]} + "reducible" {:query-type ::reducible-format-query-fn + :expected-rows [[101 2] [4 4]]}}] (testing (format "format = %s" format-name) - (let [results (delay (run-query varr :args [100]))] + (let [results (delay (run-query query-type :args [100]))] (testing "cols" (is (= [{:display_name "A", :base_type :type/DateTime, :name "A"} {:display_name "B", :base_type :type/Integer, :name "B"}] @@ -50,20 +53,36 @@ (is (= expected-rows (mt/rows @results)))))))))) +(deftest add-45-days-clause-test + (testing "add 45 days clause" + (is (= + {:where + [:> + (hx/with-type-info + (hsql/call :cast :bob.dobbs #honeysql.types.SqlRaw{:s "date"}) + {::hx/database-type "date"}) + nil]} + (assoc-in (#'pages.common/add-45-days-clause {} :bob.dobbs) [:where 2] nil))))) + +(deftest add-search-clause-test + (testing "add search clause" + (is (= {:where `(:or [:like ~(hsql/call :lower :t.name) "%birds%"] [:like ~(hsql/call :lower :db.name) "%birds%"])} + (#'pages.common/add-search-clause {} "birds" :t.name :db.name))))) + (deftest query-limit-and-offset-test (testing "Make sure params passed in as part of the query map are respected" - (metastore-test/with-metastore-token-features #{:audit-app} - (doseq [[format-name {:keys [varr expected-rows]}] {"legacy" {:varr #'legacy-format-query-fn - :expected-rows [[100 2] [3 4]]} - "reducible" {:varr #'reducible-format-query-fn - :expected-rows [[101 2] [4 4]]}}] + (premium-features-test/with-premium-features #{:audit-app} + (doseq [[format-name {:keys [query-type expected-rows]}] {"legacy" {:query-type ::legacy-format-query-fn + :expected-rows [[100 2] [3 4]]} + "reducible" {:query-type ::reducible-format-query-fn + :expected-rows [[101 2] [4 4]]}}] (testing (format "format = %s" format-name) (testing :limit (is (= [(first expected-rows)] - (mt/rows (run-query varr :args [100], :limit 1))))) + (mt/rows (run-query query-type :args [100], :limit 1))))) (testing :offset (is (= [(second expected-rows)] - (mt/rows 
(run-query varr :args [100], :offset 1)))))))))) + (mt/rows (run-query query-type :args [100], :offset 1)))))))))) (deftest CTES->subselects-test (testing "FROM substitution" diff --git a/enterprise/backend/test/metabase_enterprise/audit_app/pages/dashboard_subscriptions_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/pages/dashboard_subscriptions_test.clj new file mode 100644 index 000000000000..6726b3737c21 --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/audit_app/pages/dashboard_subscriptions_test.clj @@ -0,0 +1,86 @@ +(ns metabase-enterprise.audit-app.pages.dashboard-subscriptions-test + (:require [clojure.string :as str] + [clojure.test :refer :all] + [metabase-enterprise.audit-app.pages.dashboard-subscriptions :as audit.dashboard-subscriptions] + [metabase.models :refer [Collection Dashboard Pulse PulseChannel PulseChannelRecipient]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.query-processor :as qp] + [metabase.test :as mt] + [metabase.util :as u] + [toucan.db :as db])) + +(defn- dashboard-subscriptions [dashboard-name] + (mt/with-test-user :crowberto + (premium-features-test/with-premium-features #{:audit-app} + (qp/process-query + {:type :internal + :fn (u/qualified-name ::audit.dashboard-subscriptions/table) + :args [dashboard-name]})))) + +(deftest table-test + (is (= [] + (mt/rows (dashboard-subscriptions (mt/random-name))))) + (let [dashboard-name (mt/random-name)] + (mt/with-temp Collection [{collection-id :id, collection-name :name}] + ;; test with both the Root Collection and a non-Root Collection + (doseq [{:keys [collection-id collection-name]} [{:collection-id collection-id + :collection-name collection-name} + {:collection-id nil + :collection-name "Our analytics"}]] + (testing (format "Collection = %d %s" collection-id collection-name) + (mt/with-temp* [Dashboard [{dashboard-id :id} {:name dashboard-name + :collection_id collection-id}] + Pulse [{pulse-id :id} 
{:dashboard_id dashboard-id + :collection_id collection-id}] + PulseChannel [{channel-id :id} {:pulse_id pulse-id + :channel_type "email" + :details {:emails ["amazing@fake.com"]} + :schedule_type "monthly" + :schedule_frame "first" + :schedule_day "mon" + :schedule_hour 8}] + PulseChannelRecipient [_ {:pulse_channel_id channel-id + :user_id (mt/user->id :rasta)}] + PulseChannel [{channel-2-id :id} {:pulse_id pulse-id + :channel_type "slack" + :details {:channel "#wow"} + :schedule_type "hourly"}]] + (is (= {:columns ["dashboard_id" + "dashboard_name" + "pulse_id" + "recipients" + "subscription_type" + "collection_id" + "collection_name" + "frequency" + "creator_id" + "creator_name" + "created_at" + "num_filters"] + ;; sort by newest first. + :rows [[dashboard-id + dashboard-name + pulse-id + nil + "Slack" + collection-id + collection-name + "Every hour" + (mt/user->id :rasta) + "Rasta Toucan" + (db/select-one-field :created_at PulseChannel :id channel-2-id) + 0] + [dashboard-id + dashboard-name + pulse-id + 2 + "Email" + collection-id + collection-name + "At 8:00 AM, on the first Tuesday of the month" + (mt/user->id :rasta) + "Rasta Toucan" + (db/select-one-field :created_at PulseChannel :id channel-id) + 0]]} + (mt/rows+column-names + (dashboard-subscriptions (str/join (rest (butlast dashboard-name))))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit_app/pages_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/pages_test.clj new file mode 100644 index 000000000000..c57be3ae51f5 --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/audit_app/pages_test.clj @@ -0,0 +1,169 @@ +(ns metabase-enterprise.audit-app.pages-test + (:require [clojure.java.classpath :as classpath] + [clojure.java.io :as io] + [clojure.string :as str] + [clojure.test :refer :all] + [clojure.tools.namespace.find :as ns-find] + [clojure.tools.reader :as tools.reader] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase.models :refer 
[Card Dashboard DashboardCard Database Table]] + [metabase.plugins.classloader :as classloader] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.query-processor :as qp] + [metabase.query-processor.util :as qp-util] + [metabase.test :as mt] + [metabase.test.fixtures :as fixtures] + [metabase.util :as u] + [ring.util.codec :as codec] + [schema.core :as s])) + +(use-fixtures :once (fixtures/initialize :db)) + +(deftest preconditions-test + (classloader/require 'metabase-enterprise.audit-app.pages.dashboards) + (testing "the method should exist" + (is (fn? (get-method audit.i/internal-query :metabase-enterprise.audit-app.pages.dashboards/most-popular-with-avg-speed)))) + + (testing "test that a query will fail if not ran by an admin" + (premium-features-test/with-premium-features #{:audit-app} + (is (= {:status "failed", :error "You don't have permissions to do that."} + (-> (mt/user-http-request :lucky :post 202 "dataset" + {:type :internal + :fn "metabase-enterprise.audit-app.pages.dashboards/most-popular-with-avg-speed"}) + (select-keys [:status :error])))))) + + (testing "ok, now try to run it. Should fail because we don't have audit-app enabled" + (premium-features-test/with-premium-features nil + (is (= {:status "failed", :error "Audit App queries are not enabled on this instance."} + (-> (mt/user-http-request :crowberto :post 202 "dataset" + {:type :internal + :fn "metabase-enterprise.audit-app.pages.dashboards/most-popular-with-avg-speed"}) + (select-keys [:status :error]))))))) + +(defn- all-query-methods + "Return a set of all audit/internal query types (excluding test/`:default` impls)." + [] + ;; load all `metabase-enterprise.audit-app.pages` namespaces. + (doseq [ns-symb (ns-find/find-namespaces (classpath/system-classpath)) + :when (and (str/starts-with? (name ns-symb) "metabase-enterprise.audit-app.pages") + (not (str/ends-with? 
(name ns-symb) "-test")))] + (classloader/require ns-symb)) + ;; now find all the impls of [[metabase-enterprise.audit-app.interface/internal-query]] from the pages namespaces + (into (sorted-set) + (filter (fn [query-type] + (when-let [ns-str (namespace query-type)] + (and (str/starts-with? ns-str "metabase-enterprise.audit-app.pages.") + (not (str/ends-with? ns-str "-test")))))) + (keys (methods audit.i/internal-query)))) + +(defn- query-defmethod-source-form + "Find the source [[defmethod]] or [[schema.core/defmethod]] form for the internal query named by `query-type`." + [query-type] + (let [file (-> (namespace query-type) + munge + (str/replace #"\." "/") + (str ".clj")) + ns-symb (symbol (namespace query-type))] + (with-open [reader (java.io.PushbackReader. (io/reader (io/resource file)))] + (binding [*ns* (the-ns ns-symb)] + (loop [] + (let [form (tools.reader/read reader false ::eof)] + (cond + (= form ::eof) + (throw (ex-info (str "Cannot find source for " query-type) + {:namespace ns-symb, :file file})) + + (and (seq? form) + (#{'defmethod 's/defmethod} (first form)) + (= (second form) 'audit.i/internal-query) + (= (nth form 2) query-type)) + form + + :else + (recur)))))))) + +(defn- arglist-strip-schema-annotations + "Remove Schema `:-` annotations from `arglist`." + [arglist] + (let [remove-next? (volatile! false)] + (into [] + (remove (fn [value] + (cond + (= value :-) + (do + (vreset! remove-next? true) + true) + + @remove-next? + (do + (vreset! remove-next? false) + true) + + :else + false))) + arglist))) + +(defn- query-defmethod-arglists + "Return a sequence of arglists for the internal query named by `query-type`." + [query-type] + (let [fn-tail (drop 3 (query-defmethod-source-form query-type))] + (mapv arglist-strip-schema-annotations + (if (vector? 
(first fn-tail)) + [(first fn-tail)] + (map first fn-tail))))) + +(defn- test-query-maps + "Generate a sequence of test query maps (as you'd pass to the QP) for the internal query named by `query-type`. + Generates one map for arity of the method." + [query-type {:keys [database table card dash]}] + (for [arglist (query-defmethod-arglists query-type)] + {:type :internal + :fn (u/qualified-name query-type) + :args (for [arg (mapv keyword (rest arglist))] + (case arg + :datetime-unit "day" + :dashboard-id (u/the-id dash) + :card-id (u/the-id card) + :user-id (mt/user->id :crowberto) + :database-id (u/the-id database) + :table-id (u/the-id table) + :model "card" + :query-hash (codec/base64-encode (qp-util/query-hash {:database 1, :type :native})) + :query-string "toucans" + :question-filter "bird sales" + :collection-filter "coin collection" + :error-filter "a" + :db-filter "PU" + :sort-column "card.id" + :sort-direction "desc" + :dashboard-name "wow" + :card-name "Credit Card"))})) + +(defn- do-tests-for-query-type + "Run test(s) for the internal query named by `query-type`. Runs one test for each map returned + by [[test-query-maps]]." 
+ [query-type objects] + (doseq [query (test-query-maps query-type objects)] + (testing (format "\nquery =\n%s" (u/pprint-to-str query)) + (is (schema= {:status (s/eq :completed) + s/Keyword s/Any} + (qp/process-query query)))))) + +(defn- do-with-temp-objects [f] + (mt/with-temp* [Database [database] + Table [table {:db_id (u/the-id database)}] + Card [card {:table_id (u/the-id table), :database_id (u/the-id database)}] + Dashboard [dash] + DashboardCard [_ {:card_id (u/the-id card), :dashboard_id (u/the-id dash)}]] + (f {:database database, :table table, :card card, :dash dash}))) + +(defmacro ^:private with-temp-objects [[objects-binding] & body] + `(do-with-temp-objects (fn [~objects-binding] ~@body))) + +(deftest all-queries-test + (mt/with-test-user :crowberto + (with-temp-objects [objects] + (premium-features-test/with-premium-features #{:audit-app} + (doseq [query-type (all-query-methods)] + (testing query-type + (do-tests-for-query-type query-type objects))))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries_test.clj similarity index 56% rename from enterprise/backend/test/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries_test.clj rename to enterprise/backend/test/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries_test.clj index 560e15c1a813..c2f0e95c3be2 100644 --- a/enterprise/backend/test/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries_test.clj +++ b/enterprise/backend/test/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries_test.clj @@ -1,28 +1,29 @@ -(ns metabase-enterprise.audit.query-processor.middleware.handle-audit-queries-test - "Additional tests for this namespace can be found in `metabase-enterprise.audit.pages-test`." 
+(ns metabase-enterprise.audit-app.query-processor.middleware.handle-audit-queries-test + "Additional tests for this namespace can be found in `metabase-enterprise.audit-app.pages-test`." (:require [clojure.test :refer :all] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.query-processor :as qp] - [metabase.test :as mt])) + [metabase.test :as mt] + [metabase.util :as u])) (defn- run-query - [varr & {:as additional-query-params}] + [query-type & {:as additional-query-params}] (mt/with-test-user :crowberto - (metastore-test/with-metastore-token-features #{:audit-app} + (premium-features-test/with-premium-features #{:audit-app} (qp/process-query (merge {:type :internal - :fn (let [mta (meta varr)] - (format "%s/%s" (ns-name (:ns mta)) (:name mta)))} + :fn (u/qualified-name query-type)} additional-query-params))))) -(defn- ^:private ^:internal-query-fn legacy-format-query-fn - [a1] +(defmethod audit.i/internal-query ::legacy-format-query-fn + [_ a1] {:metadata [[:a {:display_name "A", :base_type :type/DateTime}] [:b {:display_name "B", :base_type :type/Integer}]] :results [{:a a1, :b 2} {:a 3, :b 5}]}) -(defn- ^:private ^:internal-query-fn reducible-format-query-fn - [a1] +(defmethod audit.i/internal-query ::reducible-format-query-fn + [_ a1] {:metadata [[:a {:display_name "A", :base_type :type/DateTime}] [:b {:display_name "B", :base_type :type/Integer}]] :results (constantly [[a1 2] @@ -31,12 +32,12 @@ (deftest transform-results-test (testing "Make sure query function result are transformed to QP results correctly" - (doseq [[format-name {:keys [varr expected-rows]}] {"legacy" {:varr #'legacy-format-query-fn - :expected-rows [[100 2] [3 5]]} - "reducible" {:varr #'reducible-format-query-fn - :expected-rows [[101 2] [4 5]]}}] + (doseq [[format-name {:keys [query-type expected-rows]}] {"legacy" {:query-type 
::legacy-format-query-fn + :expected-rows [[100 2] [3 5]]} + "reducible" {:query-type ::reducible-format-query-fn + :expected-rows [[101 2] [4 5]]}}] (testing (format "format = %s" format-name) - (let [results (delay (run-query varr :args [100]))] + (let [results (delay (run-query query-type :args [100]))] (testing "cols" (is (= [{:display_name "A", :base_type :type/DateTime, :name "a"} {:display_name "B", :base_type :type/Integer, :name "b"}] diff --git a/enterprise/backend/test/metabase_enterprise/content_management/api/review_test.clj b/enterprise/backend/test/metabase_enterprise/content_management/api/review_test.clj new file mode 100644 index 000000000000..ac403de6f53d --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/content_management/api/review_test.clj @@ -0,0 +1,97 @@ +(ns metabase-enterprise.content-management.api.review-test + (:require [clojure.test :refer :all] + [metabase.models.card :refer [Card]] + [metabase.models.moderation-review :as mod-review :refer [ModerationReview]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.test :as mt] + [toucan.db :as db])) + +(defn- normalized-response + [moderation-review] + (dissoc moderation-review :id :updated_at :created_at)) + +;;todo: check it can review dashboards, and that it cannot review other models +(deftest create-test + (testing "POST /api/moderation-review" + (testing "Should require a token with `:content-management`" + (premium-features-test/with-premium-features #{} + (is (= "This API endpoint is only enabled if you have a premium token with the :content-management feature." + (mt/user-http-request :rasta :post 402 "moderation-review" + {:text "review" + :status "verified" + :moderated_item_id 1 + :moderated_item_type "card"}))))) + + (premium-features-test/with-premium-features #{:content-management} + (mt/with-temp* [Card [{card-id :id :as card} {:name "Test Card"}]] + (mt/with-model-cleanup [ModerationReview] + (letfn [(moderate! 
[status text] + (normalized-response + (mt/user-http-request :crowberto :post 200 "moderation-review" + {:text text + :status status + :moderated_item_id card-id + :moderated_item_type "card"}))) + (review-count [] (db/count ModerationReview + :moderated_item_id card-id + :moderated_item_type "card"))] + (testing "Non admin cannot create a moderation review" + (is (= 0 (review-count))) + (is (= "You don't have permissions to do that." + (mt/user-http-request :rasta :post 403 "moderation-review" + {:text "review" + :status "verified" + :moderated_item_id card-id + :moderated_item_type "card"}))) + (is (= 0 (review-count)))) + (is (= {:text "Looks good to me" + :moderated_item_id card-id + :moderated_item_type "card" + :moderator_id (mt/user->id :crowberto) + :status "verified" + :most_recent true} + (moderate! "verified" "Looks good to me"))) + (testing "When adding a new moderation review, marks it as most recent" + (is (= {:text "hmm" + :status nil + :most_recent true} + (select-keys (moderate! nil "hmm") [:text :status :most_recent]))) + (testing "And previous moderation reviews are marked as not :most_recent" + (is (= #{{:text "hmm" :most_recent true :status nil} + {:text "Looks good to me" :most_recent false :status "verified"}} + (into #{} + (map #(select-keys % [:text :status :most_recent])) + (db/select ModerationReview + :moderated_item_id card-id + :moderated_item_type "card")))))) + (testing "Ensures we never have more than `modreview/max-moderation-reviews`" + (db/insert-many! ModerationReview (repeat (* 2 mod-review/max-moderation-reviews) + {:moderated_item_id card-id + :moderated_item_type "card" + :moderator_id (mt/user->id :crowberto) + :most_recent false + :status "verified" + :text "old review"})) + ;; manually inserted many + + (is (> (review-count) mod-review/max-moderation-reviews)) + (moderate! 
"verified" "lookin good") + ;; api ensures we never have more than our limit + + (is (<= (review-count) mod-review/max-moderation-reviews))) + (testing "Only allows for valid status" + (doseq [status mod-review/statuses] + (is (= status (:status (moderate! status "good"))))) + ;; i wish this was better. Should have a better error message and honestly shouldn't be a 500 + (tap> (mt/user-http-request :crowberto :post 400 "moderation-review" + {:text "not a chance this works" + :status "invalid status" + :moderated_item_id card-id + :moderated_item_type "card"}))) + (testing "Can't moderate a card that doesn't exist" + (is (= "Not found." + (mt/user-http-request :crowberto :post 404 "moderation-review" + {:text "card doesn't exist" + :status "verified" + :moderated_item_id Integer/MAX_VALUE + :moderated_item_type "card"})))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/api/collection_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/api/collection_test.clj index 90c06d8e59ab..6b693f45daea 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/api/collection_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/api/collection_test.clj @@ -4,7 +4,7 @@ [metabase.models.collection :as collection] [metabase.models.permissions :as perms] [metabase.models.permissions-group :as group] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.test :as mt])) (deftest ee-disabled-snippets-graph-test @@ -17,14 +17,14 @@ (= (:id snippet) (:id a-snippet))) response))))] (testing "\nIf we have a valid EE token, we should only see Snippets in the Root Collection with valid perms" - (metastore-test/with-metastore-token-features #{:enhancements} + (premium-features-test/with-premium-features #{:enhancements} (is (= false (can-see-snippet?))) (perms/grant-collection-read-permissions! 
(group/all-users) (assoc collection/root-collection :namespace "snippets")) (is (= true (can-see-snippet?))))) (testing "\nIf we do not have a valid EE token, all Snippets should come back from the graph regardless of our perms" - (metastore-test/with-metastore-token-features #{} + (premium-features-test/with-premium-features #{} (is (= true (can-see-snippet?))) (perms/revoke-collection-permissions! (group/all-users) (assoc collection/root-collection :namespace "snippets")) diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/api/native_query_snippet_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/api/native_query_snippet_test.clj index f0744d196cd8..32893dccd7cf 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/api/native_query_snippet_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/api/native_query_snippet_test.clj @@ -4,7 +4,7 @@ [metabase.models.collection :as collection] [metabase.models.permissions :as perms] [metabase.models.permissions-group :as group] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.test :as mt] [metabase.util :as u] [toucan.db :as db])) @@ -22,12 +22,12 @@ (testing (format "\nSnippet in %s" collection-name) (mt/with-temp NativeQuerySnippet [snippet {:collection_id (:id collection)}] (testing "\nShould be allowed regardless if EE features aren't enabled" - (metastore-test/with-metastore-token-features #{} + (premium-features-test/with-premium-features #{} (is (= true (has-perms? snippet)) "allowed?"))) (testing "\nWith EE features enabled" - (metastore-test/with-metastore-token-features #{:enhancements} + (premium-features-test/with-premium-features #{:enhancements} (testing (format "\nShould not be allowed with no perms for %s" collection-name) (is (= false (has-perms? 
snippet)) @@ -113,10 +113,10 @@ (when-not (= source-collection dest-collection) (testing (format "\nMove from %s -> %s should need write ('curate') perms for both" (:name source-collection) (:name dest-collection)) (testing "\nShould be allowed if EE perms aren't enabled" - (metastore-test/with-metastore-token-features #{} + (premium-features-test/with-premium-features #{} (is (= true (has-perms?))))) - (metastore-test/with-metastore-token-features #{:enhancements} + (premium-features-test/with-premium-features #{:enhancements} (doseq [c [source-collection dest-collection]] (testing (format "\nPerms for only %s should fail" (:name c)) (try diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/ee_strategy_impl_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/ee_strategy_impl_test.clj index 7de093a2058a..4f40ff6ecc56 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/ee_strategy_impl_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/ee_strategy_impl_test.clj @@ -1,7 +1,7 @@ (ns metabase-enterprise.enhancements.ee-strategy-impl-test (:require [clojure.test :refer :all] [metabase-enterprise.enhancements.ee-strategy-impl :as ee-strategy-impl] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [pretty.core :refer [PrettyPrintable]])) (defprotocol ^:private MyProtocol @@ -20,17 +20,17 @@ (deftest generate-method-impl-test (is (= '((m1 [_] (metabase-enterprise.enhancements.ee-strategy-impl/invoke-ee-when-enabled - #'metabase.public-settings.metastore/enable-enhancements? + #'metabase.public-settings.premium-features/enable-enhancements? metabase-enterprise.enhancements.ee-strategy-impl-test/m1 ee oss)) (m1 [_ a] (metabase-enterprise.enhancements.ee-strategy-impl/invoke-ee-when-enabled - #'metabase.public-settings.metastore/enable-enhancements? + #'metabase.public-settings.premium-features/enable-enhancements? 
metabase-enterprise.enhancements.ee-strategy-impl-test/m1 ee oss a))) (#'ee-strategy-impl/generate-method-impl - (list 'var 'metabase.public-settings.metastore/enable-enhancements?) + (list 'var 'metabase.public-settings.premium-features/enable-enhancements?) 'ee 'oss {:var #'MyProtocol} {:name 'm1 @@ -45,23 +45,23 @@ (is (= '(metabase_enterprise.enhancements.ee_strategy_impl_test.MyProtocol (m1 [_] (metabase-enterprise.enhancements.ee-strategy-impl/invoke-ee-when-enabled - #'metabase.public-settings.metastore/enable-enhancements? + #'metabase.public-settings.premium-features/enable-enhancements? metabase-enterprise.enhancements.ee-strategy-impl-test/m1 ee oss)) (m1 [_ a] (metabase-enterprise.enhancements.ee-strategy-impl/invoke-ee-when-enabled - #'metabase.public-settings.metastore/enable-enhancements? + #'metabase.public-settings.premium-features/enable-enhancements? metabase-enterprise.enhancements.ee-strategy-impl-test/m1 ee oss a)) (m2 [_ a b] (metabase-enterprise.enhancements.ee-strategy-impl/invoke-ee-when-enabled - #'metabase.public-settings.metastore/enable-enhancements? + #'metabase.public-settings.premium-features/enable-enhancements? metabase-enterprise.enhancements.ee-strategy-impl-test/m2 ee oss a b))) (#'ee-strategy-impl/generate-protocol-impl - (list 'var 'metabase.public-settings.metastore/enable-enhancements?) + (list 'var 'metabase.public-settings.premium-features/enable-enhancements?) 'ee 'oss protocol-symb))))))) (deftest e2e-test @@ -77,20 +77,20 @@ MyProtocol (m2 [_ x y] (- x y))) - impl (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-enhancements? ee oss MyProtocol)] + impl (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? ee oss MyProtocol)] (testing "sanity check" (is (= 3 (m2 ee 1 2))) (is (= -1 (m2 oss 1 2)))) - (with-redefs [settings.metastore/enable-enhancements? (constantly false)] + (with-redefs [settings.premium-features/enable-enhancements? 
(constantly false)] (is (= -1 (m2 impl 1 2)))) - (with-redefs [settings.metastore/enable-enhancements? (constantly true)] + (with-redefs [settings.premium-features/enable-enhancements? (constantly true)] (is (= 3 (m2 impl 1 2)))) (testing "Should pretty print" (is (= (str "(metabase-enterprise.enhancements.ee-strategy-impl/reify-ee-strategy-impl" - " #'metabase.public-settings.metastore/enable-enhancements?" + " #'metabase.public-settings.premium-features/enable-enhancements?" " (ee) (oss))") (pr-str impl)))))) diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/integrations/google_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/integrations/google_test.clj index d6d0e5554067..2210da61d166 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/integrations/google_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/integrations/google_test.clj @@ -2,11 +2,11 @@ (:require [clojure.test :refer :all] [metabase.integrations.google :as google] [metabase.models.user :as user :refer [User]] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] [metabase.test :as mt])) (deftest google-auth-create-new-user!-test - (with-redefs [metastore/enable-sso? (constantly true)] + (with-redefs [premium-features/enable-sso? 
(constantly true)] (testing "should support multiple domains (#5218)" (mt/with-temporary-setting-values [google-auth-auto-create-accounts-domain "metabase.com,example.com"] (mt/with-model-cleanup [User] diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/integrations/ldap_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/integrations/ldap_test.clj index f9a9a0320021..e1d3065caa3b 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/integrations/ldap_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/integrations/ldap_test.clj @@ -3,7 +3,7 @@ [metabase-enterprise.enhancements.integrations.ldap :as ldap-ee] [metabase.integrations.ldap :as ldap] [metabase.models.user :as user :refer [User]] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] [metabase.test :as mt] [metabase.test.integrations.ldap :as ldap.test] [metabase.util.schema :as su] @@ -11,7 +11,7 @@ [toucan.db :as db])) (deftest find-test - (with-redefs [metastore/enable-enhancements? (constantly true)] + (with-redefs [premium-features/enable-enhancements? (constantly true)] (ldap.test/with-ldap-server (testing "find by username" (is (= {:dn "cn=John Smith,ou=People,dc=metabase,dc=com" @@ -92,7 +92,7 @@ (ldap/find-user "sally.brown@metabase.com")))))))) (deftest attribute-sync-test - (with-redefs [metastore/enable-enhancements? (constantly true)] + (with-redefs [premium-features/enable-enhancements? (constantly true)] (ldap.test/with-ldap-server (testing "find by email/username should return other attributes as well" (is (= {:dn "cn=Lucky Pigeon,ou=Birds,dc=metabase,dc=com" @@ -165,7 +165,7 @@ (db/delete! User :%lower.email "john.smith@metabase.com")))))))) (deftest update-attributes-on-login-test - (with-redefs [metastore/enable-enhancements? (constantly true)] + (with-redefs [premium-features/enable-enhancements? 
(constantly true)] (ldap.test/with-ldap-server (testing "Existing user's attributes are updated on fetch" (try @@ -214,7 +214,7 @@ (db/delete! User :%lower.email "john.smith@metabase.com"))))))) (deftest fetch-or-create-user-test - (with-redefs [metastore/enable-enhancements? (constantly true)] + (with-redefs [premium-features/enable-enhancements? (constantly true)] (ldap.test/with-ldap-server (testing "a new user is created when they don't already exist" (try diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/models/native_query_snippet/permissions_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/models/native_query_snippet/permissions_test.clj index 458f0b97eb27..654a93bf1921 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/models/native_query_snippet/permissions_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/models/native_query_snippet/permissions_test.clj @@ -5,7 +5,7 @@ [metabase.models.interface :as i] [metabase.models.permissions :as perms] [metabase.models.permissions-group :as group] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.test :as mt])) (def ^:private root-collection (assoc collection/root-collection :name "Root Collection", :namespace "snippets")) @@ -22,9 +22,9 @@ (is (= expected (has-perms-for-id?)))))))] (testing "should be allowed if EE perms aren't enabled" - (metastore-test/with-metastore-token-features #{} + (premium-features-test/with-premium-features #{} (test-perms* true))) - (metastore-test/with-metastore-token-features #{:enhancements} + (premium-features-test/with-premium-features #{:enhancements} (testing "should NOT be allowed if EE perms are enabled and you do not have perms" (test-perms* false)) (testing "should be allowed if you have perms" diff --git 
a/enterprise/backend/test/metabase_enterprise/enhancements/models/permissions/block_permissions_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/models/permissions/block_permissions_test.clj new file mode 100644 index 000000000000..aa6080430782 --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/enhancements/models/permissions/block_permissions_test.clj @@ -0,0 +1,244 @@ +(ns metabase-enterprise.enhancements.models.permissions.block-permissions-test + (:require [clojure.test :refer :all] + [metabase-enterprise.enhancements.models.permissions.block-permissions :as block-perms] + [metabase-enterprise.sandbox.models.group-table-access-policy :refer [GroupTableAccessPolicy]] + [metabase.api.common :as api] + [metabase.models :refer [Card Collection Database Permissions PermissionsGroup PermissionsGroupMembership User]] + [metabase.models.permissions :as perms] + [metabase.models.permissions-group :as group] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.query-processor :as qp] + [metabase.query-processor.middleware.permissions :as qp.perms] + [metabase.test :as mt] + [metabase.util :as u] + [schema.core :as s] + [toucan.db :as db])) + +;;;; Graph-related stuff + +(defn- test-db-perms [group-id] + (get-in (perms/data-perms-graph) [:groups group-id (mt/id)])) + +(defn- api-test-db-perms [group-id] + (into {} + (map (fn [[k v]] + [k (cond-> v (string? 
v) keyword)])) + (get-in (mt/user-http-request :crowberto :get 200 "permissions/graph") + [:groups + (keyword (str group-id)) + (keyword (str (mt/id)))]))) + +(deftest graph-test + (testing "block permissions should come back from" + (doseq [[message perms] {"the graph function" + test-db-perms + + "the API" + api-test-db-perms}] + (testing (str message "\n")) + (mt/with-temp* [PermissionsGroup [{group-id :id}] + Permissions [_ {:group_id group-id + :object (perms/database-block-perms-path (mt/id))}]] + (is (= {:schemas :block} + (perms group-id))) + (testing (str "\nBlock perms and data perms shouldn't exist together at the same time, but if they do for some " + "reason, then the graph endpoint should ignore the data perms.") + (doseq [path [(perms/data-perms-path (mt/id)) + (perms/data-perms-path (mt/id) "public") + (perms/data-perms-path (mt/id) "public" (mt/id :venues))]] + (testing (format "\nPath = %s" (pr-str path)) + (mt/with-temp* [Permissions [_ {:group_id group-id + :object path}]] + (is (= (merge {:schemas :block} + ;; block perms won't affect the value of `:native`; if a given group has both + ;; `/db/1/` and `/block/db/1/` then the graph will come back with `:native + ;; :write` and `:schemas :block`. This state isn't normally allowed, but the + ;; graph code doesn't currently correct it if it happens. Not sure it's worth + ;; the extra code complexity since it should never happen in the first place. + (when (= path (perms/data-perms-path (mt/id))) + {:native :write})) + (perms group-id))))))))))) + +(defn- grant-block-perms! [group-id] + (perms/update-data-perms-graph! [group-id (mt/id)] {:schemas :block})) + +(defn- api-grant-block-perms! 
[group-id] + (let [current-graph (perms/data-perms-graph) + new-graph (assoc-in current-graph [:groups group-id (mt/id)] {:schemas :block}) + result (premium-features-test/with-premium-features #{:advanced-permissions} + (mt/user-http-request :crowberto :put 200 "permissions/graph" new-graph))] + (is (= "block" + (get-in result [:groups + (keyword (str group-id)) + (keyword (str (mt/id))) + :schemas]))))) + +(deftest api-throws-error-if-premium-feature-not-enabled + (testing "PUT /api/permissions/graph" + (testing (str "fails when a group has a block permission set, and the instance doesn't have the " + ":advanced-permissions premium feature enabled") + (mt/with-temp PermissionsGroup [{group-id :id}] + (let [current-graph (perms/data-perms-graph) + new-graph (assoc-in current-graph [:groups group-id (mt/id)] {:schemas :block}) + result (premium-features-test/with-premium-features #{} ; disable premium features + (mt/user-http-request :crowberto :put 402 "permissions/graph" new-graph))] + (is (= "Can't use block permissions without having the advanced-permissions premium feature" + result))))))) + +(deftest update-graph-test + (testing "Should be able to set block permissions with" + (doseq [[description grant!] {"the graph update function" + (fn [group-id] + (premium-features-test/with-premium-features #{:advanced-permissions} + (grant-block-perms! group-id))) + + "the perms graph API endpoint" + api-grant-block-perms!}] + (testing (str description "\n") + (mt/with-temp PermissionsGroup [{group-id :id}] + (testing "Group should have no perms upon creation" + (is (= nil + (test-db-perms group-id)))) + (testing "group has no existing permissions" + (mt/with-model-cleanup [Permissions] + (grant! group-id) + (is (= {:schemas :block} + (test-db-perms group-id))))) + (testing "group has existing data permissions... :block should remove them" + (mt/with-model-cleanup [Permissions] + (perms/grant-full-db-permissions! group-id (mt/id)) + (grant! 
group-id) + (is (= {:schemas :block} + (test-db-perms group-id))) + (is (= #{(perms/database-block-perms-path (mt/id))} + (db/select-field :object Permissions :group_id group-id)))))))))) + +(deftest update-graph-delete-sandboxes-test + (testing "When setting `:block` permissions any GTAP rows for that Group/Database should get deleted." + (premium-features-test/with-premium-features #{:sandboxes :advanced-permissions} + (mt/with-model-cleanup [Permissions] + (mt/with-temp* [PermissionsGroup [{group-id :id}] + GroupTableAccessPolicy [_ {:table_id (mt/id :venues) + :group_id group-id}]] + (grant-block-perms! group-id) + (is (= {:schemas :block} + (test-db-perms group-id))) + (is (not (db/exists? GroupTableAccessPolicy :group_id group-id)))))))) + +(deftest update-graph-data-perms-should-delete-block-perms-test + (testing "granting data permissions should delete existing block permissions" + (mt/with-temp* [PermissionsGroup [{group-id :id}] + Permissions [_ {:group_id group-id, :object (perms/database-block-perms-path (mt/id))}]] + (is (= {:schemas :block} + (test-db-perms group-id))) + (perms/update-data-perms-graph! [group-id (mt/id) :schemas] {"public" {(mt/id :venues) {:read :all}}}) + (is (= {:schemas {"public" {(mt/id :venues) {:read :all}}}} + (test-db-perms group-id)))))) + +(deftest update-graph-disallow-native-query-perms-test + (testing "Disallow block permissions + native query permissions" + (mt/with-temp* [PermissionsGroup [{group-id :id}] + Permissions [_ {:group_id group-id, :object (perms/data-perms-path (mt/id))}]] + (testing "via the fn" + (is (thrown-with-msg? + clojure.lang.ExceptionInfo + ;; TODO -- this error message is totally garbage, fix this + #"DB permissions with a valid combination of values for :native and :schemas" + ;; #"DB permissions with a valid combination of values for :native and :schemas" + (perms/update-data-perms-graph! 
[group-id (mt/id)] + {:schemas :block, :native :write})))) + (testing "via the API" + (let [current-graph (perms/data-perms-graph) + new-graph (assoc-in current-graph + [:groups group-id (mt/id)] + {:schemas :block, :native :write})] + (is (schema= {:message #".*DB permissions with a valid combination of values for :native and :schemas.*" + s/Keyword s/Any} + (premium-features-test/with-premium-features #{:advanced-permissions} + (mt/user-http-request :crowberto :put 500 "permissions/graph" new-graph))))))))) + +(deftest delete-database-delete-block-perms-test + (testing "If a Database gets DELETED, any block permissions for it should get deleted too." + (mt/with-temp* [Database [{db-id :id}] + Permissions [_ {:group_id (u/the-id (group/all-users)) + :object (perms/database-block-perms-path db-id)}]] + (letfn [(perms-exist? [] + (db/exists? Permissions :object (perms/database-block-perms-path db-id)))] + (is (perms-exist?)) + (db/delete! Database :id db-id) + (is (not (perms-exist?))))))) + +;;;; QP perms-check related stuff. + +(deftest qp-block-permissions-test + (mt/with-temp-copy-of-db + (let [query {:database (mt/id) + :type :query + :query {:source-table (mt/id :venues) + :limit 1}}] + (mt/with-temp* [User [{user-id :id}] + PermissionsGroup [{group-id :id}] + PermissionsGroupMembership [_ {:group_id group-id, :user_id user-id}] + Collection [{collection-id :id}] + Card [{card-id :id} {:collection_id collection-id + :dataset_query query}] + Permissions [_ {:group_id group-id, :object (perms/collection-read-path collection-id)}]] + (premium-features-test/with-premium-features #{:enhancements} + (perms/revoke-data-perms! (group/all-users) (mt/id)) + (perms/revoke-data-perms! 
group-id (mt/id)) + (letfn [(run-ad-hoc-query [] + (mt/with-current-user user-id + (qp/process-query query))) + (run-saved-question [] + (binding [qp.perms/*card-id* card-id] + (run-ad-hoc-query))) + (check-block-perms [] + (mt/with-current-user user-id + (#'qp.perms/check-block-permissions query)))] + (testing "sanity check: should not be able to run ad-hoc query" + (is (not (contains? @api/*current-user-permissions-set* + (perms/data-perms-path (mt/id))))) + (is (thrown-with-msg? + clojure.lang.ExceptionInfo + #"You do not have permissions to run this query" + (run-ad-hoc-query)))) + (testing "sanity check: should be able to run query as saved Question before block perms are set." + (is (run-saved-question)) + (is (= ::block-perms/no-block-permissions-for-db + (check-block-perms)))) + ;; 'grant' the block permissions. + (mt/with-temp Permissions [_ {:group_id group-id, :object (perms/database-block-perms-path (mt/id))}] + (testing "if EE token does not have the `:enhancements` feature: should not do check" + (premium-features-test/with-premium-features #{} + (is (= ::block-perms/enhancements-not-enabled + (check-block-perms))))) + (testing "disallow running the query" + (is (thrown-with-msg? + clojure.lang.ExceptionInfo + #"Blocked: you are not allowed to run queries against Database \d+" + (check-block-perms))) + (is (thrown-with-msg? + clojure.lang.ExceptionInfo + #"Blocked: you are not allowed to run queries against Database \d+" + (run-saved-question)))) + (testing "\nAllow running if current User has data permissions from another group." 
+ (mt/with-temp* [PermissionsGroup [{group-2-id :id}] + PermissionsGroupMembership [_ {:group_id group-2-id, :user_id user-id}]] + (doseq [[message perms] {"with full DB perms" (perms/data-perms-path (mt/id)) + "with perms for the Table in question" (perms/table-query-path (mt/id :venues))}] + (mt/with-temp Permissions [_ {:group_id group-2-id, :object perms}] + (testing "Should be able to run the query" + (doseq [[message f] {"ad-hoc queries" run-ad-hoc-query + "Saved Questions" run-saved-question}] + (testing message + (is (f))))))) + (testing "\nSandboxed permissions" + (premium-features-test/with-premium-features #{:enhancements :sandboxing} + (mt/with-temp* [Permissions [_ {:group_id group-2-id + :object (perms/table-segmented-query-path (mt/id :venues))}] + GroupTableAccessPolicy [_ {:table_id (mt/id :venues), :group_id group-id}]] + (testing "Should be able to run the query" + (doseq [[message f] {"ad-hoc queries" run-ad-hoc-query + "Saved Questions" run-saved-question}] + (testing message + (is (f))))))))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/sandbox/api/card_test.clj b/enterprise/backend/test/metabase_enterprise/sandbox/api/card_test.clj index 694151b31019..2b4664bf43b5 100644 --- a/enterprise/backend/test/metabase_enterprise/sandbox/api/card_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sandbox/api/card_test.clj @@ -18,7 +18,7 @@ PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta) :group_id (u/the-id group)}]] (mt/with-db db - (perms/revoke-permissions! (perms-group/all-users) db) + (perms/revoke-data-perms! (perms-group/all-users) db) (perms/grant-permissions! group (perms/table-segmented-query-path table)) (perms/grant-collection-readwrite-permissions! group collection) (is (some? ((mt/user->client :rasta) :post 202 "card" @@ -36,7 +36,7 @@ Card [card {:name "Some Name" :collection_id (u/the-id collection)}]] (mt/with-db db - (perms/revoke-permissions! 
(perms-group/all-users) db) + (perms/revoke-data-perms! (perms-group/all-users) db) (perms/grant-permissions! group (perms/table-segmented-query-path table)) (perms/grant-collection-readwrite-permissions! group collection) (is (= "Another Name" diff --git a/enterprise/backend/test/metabase_enterprise/sandbox/api/gtap_test.clj b/enterprise/backend/test/metabase_enterprise/sandbox/api/gtap_test.clj index 043257dbdb91..bd464e979fed 100644 --- a/enterprise/backend/test/metabase_enterprise/sandbox/api/gtap_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sandbox/api/gtap_test.clj @@ -3,18 +3,15 @@ [metabase-enterprise.sandbox.models.group-table-access-policy :refer [GroupTableAccessPolicy]] [metabase.http-client :as http] [metabase.models :refer [Card Field PermissionsGroup Table]] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.server.middleware.util :as middleware.u] [metabase.test :as mt] [schema.core :as s])) -(defmacro ^:private with-sandboxes-enabled [& body] - `(with-redefs [metastore/enable-sandboxes? (constantly true)] - ~@body)) - (deftest require-auth-test (testing "Must be authenticated to query for GTAPs" - (with-sandboxes-enabled + (premium-features-test/with-premium-features #{:sandboxes} (is (= (get middleware.u/response-unauthentic :body) (http/client :get 401 "mt/gtap"))) @@ -32,7 +29,7 @@ "Invokes `body` ensuring any `GroupTableAccessPolicy` created will be removed afterward. 
Leaving behind a GTAP can case referential integrity failures for any related `Card` that would be cleaned up as part of a `with-temp*` call" [& body] - `(with-sandboxes-enabled + `(premium-features-test/with-premium-features #{:sandboxes} (mt/with-model-cleanup [GroupTableAccessPolicy] ~@body))) @@ -44,16 +41,16 @@ (deftest validate-token-test (testing "POST /api/mt/gtap" (testing "Must have a valid token to use GTAPs" - (with-redefs [metastore/enable-sandboxes? (constantly false)] + (with-redefs [premium-features/enable-sandboxes? (constantly false)] (mt/with-temp* [Table [{table-id :id}] PermissionsGroup [{group-id :id}] Card [{card-id :id}]] - (is (re= #".*sandboxing is not enabled.*" - (mt/user-http-request :crowberto :post 403 "mt/gtap" - {:table_id table-id - :group_id group-id - :card_id card-id - :attribute_remappings {"foo" 1}})))))))) + (is (= "This API endpoint is only enabled if you have a premium token with the :sandboxes feature." + (mt/user-http-request :crowberto :post 402 "mt/gtap" + {:table_id table-id + :group_id group-id + :card_id card-id + :attribute_remappings {"foo" 1}})))))))) (deftest create-gtap-test (testing "POST /api/mt/gtap" (mt/with-temp* [Table [{table-id :id}] @@ -120,7 +117,7 @@ (mt/with-temp* [Table [{table-id :id}] PermissionsGroup [{group-id :id}] Card [{card-id :id}]] - (with-sandboxes-enabled + (premium-features-test/with-premium-features #{:sandboxes} (testing "Test that we can update only the attribute remappings for a GTAP" (mt/with-temp GroupTableAccessPolicy [{gtap-id :id} {:table_id table-id :group_id group-id diff --git a/enterprise/backend/test/metabase_enterprise/sandbox/pulse_test.clj b/enterprise/backend/test/metabase_enterprise/sandbox/pulse_test.clj index 2c6c4386a1c4..e3bdeed9a248 100644 --- a/enterprise/backend/test/metabase_enterprise/sandbox/pulse_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sandbox/pulse_test.clj @@ -3,7 +3,6 @@ [clojure.java.io :as io] [clojure.test :refer :all] [medley.core 
:as m] - [metabase-enterprise.sandbox.test-util :as mt.tu] [metabase.email.messages :as messages] [metabase.models :refer [Card Pulse PulseCard PulseChannel PulseChannelRecipient]] [metabase.models.pulse :as models.pulse] @@ -41,7 +40,7 @@ :user_id (mt/user->id :rasta)}]] (mt/with-temporary-setting-values [email-from-address "metamailman@metabase.com"] (mt/with-fake-inbox - (with-redefs [messages/render-pulse-email (fn [_ _ [{:keys [result]}]] + (with-redefs [messages/render-pulse-email (fn [_ _ _ [{:keys [result]}]] [{:result result}])] (mt/with-test-user nil (pulse/send-pulse! pulse))) @@ -118,7 +117,7 @@ :enabled :true :recipients [{:id (mt/user->id :rasta) :email "rasta@metabase.com"}]}]}) - (let [[{html :content} {attachment :content}] (get-in @mt/inbox ["rasta@metabase.com" 0 :body])] + (let [[{html :content} {_icon :content} {attachment :content}] (get-in @mt/inbox ["rasta@metabase.com" 0 :body])] (testing "email" (is (= 22 (html->row-count html)))) @@ -146,7 +145,7 @@ (mt/with-test-user nil (pulse/send-pulse! 
(models.pulse/retrieve-pulse pulse-id))) (let [email-results @mt/inbox - [{html :content} {attachment :content}] (get-in email-results ["rasta@metabase.com" 0 :body])] + [{html :content} {_icon :attachment} {attachment :content}] (get-in email-results ["rasta@metabase.com" 0 :body])] (testing "email" (is (= 22 (html->row-count html)))) diff --git a/enterprise/backend/test/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions_test.clj b/enterprise/backend/test/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions_test.clj index e37270e8b9cb..73393962db62 100644 --- a/enterprise/backend/test/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions_test.clj @@ -107,7 +107,7 @@ {:query (mt/native-query {:query (format-honeysql - {:select [(identifier :venues :name)] + {:select [(identifier :venues :id) (identifier :venues :name)] :from [(identifier :venues)] :order-by [(identifier :venues :id)]})})}) @@ -271,10 +271,11 @@ (run-venues-count-query))))) (testing "Make sure that you can still use a SQL-based GTAP without needing to have SQL read perms for the Database" - (is (= [["Red Medicine"] ["Stout Burgers & Beers"]] - (mt/rows + (is (= [[1 "Red Medicine"] + [2 "Stout Burgers & Beers"]] + (mt/formatted-rows [int str] (mt/with-gtaps {:gtaps {:venues (venue-names-native-gtap-def)}} - (mt/run-mbql-query venues {:limit 2})))))) + (mt/run-mbql-query venues {:limit 2, :order-by [[:asc [:field (mt/id :venues :id)]]]})))))) (testing (str "When no card_id is included in the GTAP, should default to a query against the table, with the GTAP " "criteria applied") @@ -294,7 +295,7 @@ (mt/with-temp* [Collection [collection] Card [card {:collection_id (u/the-id collection)}]] (mt/with-group [group] - (perms/revoke-permissions! (perms-group/all-users) (mt/id)) + (perms/revoke-data-perms! 
(perms-group/all-users) (mt/id)) (perms/grant-collection-read-permissions! group collection) (mt/with-test-user :rasta (binding [qp.perms/*card-id* (u/the-id card)] @@ -327,14 +328,16 @@ (defn- row-level-restrictions-fk-drivers "Drivers to test row-level restrictions against foreign keys with. Includes BigQuery, which for whatever reason does - not normally have FK tests ran for it." + not normally have FK tests ran for it. Excludes Presto JDBC, because that driver does NOT support fetching foreign + keys from the JDBC metadata, even though we enable the feature in the UI." [] (cond-> (mt/normal-drivers-with-feature :nested-queries :foreign-keys) - (@tx.env/test-drivers :bigquery) (conj :bigquery))) + (@tx.env/test-drivers :bigquery) (conj :bigquery) + true (disj :presto-jdbc))) (deftest e2e-fks-test (mt/test-drivers (row-level-restrictions-fk-drivers) - (mt/with-bigquery-fks + (mt/with-bigquery-fks :bigquery (testing (str "1 - Creates a GTAP filtering question, looking for any checkins happening on or after 2014\n" "2 - Apply the `user` attribute, looking for only our user (i.e. `user_id` = 5)\n" "3 - Checkins are related to Venues, query for checkins, grouping by the Venue's price\n" @@ -674,7 +677,7 @@ {:gtaps {:reviews {:remappings {"user_id" [:dimension $product_id]}}} :attributes {"user_id" 1}}) ;; grant full data perms for products - (perms/grant-permissions! (perms-group/all-users) (perms/object-path + (perms/grant-permissions! (perms-group/all-users) (perms/data-perms-path (mt/id) (db/select-one-field :schema Table :id (mt/id :products)) (mt/id :products))) @@ -798,7 +801,7 @@ {:gtaps {:orders {:remappings {:user_id [:dimension $orders.user_id]}}} :attributes {:user_id "1"}}) ;; make sure the sandboxed group can still access the Products table, which is referenced below. - (perms/grant-permissions! &group (perms/object-path (mt/id) "PUBLIC" (mt/id :products))) + (perms/grant-permissions! 
&group (perms/data-perms-path (mt/id) "PUBLIC" (mt/id :products))) (letfn [(do-tests [] ;; create a query based on the sandboxed Table (testing "should be able to run the query. Results should come back with correct metadata" @@ -859,10 +862,6 @@ (is (seq metadata)) (db/update! Card card-id :result_metadata metadata))) -(defn- unset-query-metadata-for-gtap-card! [group table-name] - (let [card-id (db/select-one-field :card_id GroupTableAccessPolicy :group_id (u/the-id group), :table_id (mt/id table-name))] - (db/update! Card card-id :result_metadata nil))) - (deftest native-fk-remapping-test (testing "FK remapping should still work for questions with native sandboxes (EE #520)" (mt/dataset sample-dataset @@ -914,8 +913,11 @@ (mt/rows (mt/run-mbql-query orders {:limit 1}))))))))))))) (deftest pivot-query-test - ;; sample-dataset doesn't work on Redshift yet -- see #14784 - (mt/test-drivers (disj (mt/normal-drivers-with-feature :foreign-keys :nested-queries :left-join) :redshift) + (mt/test-drivers (disj + (mt/normal-drivers-with-feature :foreign-keys :nested-queries :left-join) + ;; this test relies on a FK relation between $product_id->products.category, so skip for Presto + ;; JDBC, because that driver doesn't support resolving FKs from the JDBC metadata + :presto-jdbc) (testing "Pivot table queries should work with sandboxed users (#14969)" (mt/dataset sample-dataset (mt/with-gtaps {:gtaps (mt/$ids @@ -929,7 +931,7 @@ [nil "Widget" 1 498.59] ["Twitter" nil 2 900.1] [nil nil 3 900.1]] - (sort-by (let [nil-first? (mt/sorts-nil-first? driver/*driver*) + (sort-by (let [nil-first? (mt/sorts-nil-first? driver/*driver* :type/Text) sort-str (fn [s] (cond (some? 
s) s diff --git a/enterprise/backend/test/metabase_enterprise/sandbox/test_util.clj b/enterprise/backend/test/metabase_enterprise/sandbox/test_util.clj index f95485ea8eff..3363b1606ab2 100644 --- a/enterprise/backend/test/metabase_enterprise/sandbox/test_util.clj +++ b/enterprise/backend/test/metabase_enterprise/sandbox/test_util.clj @@ -6,7 +6,7 @@ [metabase.models.permissions-group :as perms-group] [metabase.models.table :refer [Table]] [metabase.models.user :refer [User]] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.server.middleware.session :as mw.session] [metabase.test.data :as data] [metabase.test.data.impl :as data.impl] @@ -62,13 +62,13 @@ (defn do-with-gtaps-for-user [args-fn test-user-name-or-user-id f] (letfn [(thunk [] ;; remove perms for All Users group - (perms/revoke-permissions! (perms-group/all-users) (data/db)) + (perms/revoke-data-perms! (perms-group/all-users) (data/db)) ;; create new perms group (users/with-group-for-user [group test-user-name-or-user-id] (let [{:keys [gtaps attributes]} (s/validate WithGTAPsArgs (args-fn))] ;; set user login_attributes (with-user-attributes test-user-name-or-user-id attributes - (metastore-test/with-metastore-token-features #{:sandboxes} + (premium-features-test/with-premium-features #{:sandboxes} ;; create Cards/GTAPs from defs (do-with-gtap-defs group gtaps (fn [] diff --git a/enterprise/backend/test/metabase_enterprise/search/scoring_test.clj b/enterprise/backend/test/metabase_enterprise/search/scoring_test.clj index f808b23312d2..6b38ee3b5467 100644 --- a/enterprise/backend/test/metabase_enterprise/search/scoring_test.clj +++ b/enterprise/backend/test/metabase_enterprise/search/scoring_test.clj @@ -4,6 +4,18 @@ [metabase-enterprise.search.scoring :as ee-scoring] [metabase.search.scoring :as scoring])) +(deftest verified-score-test + (let [score #'ee-scoring/verified-score + item (fn [id status] 
{:moderated_status status + :id id + :model "card"}) + score (fn [items] (into [] (map :id) (reverse (sort-by score items))))] + (testing "verification bumps result" + ;; stable sort all with score 0 and then reverse to get descending rather than ascending + (is (= [3 2 1] (score [(item 1 nil) (item 2 nil) (item 3 nil)]))) + ;; verified item is promoted + (is (= [1 3 2] (score [(item 1 "verified") (item 2 nil) (item 3 nil)])))))) + (deftest official-collection-tests (testing "it should bump up the value of items in official collections" ;; using the ee implementation that isn't wrapped by enable-enhancements? check @@ -36,4 +48,28 @@ "examples of custom expressions" "custom expression examples"] (map :name (sort-by ee-score [a b c - (assoc d :collection_authority_level "official")])))))))) + (assoc d :collection_authority_level "official")]))))))) + (testing "It should bump up the value of verified items" + (let [search-string "foo" + dashboard-count #(assoc % :dashboardcard_count 0) + ee-score (comp :score + (partial scoring/score-and-result ee-scoring/scoring-impl search-string) + dashboard-count) + os-score (comp :score + (partial scoring/score-and-result scoring/oss-score-impl search-string) + dashboard-count) + labeled-results {:a {:name "foobar" :model "card" :id :a} + :b {:name "foo foo" :model "card" :id :b} + :c {:name "foo foo foo" :model "card" :id :c}} + {:keys [a b c]} labeled-results] + (doseq [item [a b c]] + (is (> (ee-score (assoc item :moderated_status "verified")) (ee-score item)) + (str "Item not greater for model: " (:model item)))) + (let [items (shuffle [a b c])] + (is (= (sort-by os-score items) (sort-by ee-score items)))) + ;; a is sorted lowest here (sort-by is ascending) + (is (= [:a :c :b] (map :id (sort-by ee-score [a b c])))) + ;; a is verified and is now last or highest score + (is (= [:c :b :a] + (map :id + (sort-by ee-score [(assoc a :moderated_status "verified") b c]))))))) diff --git 
a/enterprise/backend/test/metabase_enterprise/serialization/load_test.clj b/enterprise/backend/test/metabase_enterprise/serialization/load_test.clj index 6befb6bf86d7..e3386243540a 100644 --- a/enterprise/backend/test/metabase_enterprise/serialization/load_test.clj +++ b/enterprise/backend/test/metabase_enterprise/serialization/load_test.clj @@ -2,15 +2,13 @@ (:refer-clojure :exclude [load]) (:require [clojure.data :as diff] [clojure.java.io :as io] + [clojure.string :as str] [clojure.test :refer [deftest is testing use-fixtures]] [metabase-enterprise.serialization.cmd :refer [dump load]] [metabase-enterprise.serialization.test-util :as ts] [metabase.models :refer [Card Collection Dashboard DashboardCard DashboardCardSeries Database Dependency Dimension Field FieldValues Metric NativeQuerySnippet Pulse PulseCard PulseChannel Segment Table User]] - [metabase.test.data.users :as test-users] - [metabase.util :as u] - [toucan.db :as db] [metabase.query-processor :as qp] [metabase.query-processor.middleware.permissions :as qp.perms] [metabase.query-processor.store :as qp.store] @@ -18,9 +16,11 @@ [metabase.shared.models.visualization-settings-test :as mb.viz-test] [metabase.shared.util.log :as log] [metabase.test :as mt] + [metabase.test.data.users :as test-users] [metabase.test.fixtures :as fixtures] - [metabase.util.i18n :refer [deferred-trs trs]] - [clojure.string :as str]) + [metabase.util :as u] + [metabase.util.i18n :refer [trs]] + [toucan.db :as db]) (:import org.apache.commons.io.FileUtils)) (use-fixtures :once @@ -140,7 +140,7 @@ card)) (defn- collection-parent-name [collection] - (let [[_ parent-id] (re-matches #".*/(\d+)/$" (:location collection))] + (let [[_ ^String parent-id] (re-matches #".*/(\d+)/$" (:location collection))] (db/select-one-field :name Collection :id (Integer. 
parent-id)))) (defmethod assert-loaded-entity (type Collection) @@ -155,8 +155,10 @@ (collection-parent-name collection))) "Deeply Nested Personal Collection" (is (= "Nested Personal Collection" (collection-parent-name collection))) - "Felicia's Personal Collection" (is false "Should not have loaded different user's PC") - "Felicia's Nested Collection" (is false "Should not have loaded different user's PC")) + "Felicia's Personal Collection" (is (nil? (:name collection)) + "Should not have loaded different user's PC") + "Felicia's Nested Collection" (is (nil? (:name collection)) + "Should not have loaded different user's PC")) collection) (defmethod assert-loaded-entity (type NativeQuerySnippet) @@ -200,12 +202,12 @@ ;; check that the linked :card_id matches the expected name for each in the series ;; based on the entities declared in test_util.clj (let [series-pos (:position series) - expected-name (case series-pos + expected-name (case (int series-pos) 0 "My Card" 1 "My Nested Card" 2 ts/root-card-name)] (is (= expected-name (db/select-one-field :name Card :id (:card_id series)))) - (case series-pos + (case (int series-pos) 1 (testing "Top level click action was preserved for dashboard card" (let [viz-settings (:visualization_settings dashcard) @@ -265,7 +267,7 @@ ;; in case it already exists (u/ignore-exceptions (delete-directory! dump-dir)) - (mt/test-drivers (-> (mt/normal-drivers-with-feature :basic-aggregations :binning :expressions) + (mt/test-drivers (-> (mt/normal-drivers-with-feature :basic-aggregations :binning :expressions :foreign-keys) ;; We will run this roundtrip test against any database supporting these features ^ except ;; certain ones for specific reasons, outlined below. 
;; @@ -286,7 +288,10 @@ :sqlserver ; ORDER BY not allowed not allowed in derived tables (subselects) :vertica ; bare table name doesn't work; it's test_data_venues instead of venues :sqlite ; foreign-keys is not supported by this driver - :sparksql)) ; foreign-keys is not supported by this driver + :sparksql ; foreign-keys is not supported by this driver + ;; foreign-keys is not supported by the below driver even though it has joins + :bigquery-cloud-sdk + )) (let [fingerprint (ts/with-world (qp.store/fetch-and-store-database! db-id) diff --git a/enterprise/backend/test/metabase_enterprise/serialization/names_test.clj b/enterprise/backend/test/metabase_enterprise/serialization/names_test.clj index bb9afe31a785..32b5ed823316 100644 --- a/enterprise/backend/test/metabase_enterprise/serialization/names_test.clj +++ b/enterprise/backend/test/metabase_enterprise/serialization/names_test.clj @@ -3,8 +3,8 @@ [metabase-enterprise.serialization.names :as names] [metabase-enterprise.serialization.test-util :as ts] [metabase.models :refer [Card Collection Dashboard Database Field Metric NativeQuerySnippet Segment Table]] - [metabase.util :as u] - [metabase.test :as mt])) + [metabase.test :as mt] + [metabase.util :as u])) (deftest safe-name-test (are [s expected] (= (names/safe-name {:name s}) expected) diff --git a/enterprise/backend/test/metabase_enterprise/serialization/test_util.clj b/enterprise/backend/test/metabase_enterprise/serialization/test_util.clj index ec527afea2da..4a00d54bdbf1 100644 --- a/enterprise/backend/test/metabase_enterprise/serialization/test_util.clj +++ b/enterprise/backend/test/metabase_enterprise/serialization/test_util.clj @@ -8,8 +8,7 @@ [metabase.test :as mt] [metabase.test.data :as data] [toucan.db :as db] - [toucan.util.test :as tt] - [metabase-enterprise.serialization.names :refer [fully-qualified-name]])) + [toucan.util.test :as tt])) (def root-card-name "My Root Card \\ with a/nasty: (*) //n`me ' * ? 
\" < > | ŠĐž") (def temp-db-name "Fingerprint test-data copy") diff --git a/enterprise/backend/test/metabase_enterprise/sso/integrations/jwt_test.clj b/enterprise/backend/test/metabase_enterprise/sso/integrations/jwt_test.clj index 3ac45958159c..22845dcc4e3c 100644 --- a/enterprise/backend/test/metabase_enterprise/sso/integrations/jwt_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sso/integrations/jwt_test.clj @@ -9,7 +9,7 @@ [metabase.models.permissions-group :as group :refer [PermissionsGroup]] [metabase.models.permissions-group-membership :refer [PermissionsGroupMembership]] [metabase.models.user :refer [User]] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.test :as mt] [metabase.test.fixtures :as fixtures] [metabase.util :as u] @@ -32,23 +32,23 @@ (deftest sso-prereqs-test (testing "SSO requests fail if SAML hasn't been enabled" (mt/with-temporary-setting-values [jwt-enabled false] - (saml-test/with-valid-metastore-token + (saml-test/with-valid-premium-features-token (is (= "SSO has not been enabled and/or configured" (saml-test/client :get 400 "/auth/sso")))) - (testing "SSO requests fail if they don't have a valid metastore token" - (metastore-test/with-metastore-token-features nil + (testing "SSO requests fail if they don't have a valid premium-features token" + (premium-features-test/with-premium-features nil (is (= "SSO requires a valid token" (saml-test/client :get 403 "/auth/sso"))))))) (testing "SSO requests fail if SAML is enabled but hasn't been configured" - (saml-test/with-valid-metastore-token + (saml-test/with-valid-premium-features-token (mt/with-temporary-setting-values [jwt-enabled true] (is (= "JWT SSO has not been enabled and/or configured" (saml-test/client :get 400 "/auth/sso")))))) (testing "The IdP provider certificate must also be included for SSO to be configured" - (saml-test/with-valid-metastore-token + 
(saml-test/with-valid-premium-features-token (mt/with-temporary-setting-values [jwt-enabled true jwt-identity-provider-uri default-idp-uri] (is (= "JWT SSO has not been enabled and/or configured" @@ -63,7 +63,7 @@ (defmacro ^:private with-jwt-default-setup [& body] `(disable-other-sso-types (fn [] - (saml-test/with-valid-metastore-token + (saml-test/with-valid-premium-features-token (saml-test/call-with-login-attributes-cleared! (fn [] (call-with-default-jwt-config diff --git a/enterprise/backend/test/metabase_enterprise/sso/integrations/saml_test.clj b/enterprise/backend/test/metabase_enterprise/sso/integrations/saml_test.clj index 8b492a9eb330..b03efd7aef9c 100644 --- a/enterprise/backend/test/metabase_enterprise/sso/integrations/saml_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sso/integrations/saml_test.clj @@ -9,7 +9,7 @@ [metabase.models.permissions-group-membership :refer [PermissionsGroupMembership]] [metabase.models.user :refer [User]] [metabase.public-settings :as public-settings] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.server.middleware.session :as mw.session] [metabase.test :as mt] [metabase.test.fixtures :as fixtures] @@ -32,11 +32,11 @@ (use-fixtures :each disable-other-sso-types) -(defmacro with-valid-metastore-token - "Stubs the `metastore/enable-sso?` function to simulate a valid token. This needs to be included to test any of the +(defmacro with-valid-premium-features-token + "Stubs the `premium-features/enable-sso?` function to simulate a valid token. This needs to be included to test any of the SSO features" [& body] - `(metastore-test/with-metastore-token-features #{:sso} + `(premium-features-test/with-premium-features #{:sso} ~@body)) (defn client @@ -107,26 +107,26 @@ g9oYBkdxlhK9zZvkjCgaLCen+0aY67A=") (testing "make sure our test certificate is actually valid" (is (some? 
(#'sso-settings/validate-saml-idp-cert default-idp-cert))))) -(deftest require-valid-metastore-token-test - (testing "SSO requests fail if they don't have a valid metastore token" - (metastore-test/with-metastore-token-features #{} +(deftest require-valid-premium-features-token-test + (testing "SSO requests fail if they don't have a valid premium-features token" + (premium-features-test/with-premium-features #{} (is (= "SSO requires a valid token" (client :get 403 "/auth/sso")))))) (deftest require-saml-enabled-test (testing "SSO requests fail if SAML hasn't been enabled" - (with-valid-metastore-token + (with-valid-premium-features-token (mt/with-temporary-setting-values [saml-enabled false] (is (some? (client :get 400 "/auth/sso")))))) (testing "SSO requests fail if SAML is enabled but hasn't been configured" - (with-valid-metastore-token + (with-valid-premium-features-token (mt/with-temporary-setting-values [saml-enabled true saml-identity-provider-uri nil] (is (some? (client :get 400 "/auth/sso")))))) (testing "The IDP provider certificate must also be included for SSO to be configured" - (with-valid-metastore-token + (with-valid-premium-features-token (mt/with-temporary-setting-values [saml-enabled true saml-identity-provider-uri default-idp-uri saml-identity-provider-certificate nil] @@ -148,7 +148,7 @@ g9oYBkdxlhK9zZvkjCgaLCen+0aY67A=") (u/ignore-exceptions (db/update-where! User {} :login_attributes nil))))) (defmacro ^:private with-saml-default-setup [& body] - `(with-valid-metastore-token + `(with-valid-premium-features-token (call-with-login-attributes-cleared! 
(fn [] (call-with-default-saml-config diff --git a/enterprise/frontend/src/metabase-enterprise/advanced_config/index.js b/enterprise/frontend/src/metabase-enterprise/advanced_config/index.js new file mode 100644 index 000000000000..bad7e1d117b3 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/advanced_config/index.js @@ -0,0 +1,23 @@ +import { t } from "ttag"; +import _ from "underscore"; +import { updateIn } from "icepick"; +import { PLUGIN_ADMIN_SETTINGS_UPDATES } from "metabase/plugins"; +import { hasPremiumFeature } from "metabase-enterprise/settings"; + +if (hasPremiumFeature("advanced_config")) { + PLUGIN_ADMIN_SETTINGS_UPDATES.push(sections => + updateIn(sections, ["general", "settings"], settings => { + const index = settings.findIndex(({ key }) => key === "admin-email"); + + return [ + ..._.head(settings, index + 1), + { + key: "subscription-allowed-domains", + display_name: t`Approved domains for notifications`, + type: "string", + }, + ..._.tail(settings, index + 1), + ]; + }), + ); +} diff --git a/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.jsx b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.jsx new file mode 100644 index 000000000000..e9661f5cdbe5 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.jsx @@ -0,0 +1,66 @@ +import React from "react"; +import { t, jt } from "ttag"; + +import { color } from "metabase/lib/colors"; +import MetabaseSettings from "metabase/lib/settings"; + +import { + PermissionIcon, + DataPermissionsHelpRoot, + DataPermissionsHelpFooter, + DataPermissionsHelpContent, + DataPermissionsHelpLink, + DataPermissionsHelpLinkIcon, +} from "./DataPermissionsHelp.styled"; + +export const DataPermissionsHelp = () => ( + + +

{t`About data permissions`}

+

{t`Each of your user groups can have a level of access for each of your databases on the tables they contain.`}

+

{jt`Users can be members of multiple groups, and are given the ${( + {t`most permissive`} + )} level of access for a database or table across all the groups they’re a member of.`}

+

{t`Unless a user group's access for a given database is set to “block", they’ll be able to view any saved question based on that data if they have access to the collection it’s saved in.`}

+

{t`Access levels`}

+ +

+ + {t`Unrestricted access`} +

+

{t`Users can use the visual query builder to ask questions based on all tables in this database. A user group must have Unrestricted access for a database if you want to give them access to the SQL/native query editor.`}

+ +

+ + {t`Granular access`} +

+

{t`Restrict user access to specific tables in a database. When you select this option, you’ll be taken to the table-level view of that database to set the access level for each table.`}

+ +

+ + {t`No self-service access`} +

+

{t`Prevent users from creating new ad hoc queries or questions based on this data, or from seeing this data in the Browse Data screen. Users with this level of access can still see saved questions and charts based on this data in Collections they have access to.`}

+ +

+ + {t`Block`} +

+

{t`Ensure users can’t ever see the data from this database regardless of their permissions at the Collection level. Keep in mind that if a user belongs to another group that does have data access, that setting will take precedence, and the user's access will not be blocked.`}

+ +

{t`Only available in certain Metabase plans.`}

+
+ + + + + {t`Learn more about permissions`} + + +
+); diff --git a/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.styled.jsx b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.styled.jsx new file mode 100644 index 000000000000..c6fcb81d37a6 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.styled.jsx @@ -0,0 +1,71 @@ +import styled from "styled-components"; +import Icon from "metabase/components/Icon"; +import { color, lighten } from "metabase/lib/colors"; +import ExternalLink from "metabase/components/ExternalLink"; + +export const DataPermissionsHelpRoot = styled.div` + h2 { + margin-top: 2rem; + margin-bottom: 1rem; + font-size: 18px; + line-height: 20px; + + &:first-of-type { + margin-top: 8px; + } + } + + h3 { + margin-top: 1.5rem; + font-size: 14px; + line-height: 20px; + } + + h2 + h3 { + margin-top: 1rem; + } + + p { + font-size: 13px; + line-height: 18px; + margin: 0.5rem 0; + } +`; + +export const PermissionIcon = styled(Icon).attrs({ size: 16 })` + padding-right: 0.375rem; + vertical-align: text-bottom; + color: ${props => color(props.color)}; +`; + +export const DataPermissionsHelpContent = styled.div` + padding: 1rem 2rem; +`; + +export const DataPermissionsHelpFooter = styled.footer` + padding: 2rem; + border-top: 1px solid ${color("border")}; +`; + +export const DataPermissionsHelpLink = styled(ExternalLink)` + display: flex; + align-items: center; + padding: 16px 24px; + font-size: 14px; + font-weight: 700; + line-height: 20px; + color: ${color("text-dark")}; + border: 1px solid ${color("border")}; + border-radius: 8px; + transition: all 200ms; + + &:hover { + border-color: ${color("brand")}; + background-color: ${lighten("brand", 0.6)}; + } +`; + +export const DataPermissionsHelpLinkIcon = styled(Icon)` + color: ${color("text-light")}; + margin-right: 1rem; +`; diff --git 
a/enterprise/frontend/src/metabase-enterprise/advanced_permissions/index.js b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/index.js new file mode 100644 index 000000000000..2e5daf30cdae --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/index.js @@ -0,0 +1,30 @@ +import { t } from "ttag"; + +import { hasPremiumFeature } from "metabase-enterprise/settings"; +import { DataPermissionsHelp } from "metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp"; +import { PLUGIN_ADVANCED_PERMISSIONS } from "metabase/plugins"; + +const BLOCK_PERMISSION_OPTION = { + label: t`Block`, + value: "block", + icon: "close", + iconColor: "danger", +}; + +if (hasPremiumFeature("advanced_permissions")) { + PLUGIN_ADVANCED_PERMISSIONS.DataPermissionsHelp = DataPermissionsHelp; + + const addBlockPermissionWhenSelected = (options, value) => + value === BLOCK_PERMISSION_OPTION.value + ? [...options, BLOCK_PERMISSION_OPTION] + : options; + + PLUGIN_ADVANCED_PERMISSIONS.addTablePermissionOptions = addBlockPermissionWhenSelected; + PLUGIN_ADVANCED_PERMISSIONS.addSchemaPermissionOptions = addBlockPermissionWhenSelected; + PLUGIN_ADVANCED_PERMISSIONS.addDatabasePermissionOptions = options => [ + ...options, + BLOCK_PERMISSION_OPTION, + ]; + PLUGIN_ADVANCED_PERMISSIONS.isBlockPermission = value => + value === BLOCK_PERMISSION_OPTION.value; +} diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.jsx index d7beac5ec3a1..dfce219eb80a 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.jsx @@ -3,19 +3,30 @@ import React from "react"; import Radio from "metabase/components/Radio"; +import { + AuditContentRoot, + AuditContentHeading, + AuditContentTabs, + 
AuditContentData, +} from "./AuditContent.styled"; + export default class AuditContent extends React.Component { render() { const { title, subtitle, tabs, children, location, ...props } = this.props; // HACK: remove the last component to get the base page path. won't work with tabs using IndexRoute (IndexRedirect ok) const pagePath = location && location.pathname.replace(/\/\w+$/, ""); + + const hasHeading = title || subtitle; return ( -
-
-

{title}

- {subtitle &&
{subtitle}
} -
+ + {hasHeading && ( + + {title &&

{title}

} + {subtitle &&
{subtitle}
} +
+ )} {tabs && ( -
+ tab.path} onChange={this.props.router.push} /> -
+ )} -
+ {/* This allows the parent component to inject props into child route components, e.x. userId */} {React.Children.count(children) === 1 && // NOTE: workaround for https://github.com/facebook/react/issues/12136 !Array.isArray(children) ? React.cloneElement(React.Children.only(children), props) : children} -
-
+ + ); } } diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.styled.jsx new file mode 100644 index 000000000000..2c78eb91b0e3 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.styled.jsx @@ -0,0 +1,23 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; + +export const AuditContentRoot = styled.div` + flex: 1 0 auto; + flex-direction: column; + padding-bottom: 2rem; +`; + +export const AuditContentHeading = styled.div` + padding: 2rem 2rem 0 2rem; +`; + +export const AuditContentTabs = styled.div` + border-bottom: 1px solid ${color("border")}; + padding: 0 2rem; + margin-top: 0.5rem; +`; + +export const AuditContentData = styled.div` + height: 100%; + padding: 0 2rem; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.jsx new file mode 100644 index 000000000000..f4a6503e2b04 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.jsx @@ -0,0 +1,95 @@ +import React, { useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import { formatChannels } from "metabase/lib/notifications"; +import Button from "metabase/components/Button"; +import CheckBox from "metabase/components/CheckBox"; +import FormMessage from "metabase/components/form/FormMessage"; +import ModalContent from "metabase/components/ModalContent"; +import { CheckboxLabel } from "./AuditNotificationDeleteModal.styled"; + +const propTypes = { + item: PropTypes.object.isRequired, + type: PropTypes.oneOf(["alert", "pulse"]).isRequired, + onDelete: PropTypes.func, 
+ onClose: PropTypes.func, +}; + +const AuditNotificationDeleteModal = ({ item, type, onDelete, onClose }) => { + const [error, setError] = useState(); + const [checked, setChecked] = useState(false); + + const handleDeleteClick = async () => { + try { + await onDelete(item, true); + onClose(true); + } catch (error) { + setError(error); + } + }; + + const handleCheckedChange = event => { + setChecked(event.target.checked); + }; + + const handleClose = () => onClose(true); + + const modalFooter = [ + error ? : null, + , + , + ]; + const checkboxLabel = ( + {getChannelMessage(item, type)} + ); + + return ( + + + + ); +}; + +AuditNotificationDeleteModal.propTypes = propTypes; + +const getTitleMessage = (item, type) => { + switch (type) { + case "alert": + return t`Delete this alert?`; + case "pulse": + return t`Delete this subscription to ${item.name}?`; + } +}; + +const getChannelMessage = (item, type) => { + const channelMessage = formatChannels(item.channels); + + switch (type) { + case "alert": + return t`This alert will no longer be ${channelMessage}.`; + case "pulse": + return t`This dashboard will no longer be ${channelMessage}.`; + } +}; + +export default AuditNotificationDeleteModal; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.styled.jsx new file mode 100644 index 000000000000..d87da339081a --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.styled.jsx @@ -0,0 +1,8 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import CheckBox from "metabase/components/CheckBox"; + +export const CheckboxLabel = styled(CheckBox.Label)` + color: ${color("danger")}; + font-size: 1.12em; +`; diff --git 
a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/index.js new file mode 100644 index 000000000000..512ebf74457c --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/index.js @@ -0,0 +1 @@ +export { default } from "./AuditNotificationDeleteModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.jsx new file mode 100644 index 000000000000..e53fc697a4c4 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.jsx @@ -0,0 +1,107 @@ +import React, { useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Button from "metabase/components/Button"; +import FormMessage from "metabase/components/form/FormMessage"; +import ModalContent from "metabase/components/ModalContent"; +import UserPicker from "metabase/components/UserPicker"; + +const propTypes = { + item: PropTypes.object.isRequired, + type: PropTypes.oneOf(["alert", "pulse"]).isRequired, + users: PropTypes.array.isRequired, + onUpdate: PropTypes.func, + onDelete: PropTypes.func, + onClose: PropTypes.func, +}; + +const AuditNotificationEditModal = ({ + item, + type, + users, + onUpdate, + onDelete, + onClose, +}) => { + const [channels, setChannels] = useState(item.channels); + const [error, setError] = useState(); + const hasRecipients = channels.some(c => c.recipients.length > 0); + + const handleRecipientsChange = (recipients, index) => { + const newChannels = [...channels]; + newChannels[index] = { ...channels[index], recipients }; + setChannels(newChannels); + }; + + const 
handleUpdateClick = async () => { + try { + await onUpdate(item, channels); + onClose(true); + } catch (error) { + setError(error); + } + }; + + const handleDeleteClick = () => { + onDelete(item); + }; + + const handleClose = () => onClose(true); + + const modalFooter = [ + error ? : null, + , + , + , + ]; + + return ( + + {channels.map((channel, index) => ( + handleRecipientsChange(recipients, index)} + /> + ))} + + ); +}; + +AuditNotificationEditModal.propTypes = propTypes; + +const getTitleMessage = (item, type) => { + switch (type) { + case "alert": + return t`${item.card.name} alert recipients`; + case "pulse": + return t`${item.name} recipients`; + } +}; + +const getDeleteMessage = type => { + switch (type) { + case "alert": + return t`Delete this alert`; + case "pulse": + return t`Delete this subscription`; + } +}; + +export default AuditNotificationEditModal; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.styled.jsx new file mode 100644 index 000000000000..d1e323f6d074 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.styled.jsx @@ -0,0 +1,11 @@ +import styled from "styled-components"; +import Button from "metabase/components/Button"; +import { space } from "metabase/styled-components/theme"; + +export const ModalButton = styled(Button)` + margin-right: ${({ fullwidth }) => (fullwidth ? 
"auto" : "")}; + + &:not(:first-child) { + margin-left: ${space(2)}; + } +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/index.js new file mode 100644 index 000000000000..ea5d8983572e --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/index.js @@ -0,0 +1 @@ +export { default } from "./AuditNotificationEditModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.jsx index 101a28c09192..8ae3681ac29a 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.jsx @@ -1,7 +1,10 @@ import React from "react"; import PropTypes from "prop-types"; +import Button from "metabase/components/Button"; + import _ from "underscore"; +import { AuditParametersInput } from "./AuditParameters.styled"; const DEBOUNCE_PERIOD = 300; @@ -12,7 +15,15 @@ const propTypes = { placeholder: PropTypes.string.isRequired, }), ), + buttons: PropTypes.arrayOf( + PropTypes.shape({ + key: PropTypes.string.isRequired, + onClick: PropTypes.func.isRequired, + label: PropTypes.string.isRequired, + }), + ), children: PropTypes.func, + hasResults: PropTypes.bool, }; export default class AuditParameters extends React.Component { @@ -24,37 +35,55 @@ export default class AuditParameters extends React.Component { }; } - changeValue = (key: string, value: string) => { + changeValue = (key, value) => { this.setState({ inputValues: { ...this.state.inputValues, [key]: value }, }); this.commitValueDebounced(key, value); }; - commitValueDebounced = _.debounce((key: string, value: string) => { + commitValueDebounced = _.debounce((key, value) => 
{ this.setState({ committedValues: { ...this.state.committedValues, [key]: value }, }); }, DEBOUNCE_PERIOD); render() { - const { parameters, children } = this.props; + const { parameters, children, buttons, hasResults } = this.props; const { inputValues, committedValues } = this.state; + + const isEmpty = + hasResults === false && + inputValues && + Object.values(inputValues).every(v => v === ""); + return (
- {parameters.map(({ key, placeholder }) => ( - ( + { - this.changeValue(key, e.target.value); + disabled={isEmpty || disabled} + onChange={value => { + this.changeValue(key, value); }} + icon={icon} /> ))} + {buttons?.map(({ key, label, disabled, onClick }) => ( + + ))}
{children && children(committedValues)}
diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.styled.jsx new file mode 100644 index 000000000000..5186c316d8d1 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.styled.jsx @@ -0,0 +1,12 @@ +import styled from "styled-components"; + +import TextInput from "metabase/components/TextInput"; + +export const AuditParametersInput = styled(TextInput)` + display: inline-flex; + width: 240px; + + & + & { + margin-left: 1rem; + } +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditSidebar.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditSidebar.jsx index 1d6f1556ca9d..c5169e8b50c1 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditSidebar.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditSidebar.jsx @@ -1,5 +1,6 @@ /* eslint-disable react/prop-types */ import React from "react"; +import { t } from "ttag"; import { IndexLink } from "react-router"; import Link from "metabase/components/Link"; @@ -54,21 +55,22 @@ const AuditSidebar = ({ className, style, children }: Props) => ( const AuditAppSidebar = (props: Props) => ( - {/* - - */} - - + + - - - - + + + + - - - - + + + + + ); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.jsx new file mode 100644 index 000000000000..dd7c5cde4383 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.jsx @@ -0,0 +1,53 @@ +import React, { useCallback, useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Button from 
"metabase/components/Button"; +import ModalContent from "metabase/components/ModalContent"; +import FormMessage from "metabase/components/form/FormMessage"; +import { ModalMessage } from "./UnsubscribeUserForm.styled"; + +const propTypes = { + user: PropTypes.object.isRequired, + onUnsubscribe: PropTypes.func, + onClose: PropTypes.func, +}; + +const UnsubscribeUserForm = ({ user, onUnsubscribe, onClose }) => { + const [error, setError] = useState(); + + const handleConfirmClick = useCallback(async () => { + try { + await onUnsubscribe(user); + onClose(); + } catch (error) { + setError(error); + } + }, [user, onUnsubscribe, onClose]); + + return ( + : null, + , + , + ]} + onClose={onClose} + > + + {t`This will delete any dashboard subscriptions or alerts ${user.common_name} has created, and remove them as a recipient from any other subscriptions or alerts.`} + + + {t`This does not affect email distribution lists that are managed outside of Metabase.`} + + + ); +}; + +UnsubscribeUserForm.propTypes = propTypes; + +export default UnsubscribeUserForm; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.styled.jsx new file mode 100644 index 000000000000..748448f0e643 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.styled.jsx @@ -0,0 +1,7 @@ +import styled from "styled-components"; + +export const ModalMessage = styled.div` + &:not(:last-child) { + margin-bottom: 1rem; + } +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.unit.spec.js new file mode 100644 index 000000000000..1844a95fe400 --- /dev/null +++ 
b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.unit.spec.js @@ -0,0 +1,54 @@ +import React from "react"; +import { render, screen, waitFor } from "@testing-library/react"; +import UnsubscribeUserForm from "./UnsubscribeUserForm"; + +const getUser = () => ({ + id: 1, + common_name: "John Doe", +}); + +describe("UnsubscribeUserForm", () => { + it("should close on successful submit", () => { + const user = getUser(); + const onUnsubscribe = jest.fn().mockResolvedValue(); + const onClose = jest.fn(); + + render( + , + ); + + screen.getByText("Unsubscribe").click(); + + waitFor(() => { + expect(onUnsubscribe).toHaveBeenCalled(); + expect(onClose).toHaveBeenCalled(); + }); + }); + + it("should display a message on submit failure", () => { + const user = getUser(); + const error = { data: { message: "error" } }; + const onUnsubscribe = jest.fn().mockRejectedValue(); + const onClose = jest.fn(); + + render( + , + ); + + screen.getByText("Unsubscribe").click(); + + waitFor(() => { + expect(onUnsubscribe).toHaveBeenCalled(); + expect(onClose).not.toHaveBeenCalled(); + expect(screen.getByText(error.data.message)).toBeInTheDocument(); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/index.js new file mode 100644 index 000000000000..2d548055fc84 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/index.js @@ -0,0 +1 @@ +export { default } from "./UnsubscribeUserForm"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/AuditAlertDeleteModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/AuditAlertDeleteModal.jsx new file mode 100644 index 000000000000..0bb4328aff24 --- /dev/null +++ 
b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/AuditAlertDeleteModal.jsx @@ -0,0 +1,23 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Alerts from "metabase/entities/alerts"; +import AuditNotificationDeleteModal from "../../components/AuditNotificationDeleteModal"; + +const mapStateToProps = (state, { alert }) => ({ + item: alert, + type: "alert", +}); + +const mapDispatchToProps = { + onDelete: alert => Alerts.actions.setArchived(alert, true), +}; + +export default _.compose( + Alerts.load({ + id: (state, props) => Number.parseInt(props.params.alertId), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(AuditNotificationDeleteModal); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/index.js new file mode 100644 index 000000000000..234e762b8d3a --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/index.js @@ -0,0 +1 @@ +export { default } from "./AuditAlertDeleteModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/AuditAlertEditModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/AuditAlertEditModal.jsx new file mode 100644 index 000000000000..11bf8424aa7b --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/AuditAlertEditModal.jsx @@ -0,0 +1,28 @@ +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import _ from "underscore"; +import Alerts from "metabase/entities/alerts"; +import Users from "metabase/entities/users"; +import AuditNotificationEditModal from "../../components/AuditNotificationEditModal"; + +const mapStateToProps = (state, { alert }) => ({ + item: alert, + type: "alert", +}); + +const 
mapDispatchToProps = { + onUpdate: (alert, channels) => Alerts.actions.setChannels(alert, channels), + onDelete: alert => + push(`/admin/audit/subscriptions/alerts/${alert.id}/delete`), +}; + +export default _.compose( + Alerts.load({ + id: (state, props) => Number.parseInt(props.params.alertId), + }), + Users.loadList(), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(AuditNotificationEditModal); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/index.js new file mode 100644 index 000000000000..0e165a7bf2dd --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/index.js @@ -0,0 +1 @@ +export { default } from "./AuditAlertEditModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/AuditAlertTable.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/AuditAlertTable.jsx new file mode 100644 index 000000000000..4da7ab4b7fa4 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/AuditAlertTable.jsx @@ -0,0 +1,22 @@ +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import { t } from "ttag"; +import { AuditEntitiesTable } from "../AuditEntitiesTable"; +import * as AlertCards from "../../lib/cards/alerts"; + +const mapStateToProps = (state, props) => ({ + table: AlertCards.table(), + placeholder: t`Filter by question name`, + getExtraDataForClick: () => ({ type: "alert" }), + entities: state.entities.alerts, +}); + +const mapDispatchToProps = { + onRemoveRow: ({ pulse_id }) => + push(`/admin/audit/subscriptions/alerts/${pulse_id}/delete`), +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(AuditEntitiesTable); diff --git 
a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/index.js new file mode 100644 index 000000000000..93ec06ea0a13 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/index.js @@ -0,0 +1 @@ +export { default } from "./AuditAlertTable"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditDashboard.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditDashboard.jsx index d222bfd13eeb..e3c342828afe 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditDashboard.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditDashboard.jsx @@ -9,7 +9,7 @@ import DashboardData from "metabase/dashboard/hoc/DashboardData"; const DashboardWithData = DashboardData(Dashboard); -import { AuditMode } from "../lib/util"; +import { AuditMode } from "../lib/mode"; import type { AuditCard } from "../types"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/AuditEntitiesTable.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/AuditEntitiesTable.jsx new file mode 100644 index 000000000000..467ca81329cc --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/AuditEntitiesTable.jsx @@ -0,0 +1,22 @@ +import React from "react"; +import PropTypes from "prop-types"; +import _ from "underscore"; + +import AuditTableWithSearch from "../AuditTableWithSearch"; +import { usePrevious } from "metabase/hooks/use-previous"; + +const propTypes = { + entities: PropTypes.array, +}; + +export const AuditEntitiesTable = ({ entities, ...rest }) => { + const previousEntities = usePrevious(entities); + + const shouldReload = + previousEntities?.length === entities?.length && + !_.isEqual(previousEntities, 
entities); + + return ; +}; + +AuditEntitiesTable.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/index.js new file mode 100644 index 000000000000..c125725c068e --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/index.js @@ -0,0 +1 @@ +export * from "./AuditEntitiesTable"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/AuditSubscriptionDeleteModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/AuditSubscriptionDeleteModal.jsx new file mode 100644 index 000000000000..d5eee26ba500 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/AuditSubscriptionDeleteModal.jsx @@ -0,0 +1,23 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Pulses from "metabase/entities/pulses"; +import AuditNotificationDeleteModal from "../../components/AuditNotificationDeleteModal"; + +const mapStateToProps = (state, { pulse }) => ({ + item: pulse, + type: "alert", +}); + +const mapDispatchToProps = { + onDelete: pulse => Pulses.actions.setArchived(pulse, true), +}; + +export default _.compose( + Pulses.load({ + id: (state, props) => Number.parseInt(props.params.pulseId), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(AuditNotificationDeleteModal); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/index.js new file mode 100644 index 000000000000..48113ab0e9eb --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/index.js @@ -0,0 +1 @@ 
+export { default } from "./AuditSubscriptionDeleteModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/AuditSubscriptionEditModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/AuditSubscriptionEditModal.jsx new file mode 100644 index 000000000000..9598a0963c86 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/AuditSubscriptionEditModal.jsx @@ -0,0 +1,28 @@ +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import _ from "underscore"; +import Pulses from "metabase/entities/pulses"; +import Users from "metabase/entities/users"; +import AuditNotificationEditModal from "../../components/AuditNotificationEditModal"; + +const mapStateToProps = (state, { pulse }) => ({ + item: pulse, + type: "pulse", +}); + +const mapDispatchToProps = { + onUpdate: (pulse, channels) => Pulses.actions.setChannels(pulse, channels), + onDelete: alert => + push(`/admin/audit/subscriptions/subscriptions/${alert.id}/delete`), +}; + +export default _.compose( + Pulses.load({ + id: (state, props) => Number.parseInt(props.params.pulseId), + }), + Users.loadList(), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(AuditNotificationEditModal); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/index.js new file mode 100644 index 000000000000..995c595d11ef --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/index.js @@ -0,0 +1 @@ +export { default } from "./AuditSubscriptionEditModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/AuditSubscriptionTable.jsx 
b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/AuditSubscriptionTable.jsx new file mode 100644 index 000000000000..132b5d5e7040 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/AuditSubscriptionTable.jsx @@ -0,0 +1,22 @@ +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import { t } from "ttag"; +import * as SubscriptionCards from "../../lib/cards/subscriptions"; +import { AuditEntitiesTable } from "../AuditEntitiesTable"; + +const mapStateToProps = (state, props) => ({ + table: SubscriptionCards.table(), + placeholder: t`Filter by dashboard name`, + getExtraDataForClick: () => ({ type: "subscription" }), + entities: state.entities.pulses, +}); + +const mapDispatchToProps = { + onRemoveRow: ({ pulse_id }) => + push(`/admin/audit/subscriptions/subscriptions/${pulse_id}/delete`), +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(AuditEntitiesTable); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/index.js new file mode 100644 index 000000000000..e0162dadb3b0 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/index.js @@ -0,0 +1 @@ +export { default } from "./AuditSubscriptionTable"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.jsx index 1c8f744d1755..4312d39cc120 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.jsx @@ -12,37 +12,49 @@ import Question from "metabase-lib/lib/Question"; import { getMetadata } from "metabase/selectors/metadata"; import { 
usePagination } from "metabase/hooks/use-pagination"; -import { AuditMode } from "../lib/util"; +import { AuditMode } from "../lib/mode"; import QuestionLoadAndDisplay from "./QuestionLoadAndDisplay"; import "./AuditTableVisualization"; +import { PaginationControlsContainer } from "./AuditTable.styled"; const mapStateToProps = state => ({ metadata: getMetadata(state), }); -const mapDispatchToProps = { - onChangeLocation: push, -}; - const DEFAULT_PAGE_SIZE = 100; AuditTable.propTypes = { metadata: PropTypes.object.isRequired, table: PropTypes.object.isRequired, - onChangeLocation: PropTypes.func.isRequired, pageSize: PropTypes.number.isRequired, + reload: PropTypes.bool, + children: PropTypes.node, + dispatch: PropTypes.func.isRequired, + onLoad: PropTypes.func, + mode: PropTypes.shape({ + name: PropTypes.string.isRequired, + drills: PropTypes.func.isRequired, + }), }; function AuditTable({ metadata, table, - onChangeLocation, pageSize = DEFAULT_PAGE_SIZE, + mode = AuditMode, + children, + dispatch, + onLoad, ...rest }) { const [loadedCount, setLoadedCount] = useState(0); const { handleNextPage, handlePreviousPage, page } = usePagination(); + const handleOnLoad = results => { + setLoadedCount(results[0].row_count); + onLoad(results); + }; + const card = chain(table.card) .assoc("display", "audit-table") .assocIn(["dataset_query", "limit"], pageSize) @@ -51,6 +63,7 @@ function AuditTable({ const question = new Question(card, metadata); const shouldShowPagination = page > 0 || loadedCount === pageSize; + const handleChangeLocation = url => dispatch(push(url)); return (
@@ -59,13 +72,14 @@ function AuditTable({ className="mt3" question={question} metadata={metadata} - mode={AuditMode} - onChangeLocation={onChangeLocation} + mode={mode} + onChangeLocation={handleChangeLocation} onChangeCardAndRun={() => {}} - onLoad={results => setLoadedCount(results[0].row_count)} + onLoad={handleOnLoad} + dispatch={dispatch} {...rest} /> -
+ {shouldShowPagination && ( )} -
+ + {children}
); } -export default _.compose( - connect( - mapStateToProps, - mapDispatchToProps, - ), -)(AuditTable); +export default _.compose(connect(mapStateToProps))(AuditTable); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.styled.jsx new file mode 100644 index 000000000000..18408a6e6a2b --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.styled.jsx @@ -0,0 +1,9 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; + +export const PaginationControlsContainer = styled.div` + display: flex; + justify-content: flex-end; + padding-top: 1rem; + border-top: 1px solid ${color("border")}; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.jsx index de646a03035e..604bad334902 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.jsx @@ -1,5 +1,8 @@ import React from "react"; import PropTypes from "prop-types"; +import { t } from "ttag"; +import _ from "underscore"; +import cx from "classnames"; import { registerVisualization } from "metabase/visualizations/index"; @@ -10,29 +13,32 @@ import Table from "metabase/visualizations/visualizations/Table"; import EmptyState from "metabase/components/EmptyState"; import Icon from "metabase/components/Icon"; +import CheckBox from "metabase/components/CheckBox"; +import { RemoveRowButton } from "./AuditTableVisualization.styled"; +import { getRowValuesByColumns, getColumnName } from "../lib/mode"; import NoResults from "assets/img/no_results.svg"; -import { t } from "ttag"; - -import _ from "underscore"; -import cx from "classnames"; - -const 
getColumnName = column => column.remapped_to || column.name; - const propTypes = { series: PropTypes.array, visualizationIsClickable: PropTypes.func, onVisualizationClick: PropTypes.func, onSortingChange: PropTypes.func, + onRemoveRow: PropTypes.func, settings: PropTypes.object, isSortable: PropTypes.bool, sorting: PropTypes.shape({ column: PropTypes.string.isRequired, isAscending: PropTypes.bool.isRequired, }), + isSelectable: PropTypes.bool, + rowChecked: PropTypes.object, + onAllSelectClick: PropTypes.func, + onRowSelectClick: PropTypes.func, }; +const ROW_ID_IDX = 0; + export default class AuditTableVisualization extends React.Component { static identifier = "audit-table"; static noHeader = true; @@ -42,6 +48,14 @@ export default class AuditTableVisualization extends React.Component { static settings = Table.settings; static columnSettings = Table.columnSettings; + state = { + rerender: {}, + }; + + constructor(props) { + super(props); + } + handleColumnHeaderClick = column => { const { isSortable, onSortingChange, sorting } = this.props; @@ -57,6 +71,23 @@ export default class AuditTableVisualization extends React.Component { }); }; + handleAllSelectClick = (e, rows) => { + const { onAllSelectClick } = this.props; + this.setState({ rerender: {} }); + onAllSelectClick({ ...e, rows }); + }; + + handleRowSelectClick = (e, row, rowIndex) => { + const { onRowSelectClick } = this.props; + this.setState({ rerender: {} }); + onRowSelectClick({ ...e, row: row, rowIndex: rowIndex }); + }; + + handleRemoveRowClick = (row, cols) => { + const rowData = getRowValuesByColumns(row, cols); + this.props.onRemoveRow(rowData); + }; + render() { const { series: [ @@ -69,8 +100,12 @@ export default class AuditTableVisualization extends React.Component { onVisualizationClick, settings, isSortable, + isSelectable, + rowChecked, + onRemoveRow, } = this.props; + const canRemoveRows = !!onRemoveRow; const columnIndexes = settings["table.columns"] .filter(({ enabled }) => enabled) 
.map(({ name }) => _.findIndex(cols, col => col.name === name)); @@ -83,11 +118,18 @@ export default class AuditTableVisualization extends React.Component { /> ); } - return ( + {isSelectable && ( + + )} {columnIndexes.map(colIndex => { const column = cols[colIndex]; const isSortedByColumn = @@ -119,12 +161,30 @@ export default class AuditTableVisualization extends React.Component { {rows.map((row, rowIndex) => ( + {isSelectable && ( + + )} + {columnIndexes.map(colIndex => { const value = row[colIndex]; const column = cols[colIndex]; const clicked = { column, value, origin: { row, cols } }; const clickable = visualizationIsClickable(clicked); - const columnSettings = settings.column(column); + const columnSettings = { + ...settings.column(column), + ...settings["table.columns"][colIndex], + }; return ( ); })} + + {canRemoveRows && ( + + )} ))} diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.styled.jsx new file mode 100644 index 000000000000..56a93fddf0ad --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.styled.jsx @@ -0,0 +1,7 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import IconButtonWrapper from "metabase/components/IconButtonWrapper"; + +export const RemoveRowButton = styled(IconButtonWrapper)` + color: ${color("text-light")}; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableWithSearch.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableWithSearch.jsx index c0f929c6130d..31452a94483b 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableWithSearch.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableWithSearch.jsx @@ -1,4 +1,7 @@ import React from "react"; +import 
PropTypes from "prop-types"; + +import Icon from "metabase/components/Icon"; import AuditTable from "./AuditTable"; import AuditParameters from "../components/AuditParameters"; @@ -6,20 +9,18 @@ import AuditParameters from "../components/AuditParameters"; import { t } from "ttag"; import { updateIn } from "icepick"; -import type { AuditDashCard } from "../types"; - -type Props = { - placeholder?: string, - table: AuditDashCard, +const propTypes = { + placeholder: PropTypes.string, + table: PropTypes.object, }; // AuditTable but with a default search parameter that gets appended to `args` -const AuditTableWithSearch = ({ - placeholder = t`Search`, - table, - ...props -}: Props) => ( - +const AuditTableWithSearch = ({ placeholder = t`Search`, table, ...props }) => ( + }, + ]} + > {({ search }) => ( ); +AuditTableWithSearch.propTypes = propTypes; + export default AuditTableWithSearch; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/QuestionLoadAndDisplay.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/QuestionLoadAndDisplay.jsx index 6666eabf8631..df1de4e1fedb 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/QuestionLoadAndDisplay.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/QuestionLoadAndDisplay.jsx @@ -1,34 +1,57 @@ -/* eslint-disable react/prop-types */ -import React from "react"; - +import React, { useEffect, useRef, useImperativeHandle } from "react"; +import PropTypes from "prop-types"; import QuestionResultLoader from "metabase/containers/QuestionResultLoader"; import LoadingAndErrorWrapper from "metabase/components/LoadingAndErrorWrapper"; import Visualization from "metabase/visualizations/components/Visualization"; +const propTypes = { + question: PropTypes.object, + keepPreviousWhileLoading: PropTypes.bool, + reload: PropTypes.bool, + onLoad: PropTypes.func, + reloadRef: PropTypes.shape({ current: PropTypes.func }), +}; + const 
QuestionLoadAndDisplay = ({ question, - onLoad, keepPreviousWhileLoading, + reload, + onLoad, + reloadRef, ...props -}) => ( - - {({ loading, error, ...resultProps }) => { - const shouldShowLoader = loading && resultProps.results == null; - return ( - - - - ); - }} - -); +}) => { + const reloadFnRef = useRef(null); + + useImperativeHandle(reloadRef, () => () => reloadFnRef.current?.()); + + useEffect(() => { + reload && reloadFnRef.current?.(); + }, [reload]); + + return ( + + {({ loading, error, reload, ...resultProps }) => { + const shouldShowLoader = loading && resultProps.results == null; + reloadFnRef.current = reload; + + return ( + + + + ); + }} + + ); +}; + +QuestionLoadAndDisplay.propTypes = propTypes; export default QuestionLoadAndDisplay; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/UnsubscribeUserModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/UnsubscribeUserModal.jsx new file mode 100644 index 000000000000..3303781ef94c --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/UnsubscribeUserModal.jsx @@ -0,0 +1,24 @@ +import { connect } from "react-redux"; +import { t } from "ttag"; +import _ from "underscore"; +import Users from "metabase/entities/users"; +import { addUndo } from "metabase/redux/undo"; +import { AuditApi } from "../../lib/services"; +import UnsubscribeUserForm from "../../components/UnsubscribeUserForm"; + +const mapDispatchToProps = dispatch => ({ + onUnsubscribe: async ({ id }) => { + await AuditApi.unsubscribe_user({ id }); + dispatch(addUndo({ message: t`Unsubscribe successful` })); + }, +}); + +export default _.compose( + Users.load({ + id: (state, props) => Number.parseInt(props.params.userId), + }), + connect( + null, + mapDispatchToProps, + ), +)(UnsubscribeUserForm); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/index.js 
b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/index.js new file mode 100644 index 000000000000..5f15a735bebd --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/index.js @@ -0,0 +1 @@ +export { default } from "./UnsubscribeUserModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/index.js index 884359248e15..774278101093 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/index.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/index.js @@ -1,11 +1,23 @@ -import { PLUGIN_ADMIN_NAV_ITEMS, PLUGIN_ADMIN_ROUTES } from "metabase/plugins"; - -import { hasPremiumFeature } from "metabase-enterprise/settings"; import { t } from "ttag"; - -import getAuditRoutes from "./routes"; +import { + PLUGIN_ADMIN_NAV_ITEMS, + PLUGIN_ADMIN_ROUTES, + PLUGIN_ADMIN_USER_MENU_ITEMS, + PLUGIN_ADMIN_USER_MENU_ROUTES, +} from "metabase/plugins"; +import { hasPremiumFeature } from "metabase-enterprise/settings"; +import getAuditRoutes, { getUserMenuRotes } from "./routes"; if (hasPremiumFeature("audit_app")) { PLUGIN_ADMIN_NAV_ITEMS.push({ name: t`Audit`, path: "/admin/audit" }); PLUGIN_ADMIN_ROUTES.push(getAuditRoutes); + + PLUGIN_ADMIN_USER_MENU_ITEMS.push(user => [ + { + title: t`Unsubscribe from all subscriptions / alerts`, + link: `/admin/people/${user.id}/unsubscribe`, + }, + ]); + + PLUGIN_ADMIN_USER_MENU_ROUTES.push(getUserMenuRotes); } diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/alerts.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/alerts.js new file mode 100644 index 000000000000..226063eddb3a --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/alerts.js @@ -0,0 +1,27 @@ +export const table = () => ({ + card: { + name: "Alerts", + display: "table", + dataset_query: { + type: "internal", + fn: 
"metabase-enterprise.audit-app.pages.alerts/table", + args: [], + }, + visualization_settings: { + "table.columns": [ + { name: "card_id", enabled: true }, + { name: "pulse_id", enabled: false }, + { name: "recipients", enabled: true }, + { name: "subscription_type", enabled: true }, + { name: "collection_id", enabled: true }, + { name: "frequency", enabled: true }, + { name: "creator_id", enabled: true }, + { + name: "created_at", + enabled: true, + date_format: "M/D/YYYY", + }, + ], + }, + }, +}); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboard_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboard_detail.js index a37c8ac9056f..f371582cc196 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboard_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboard_detail.js @@ -4,7 +4,7 @@ export const viewsByTime = (dashboardId: number) => ({ display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboard-detail/views-by-time", + fn: "metabase-enterprise.audit-app.pages.dashboard-detail/views-by-time", args: [dashboardId, "day"], // FIXME: should this be automatic? 
}, }, @@ -16,7 +16,8 @@ export const revisionHistory = (dashboardId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboard-detail/revision-history", + fn: + "metabase-enterprise.audit-app.pages.dashboard-detail/revision-history", args: [dashboardId], }, visualization_settings: { @@ -36,7 +37,7 @@ export const cards = (dashboardId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboard-detail/cards", + fn: "metabase-enterprise.audit-app.pages.dashboard-detail/cards", args: [dashboardId], }, }, @@ -48,7 +49,7 @@ export const auditLog = (dashboardId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboard-detail/audit-log", + fn: "metabase-enterprise.audit-app.pages.dashboard-detail/audit-log", args: [dashboardId], }, visualization_settings: { diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboards.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboards.js index 43fa6dc454c5..2596f383f9a9 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboards.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboards.js @@ -1,23 +1,11 @@ -// DEPRECATED: use `views-and-saves-by-time ` instead. 
-export const viewsPerDay = () => ({ - card: { - name: "Total dashboard views per day", - display: "line", - dataset_query: { - type: "internal", - fn: "metabase-enterprise.audit.pages.dashboards/views-per-day", - args: [], - }, - }, -}); - export const viewsAndSavesByTime = () => ({ card: { name: "Dashboard views and saves per day", display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboards/views-and-saves-by-time", + fn: + "metabase-enterprise.audit-app.pages.dashboards/views-and-saves-by-time", args: ["day"], }, visualization_settings: { @@ -33,7 +21,7 @@ export const mostPopularAndSpeed = () => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.dashboards/most-popular-with-avg-speed", + "metabase-enterprise.audit-app.pages.dashboards/most-popular-with-avg-speed", args: [], }, }, @@ -45,7 +33,8 @@ export const mostCommonQuestions = () => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboards/most-common-questions", + fn: + "metabase-enterprise.audit-app.pages.dashboards/most-common-questions", args: [], }, }, @@ -57,7 +46,7 @@ export const table = (searchString?: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboards/table", + fn: "metabase-enterprise.audit-app.pages.dashboards/table", args: [], }, visualization_settings: { @@ -67,6 +56,7 @@ export const table = (searchString?: string) => ({ { name: "average_execution_time_ms", enabled: true }, { name: "cards", enabled: true }, { name: "saved_by_id", enabled: true }, + { name: "cache_ttl", enabled: true }, { name: "public_link", enabled: true, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/database_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/database_detail.js index 89f9d60d61b4..438987f6365e 100644 --- 
a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/database_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/database_detail.js @@ -4,7 +4,7 @@ export const auditLog = (databaseId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.database-detail/audit-log", + fn: "metabase-enterprise.audit-app.pages.database-detail/audit-log", args: [databaseId], }, visualization_settings: { diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/databases.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/databases.js index 99864486fc86..98cebbc4537c 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/databases.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/databases.js @@ -5,7 +5,7 @@ export const totalQueryExecutionsByDb = () => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.databases/total-query-executions-by-db", + "metabase-enterprise.audit-app.pages.databases/total-query-executions-by-db", args: [], }, visualization_settings: { @@ -27,7 +27,7 @@ export const queryExecutionsPerDbPerDay = () => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.databases/query-executions-per-db-per-day", + "metabase-enterprise.audit-app.pages.databases/query-executions-per-db-per-day", args: [], }, visualization_settings: { @@ -43,7 +43,8 @@ export const queryExecutionsByTime = () => ({ display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.databases/query-executions-by-time", + fn: + "metabase-enterprise.audit-app.pages.databases/query-executions-by-time", args: ["day"], }, visualization_settings: { @@ -59,7 +60,7 @@ export const table = (searchString?: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.databases/table", + fn: 
"metabase-enterprise.audit-app.pages.databases/table", args: searchString ? [searchString] : [], }, visualization_settings: { @@ -68,6 +69,7 @@ export const table = (searchString?: string) => ({ { name: "schemas", enabled: true }, { name: "tables", enabled: true }, { name: "sync_schedule", enabled: true }, + { name: "cache_ttl", enabled: true }, { name: "added_on", enabled: true, date_format: "M/D/YYYY, h:mm A" }, ], }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/downloads.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/downloads.js index 0475ffc76c8d..87ef2477ddde 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/downloads.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/downloads.js @@ -4,7 +4,7 @@ export const perDayBySize = () => ({ display: "scatter", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.downloads/per-day-by-size", + fn: "metabase-enterprise.audit-app.pages.downloads/per-day-by-size", args: [], }, visualization_settings: { @@ -21,7 +21,7 @@ export const perUser = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.downloads/per-user", + fn: "metabase-enterprise.audit-app.pages.downloads/per-user", args: [], }, visualization_settings: { @@ -37,7 +37,7 @@ export const bySize = () => ({ display: "bar", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.downloads/by-size", + fn: "metabase-enterprise.audit-app.pages.downloads/by-size", args: [], }, }, @@ -49,7 +49,7 @@ export const table = () => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.downloads/table", + fn: "metabase-enterprise.audit-app.pages.downloads/table", args: [], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/queries.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/queries.js index 
35580beb0db4..672428eeb2c1 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/queries.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/queries.js @@ -5,7 +5,7 @@ export const viewsAndAvgExecutionTimeByDay = () => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.queries/views-and-avg-execution-time-by-day", + "metabase-enterprise.audit-app.pages.queries/views-and-avg-execution-time-by-day", args: [], }, visualization_settings: { @@ -25,7 +25,7 @@ export const mostPopular = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.queries/most-popular", + fn: "metabase-enterprise.audit-app.pages.queries/most-popular", args: [], }, visualization_settings: { @@ -41,7 +41,7 @@ export const slowest = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.queries/slowest", + fn: "metabase-enterprise.audit-app.pages.queries/slowest", args: [], }, visualization_settings: { @@ -51,6 +51,45 @@ export const slowest = () => ({ }, }); +export const bad_table = ( + errorFilter, + dbFilter, + collectionFilter, + sortColumn, + sortDirection, +) => ({ + card: { + name: "Failing Questions", + display: "table", + dataset_query: { + type: "internal", + fn: "metabase-enterprise.audit-app.pages.queries/bad-table", + args: [ + errorFilter, + dbFilter, + collectionFilter, + sortColumn, + sortDirection, + ], + }, + visualization_settings: { + "table.columns": [ + { name: "card_id", enabled: true }, + { name: "error_substr", enabled: true }, + { name: "collection_id", enabled: true }, + { name: "database_id", enabled: true }, + { name: "schema", enabled: true }, + { name: "table_id", enabled: true }, + { name: "last_run_at", enabled: true }, + { name: "total_runs", enabled: true }, + { name: "num_dashboards", enabled: true }, + { name: "user_id", enabled: true }, + { name: "updated_at", enabled: true }, + ], + }, + }, +}); + export 
const table = ( questionFilter, collectionFilter, @@ -62,7 +101,7 @@ export const table = ( display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.queries/table", + fn: "metabase-enterprise.audit-app.pages.queries/table", args: [questionFilter, collectionFilter, sortColumn, sortDirection], }, visualization_settings: { diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/query_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/query_detail.js index a2a749904166..23b23a3b3c6a 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/query_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/query_detail.js @@ -4,7 +4,7 @@ export const details = (queryHash: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.query-detail/details", + fn: "metabase-enterprise.audit-app.pages.query-detail/details", args: [queryHash], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/question_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/question_detail.js index 52024fbbfbf3..59b76e5f5f3b 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/question_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/question_detail.js @@ -5,7 +5,7 @@ export const viewsByTime = questionId => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.question-detail/cached-views-by-time", + "metabase-enterprise.audit-app.pages.question-detail/cached-views-by-time", args: [questionId, "day"], }, visualization_settings: { @@ -34,7 +34,7 @@ export const averageExecutionTime = questionId => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.question-detail/avg-execution-time-by-time", + "metabase-enterprise.audit-app.pages.question-detail/avg-execution-time-by-time", 
args: [questionId, "day"], }, }, @@ -46,7 +46,8 @@ export const revisionHistory = (questionId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.question-detail/revision-history", + fn: + "metabase-enterprise.audit-app.pages.question-detail/revision-history", args: [questionId], }, visualization_settings: { @@ -66,13 +67,24 @@ export const auditLog = (questionId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.question-detail/audit-log", + fn: "metabase-enterprise.audit-app.pages.question-detail/audit-log", args: [questionId], }, visualization_settings: { "table.columns": [ { name: "user_id", enabled: true }, { name: "when", enabled: true }, + { + name: "what", + enabled: true, + // This needs to combinatorially explore the metadata boolean space: if n grows above 2, replace mustache + markdown_template: ` +{{#json.ignore_cache}}Requested un-cached results{{/json.ignore_cache}} +{{^json.ignore_cache}} +{{#json.cached}}Viewed (cached){{/json.cached}} +{{^json.cached}}Viewed{{/json.cached}} +{{/json.ignore_cache}}`, + }, ], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/schemas.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/schemas.js index 824d207daa83..57c800c87f64 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/schemas.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/schemas.js @@ -4,7 +4,7 @@ export const mostQueried = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.schemas/most-queried", + fn: "metabase-enterprise.audit-app.pages.schemas/most-queried", args: [], }, }, @@ -16,7 +16,7 @@ export const slowestSchemas = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.schemas/slowest-schemas", + fn: 
"metabase-enterprise.audit-app.pages.schemas/slowest-schemas", args: [], }, }, @@ -28,7 +28,7 @@ export const table = (searchString?: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.schemas/table", + fn: "metabase-enterprise.audit-app.pages.schemas/table", args: searchString ? [searchString] : [], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/subscriptions.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/subscriptions.js new file mode 100644 index 000000000000..cdebb07552a1 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/subscriptions.js @@ -0,0 +1,31 @@ +export const table = () => ({ + card: { + name: "Subscriptions", + display: "table", + dataset_query: { + type: "internal", + fn: "metabase-enterprise.audit-app.pages.dashboard-subscriptions/table", + args: [], + }, + visualization_settings: { + "table.columns": [ + { name: "dashboard_id", enabled: true }, + { name: "pulse_id", enabled: false }, + { name: "recipients", enabled: true }, + { name: "subscription_type", enabled: true }, + { name: "collection_id", enabled: true }, + { name: "frequency", enabled: true }, + { name: "creator_id", enabled: true }, + { + name: "created_at", + enabled: true, + date_format: "M/D/YYYY", + }, + { + name: "num_filters", + enabled: true, + }, + ], + }, + }, +}); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/table_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/table_detail.js index 267e69c2f97e..0f6b6e1b39e5 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/table_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/table_detail.js @@ -4,7 +4,7 @@ export const auditLog = (tableId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.table-detail/audit-log", + fn: 
"metabase-enterprise.audit-app.pages.table-detail/audit-log", args: [tableId], }, visualization_settings: { diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/tables.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/tables.js index ea3e557714c9..14cdb8b5568a 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/tables.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/tables.js @@ -4,7 +4,7 @@ export const mostQueried = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.tables/most-queried", + fn: "metabase-enterprise.audit-app.pages.tables/most-queried", args: [], }, visualization_settings: { @@ -20,7 +20,7 @@ export const leastQueried = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.tables/least-queried", + fn: "metabase-enterprise.audit-app.pages.tables/least-queried", args: [], }, visualization_settings: { @@ -36,7 +36,7 @@ export const table = (searchString?: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.tables/table", + fn: "metabase-enterprise.audit-app.pages.tables/table", args: searchString ? 
[searchString] : [], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/user_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/user_detail.js index b4be3a0e4bdb..b909cc88af44 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/user_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/user_detail.js @@ -4,7 +4,7 @@ export const table = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/table", + fn: "metabase-enterprise.audit-app.pages.user-detail/table", args: [userId], }, }, @@ -16,7 +16,8 @@ export const mostViewedDashboards = (userId: number) => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/most-viewed-dashboards", + fn: + "metabase-enterprise.audit-app.pages.user-detail/most-viewed-dashboards", args: [userId], }, visualization_settings: { @@ -32,7 +33,8 @@ export const mostViewedQuestions = (userId: number) => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/most-viewed-questions", + fn: + "metabase-enterprise.audit-app.pages.user-detail/most-viewed-questions", args: [userId], }, visualization_settings: { @@ -48,7 +50,8 @@ export const objectViewsByTime = (userId: number) => ({ display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/object-views-by-time", + fn: + "metabase-enterprise.audit-app.pages.user-detail/object-views-by-time", args: [userId, "card", "day"], }, }, @@ -58,7 +61,8 @@ export const objectViewsByTime = (userId: number) => ({ display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/object-views-by-time", + fn: + "metabase-enterprise.audit-app.pages.user-detail/object-views-by-time", args: [userId, "dashboard", "day"], }, }, @@ -71,7 +75,7 @@ export const 
queryViews = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/query-views", + fn: "metabase-enterprise.audit-app.pages.user-detail/query-views", args: [userId], }, visualization_settings: { @@ -93,7 +97,7 @@ export const dashboardViews = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/dashboard-views", + fn: "metabase-enterprise.audit-app.pages.user-detail/dashboard-views", args: [userId], }, visualization_settings: { @@ -112,7 +116,7 @@ export const createdDashboards = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/created-dashboards", + fn: "metabase-enterprise.audit-app.pages.user-detail/created-dashboards", args: [userId], }, }, @@ -124,7 +128,7 @@ export const createdQuestions = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/created-questions", + fn: "metabase-enterprise.audit-app.pages.user-detail/created-questions", args: [userId], }, }, @@ -136,7 +140,7 @@ export const downloads = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/downloads", + fn: "metabase-enterprise.audit-app.pages.user-detail/downloads", args: [userId], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/users.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/users.js index 814c7b922713..2e02148f3b76 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/users.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/users.js @@ -4,7 +4,7 @@ export const activeAndNewByTime = () => ({ display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/active-and-new-by-time", + fn: 
"metabase-enterprise.audit-app.pages.users/active-and-new-by-time", args: ["day"], }, visualization_settings: { @@ -26,7 +26,7 @@ export const activeUsersAndQueriesByDay = () => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.users/active-users-and-queries-by-day", + "metabase-enterprise.audit-app.pages.users/active-users-and-queries-by-day", args: [], }, visualization_settings: { @@ -47,7 +47,7 @@ export const mostActive = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/most-active", + fn: "metabase-enterprise.audit-app.pages.users/most-active", args: [], }, visualization_settings: { @@ -65,7 +65,7 @@ export const mostSaves = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/most-saves", + fn: "metabase-enterprise.audit-app.pages.users/most-saves", args: [], }, visualization_settings: { @@ -81,7 +81,8 @@ export const queryExecutionTimePerUser = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/query-execution-time-per-user", + fn: + "metabase-enterprise.audit-app.pages.users/query-execution-time-per-user", args: [], }, }, @@ -93,7 +94,7 @@ export const table = (searchString?: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/table", + fn: "metabase-enterprise.audit-app.pages.users/table", args: searchString ? 
[searchString] : [], }, visualization_settings: { @@ -114,7 +115,7 @@ export const auditLog = () => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/query-views", + fn: "metabase-enterprise.audit-app.pages.users/query-views", args: [], }, visualization_settings: { @@ -135,7 +136,7 @@ export const auditLog = () => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/dashboard-views", + fn: "metabase-enterprise.audit-app.pages.users/dashboard-views", args: [], }, }, @@ -148,7 +149,7 @@ export const dashboardViews = () => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/dashboard-views", + fn: "metabase-enterprise.audit-app.pages.users/dashboard-views", args: [], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/util.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/mode.js similarity index 61% rename from enterprise/frontend/src/metabase-enterprise/audit_app/lib/util.js rename to enterprise/frontend/src/metabase-enterprise/audit_app/lib/mode.js index 7f214845fcb7..42513af79796 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/util.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/mode.js @@ -1,14 +1,22 @@ +import { push } from "react-router-redux"; import _ from "underscore"; -import Question from "metabase-lib/lib/Question"; +export const getColumnName = column => column.remapped_to || column.name; -import type { - ClickObject, - QueryMode, -} from "metabase-types/types/Visualization"; +export const getRowValuesByColumns = (row, cols) => + cols.reduce((acc, col, index) => { + const columnName = getColumnName(col); + return { + ...acc, + [columnName]: row[index], + }; + }, {}); -const columnNameToUrl = { +export const columnNameToUrl = { + // No admin page for collections but still want to link to it + collection_id: value => 
`/collection/${value}`, user_id: value => `/admin/audit/member/${value}`, + creator_id: value => `/admin/audit/member/${value}`, viewed_by_id: value => `/admin/audit/member/${value}`, saved_by_id: value => `/admin/audit/member/${value}`, dashboard_id: value => `/admin/audit/dashboard/${value}`, @@ -20,19 +28,24 @@ const columnNameToUrl = { // NOTE: query_hash uses standard Base64 encoding which isn't URL safe so make sure to escape it query_hash: value => `/admin/audit/query/${encodeURIComponent(String(value))}`, + recipients: (_, clicked) => { + const pulseIdIndex = clicked.origin.cols.findIndex( + col => getColumnName(col) === "pulse_id", + ); + const pulseId = clicked.origin.row[pulseIdIndex]; + + return clicked.extraData.type === "subscription" + ? `/admin/audit/subscriptions/subscriptions/${pulseId}/edit` + : `/admin/audit/subscriptions/alerts/${pulseId}/edit`; + }, }; -const AuditDrill = ({ - question, - clicked, -}: { - question: Question, - clicked?: ClickObject, -}) => { +const AuditDrill = ({ question, clicked }) => { if (!clicked) { return []; } const metricAndDimensions = [clicked].concat(clicked.dimensions || []); + for (const { column, value } of metricAndDimensions) { if (column && columnNameToUrl[column.name] != null && value != null) { return [ @@ -40,8 +53,9 @@ const AuditDrill = ({ name: "detail", title: `View this`, default: true, - url() { - return columnNameToUrl[column.name](value); + action() { + const url = columnNameToUrl[column.name](value, clicked); + return push(url); }, }, ]; @@ -72,7 +86,7 @@ const AuditDrill = ({ return []; }; -export const AuditMode: QueryMode = { +export const AuditMode = { name: "audit", drills: () => [AuditDrill], }; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/services.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/services.js new file mode 100644 index 000000000000..a75abe4ac6be --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/services.js @@ 
-0,0 +1,5 @@ +import { DELETE } from "metabase/lib/api"; + +export const AuditApi = { + unsubscribe_user: DELETE("/api/ee/audit-app/user/:id/subscriptions"), +}; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditQueryDetail.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditQueryDetail.jsx index d63f3b3363b7..4f6d154e4e00 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditQueryDetail.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditQueryDetail.jsx @@ -64,6 +64,7 @@ import { getMetadata } from "metabase/selectors/metadata"; import NativeQuery from "metabase-lib/lib/queries/NativeQuery"; +import ExplicitSize from "metabase/components/ExplicitSize"; import { loadMetadataForCard } from "metabase/query_builder/actions"; const mapStateToProps = state => ({ metadata: getMetadata(state) }); @@ -73,13 +74,23 @@ const mapDispatchToProps = { loadMetadataForCard }; mapStateToProps, mapDispatchToProps, ) +@ExplicitSize() class QueryBuilderReadOnly extends React.Component { + state = { + isNativeEditorOpen: false, + }; + + setIsNativeEditorOpen = open => { + this.setState({ isNativeEditorOpen: open }); + }; + componentDidMount() { const { card, loadMetadataForCard } = this.props; loadMetadataForCard(card); } + render() { - const { card, metadata } = this.props; + const { card, metadata, height } = this.props; const question = new Question(card, metadata); const query = question.query(); @@ -91,6 +102,9 @@ class QueryBuilderReadOnly extends React.Component { query={query} location={{ query: {} }} readOnly + viewHeight={height} + isNativeEditorOpen={this.state.isNativeEditorOpen} + setIsNativeEditorOpen={this.setIsNativeEditorOpen} /> ); } else { diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/AuditSubscriptions.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/AuditSubscriptions.jsx new file mode 100644 
index 000000000000..860846f84bac --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/AuditSubscriptions.jsx @@ -0,0 +1,49 @@ +import React from "react"; +import { t } from "ttag"; + +import AuditContent from "../../components/AuditContent"; +import AuditAlertTable from "../../containers/AuditAlertTable"; +import AuditSubscriptionTable from "../../containers/AuditSubscriptionTable"; +import AuditAlertEditModal from "../../containers/AuditAlertEditModal"; +import AuditAlertDeleteModal from "../../containers/AuditAlertDeleteModal"; +import AuditSubscriptionEditModal from "../../containers/AuditSubscriptionEditModal"; +import AuditSubscriptionDeleteModal from "../../containers/AuditSubscriptionDeleteModal"; + +const AuditSubscriptions = props => ( + +); + +AuditSubscriptions.tabs = [ + { + path: "subscriptions", + title: t`Subscriptions`, + component: AuditSubscriptionTable, + modals: [ + { + path: ":pulseId/edit", + modal: AuditSubscriptionEditModal, + }, + { + path: ":pulseId/delete", + modal: AuditSubscriptionDeleteModal, + }, + ], + }, + { + path: "alerts", + title: t`Alerts`, + component: AuditAlertTable, + modals: [ + { + path: ":alertId/edit", + modal: AuditAlertEditModal, + }, + { + path: ":alertId/delete", + modal: AuditAlertDeleteModal, + }, + ], + }, +]; + +export default AuditSubscriptions; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/index.js new file mode 100644 index 000000000000..b6e16b1e0544 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/index.js @@ -0,0 +1 @@ +export { default } from "./AuditSubscriptions"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/routes.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/routes.jsx index a658c5b5fbf1..72444a17dad0 100644 --- 
a/enterprise/frontend/src/metabase-enterprise/audit_app/routes.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/routes.jsx @@ -1,11 +1,13 @@ import React from "react"; import { Route } from "metabase/hoc/Title"; +import { ModalRoute } from "metabase/hoc/ModalRoute"; import { IndexRoute, IndexRedirect } from "react-router"; import { t } from "ttag"; import _ from "underscore"; import AuditApp from "./containers/AuditApp"; +import UnsubscribeUserModal from "./containers/UnsubscribeUserModal/UnsubscribeUserModal"; import AuditOverview from "./pages/AuditOverview"; @@ -15,17 +17,15 @@ import AuditSchemas from "./pages/AuditSchemas"; import AuditSchemaDetail from "./pages/AuditSchemaDetail"; import AuditTables from "./pages/AuditTables"; import AuditTableDetail from "./pages/AuditTableDetail"; - import AuditQuestions from "./pages/AuditQuestions"; import AuditQuestionDetail from "./pages/AuditQuestionDetail"; import AuditDashboards from "./pages/AuditDashboards"; import AuditDashboardDetail from "./pages/AuditDashboardDetail"; import AuditQueryDetail from "./pages/AuditQueryDetail"; - import AuditUsers from "./pages/AuditUsers"; import AuditUserDetail from "./pages/AuditUserDetail"; - import AuditDownloads from "./pages/AuditDownloads"; +import AuditSubscriptions from "./pages/AuditSubscriptions"; type Page = { tabs?: Tab[], @@ -42,13 +42,24 @@ function getPageRoutes(path, page: Page) { // add a redirect for the default tab const defaultTab = getDefaultTab(page); if (defaultTab) { - subRoutes.push(); + subRoutes.push( + , + ); } // add sub routes for each tab if (page.tabs) { subRoutes.push( ...page.tabs.map(tab => ( - + + {tab.modals && + tab.modals.map(modal => ( + + ))} + )), ); } @@ -72,7 +83,7 @@ function getDefaultTab(page: Page): ?Tab { } const getRoutes = (store: any) => ( - + {/* */} @@ -92,7 +103,12 @@ const getRoutes = (store: any) => ( {getPageRoutes("downloads", AuditDownloads)} {getPageRoutes("members", AuditUsers)} 
{getPageRoutes("member/:userId", AuditUserDetail)} + {getPageRoutes("subscriptions", AuditSubscriptions)} ); +export const getUserMenuRotes = () => ( + +); + export default getRoutes; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.jsx b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.jsx new file mode 100644 index 000000000000..e9ceac8762c2 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.jsx @@ -0,0 +1,41 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import { formDomOnlyProps } from "metabase/lib/redux"; +import { + CacheTTLFieldContainer, + FieldText, + Input, +} from "./CacheTTLField.styled"; + +const propTypes = { + field: PropTypes.shape({ + name: PropTypes.string.isRequired, + value: PropTypes.number, + error: PropTypes.string, + }), + message: PropTypes.string, +}; + +export function CacheTTLField({ field, message, ...props }) { + const hasError = !!field.error; + return ( + + {message && ( + + {message} + + )} + + {t`hours`} + + ); +} + +CacheTTLField.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.styled.js b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.styled.js new file mode 100644 index 000000000000..40d1e32647fa --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.styled.js @@ -0,0 +1,33 @@ +import styled, { css } from "styled-components"; +import { color } from "metabase/lib/colors"; +import NumericInput from "metabase/components/NumericInput"; + +export const CacheTTLFieldContainer = styled.div` + display: flex; + align-items: center; +`; + +export const FieldText = styled.span` + color: ${props => color(props.hasError ? 
"error" : "text-dark")}; + ${props => css`margin-${props.margin}: 10px;`} +`; + +export const Input = styled(NumericInput)` + width: 50px; + text-align: center; + + color: ${props => color(props.hasError ? "error" : "text-dark")}; + font-weight: bold; + padding: 0.75em; + + border: 1px solid ${color("border")}; + border-radius: 4px; + outline: none; + + :focus, + :hover { + border-color: ${color("brand")}; + } + + transition: border 300ms ease-in-out; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.unit.spec.js new file mode 100644 index 000000000000..1c2bc1a09ac5 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.unit.spec.js @@ -0,0 +1,64 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { CacheTTLField } from "./CacheTTLField"; + +function setup({ name = "cache_ttl", message, value }) { + const onChange = jest.fn(); + render( + + Label + + , + ); + const field = screen.getByLabelText("Label"); + return { field, onChange }; +} + +describe("CacheTTLField", () => { + [ + { value: 0, expected: "0" }, + { value: 1, expected: "1" }, + { value: 12, expected: "12" }, + ].forEach(({ value, expected }) => { + it(`displays ${value} value as ${expected}`, () => { + const { field } = setup({ value }); + expect(field).toHaveValue(expected); + }); + }); + + it("displays a placeholder for null values", () => { + const { field } = setup({ value: null }); + + expect(field).toHaveAttribute("placeholder", "24"); + expect(field).toHaveValue(""); + }); + + it("displays message", () => { + setup({ message: "Cache results for" }); + expect(screen.queryByText("Cache results for")).toBeInTheDocument(); + }); + + it("calls onChange correctly", () => { + const { field, 
onChange } = setup({ value: 4 }); + + userEvent.clear(field); + userEvent.type(field, "14"); + field.blur(); + + expect(onChange).toHaveBeenLastCalledWith(14); + }); + + it("calls onChange with null value if input is cleared", () => { + const { field, onChange } = setup({ value: 4 }); + + userEvent.clear(field); + field.blur(); + + expect(onChange).toHaveBeenLastCalledWith(null); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/index.js b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/index.js new file mode 100644 index 000000000000..35634c0a52f2 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/index.js @@ -0,0 +1 @@ +export * from "./CacheTTLField"; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.jsx b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.jsx new file mode 100644 index 000000000000..c0cda2d8ddd0 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.jsx @@ -0,0 +1,57 @@ +import React, { useCallback, useEffect, useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Select, { Option } from "metabase/components/Select"; +import { CacheTTLField } from "../CacheTTLField"; +import { + CacheFieldContainer, + FieldContainer, +} from "./DatabaseCacheTTLField.styled"; + +const MODE = { + INSTANCE_DEFAULT: "instance-default", + CUSTOM: "custom", +}; + +const INSTANCE_DEFAULT_CACHE_TTL = null; +const DEFAULT_CUSTOM_CACHE_TTL = 24; // hours + +const propTypes = { + field: PropTypes.object.isRequired, +}; + +export function DatabaseCacheTTLField({ field }) { + const [mode, setMode] = useState( + field.value > 0 ? 
MODE.CUSTOM : MODE.INSTANCE_DEFAULT, + ); + + const onModeChange = useCallback(e => { + setMode(e.target.value); + }, []); + + useEffect(() => { + if (mode === MODE.INSTANCE_DEFAULT) { + field.onChange(INSTANCE_DEFAULT_CACHE_TTL); + } else if (field.value == null) { + field.onChange(DEFAULT_CUSTOM_CACHE_TTL); + } + }, [field, mode]); + + return ( + + + {mode === MODE.CUSTOM && ( + + + + )} + + ); +} + +DatabaseCacheTTLField.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.styled.jsx b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.styled.jsx new file mode 100644 index 000000000000..65f8eac58b5a --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.styled.jsx @@ -0,0 +1,12 @@ +import styled from "styled-components"; +import { space } from "metabase/styled-components/theme"; + +export const FieldContainer = styled.div` + display: flex; + flex-direction: row; + align-items: center; +`; + +export const CacheFieldContainer = styled.div` + margin-left: ${space(2)}; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.unit.spec.js new file mode 100644 index 000000000000..e4670251a828 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.unit.spec.js @@ -0,0 +1,72 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { DatabaseCacheTTLField } from "./DatabaseCacheTTLField"; + +function setup({ value = null } = {}) { + const onChange = jest.fn(); + render( + , + ); + return { onChange }; +} + 
+function selectMode(nextMode) { + const currentModeLabel = + nextMode === "custom" ? "Use instance default (TTL)" : "Custom"; + const nextModeLabel = + nextMode === "instance-default" ? "Use instance default (TTL)" : "Custom"; + + userEvent.click(screen.getByText(currentModeLabel)); + userEvent.click(screen.getByText(nextModeLabel)); +} + +describe("DatabaseCacheTTLField", () => { + it("displays 'Use instance default' option when cache_ttl is null", () => { + setup({ value: null }); + expect( + screen.queryByText("Use instance default (TTL)"), + ).toBeInTheDocument(); + expect(screen.queryByLabelText("Cache TTL Field")).not.toBeInTheDocument(); + }); + + it("displays 'Use instance default' option when cache_ttl is 0", () => { + setup({ value: 0 }); + expect( + screen.queryByText("Use instance default (TTL)"), + ).toBeInTheDocument(); + expect(screen.queryByLabelText("Cache TTL Field")).not.toBeInTheDocument(); + }); + + it("sets 24 hours as a default TTL custom value", () => { + const { onChange } = setup(); + selectMode("custom"); + expect(onChange).toHaveBeenLastCalledWith(24); + }); + + it("can select and fill custom cache TTL value", () => { + const { onChange } = setup(); + + selectMode("custom"); + const input = screen.getByPlaceholderText("24"); + userEvent.type(input, "{selectall}{backspace}14"); + input.blur(); + + expect(onChange).toHaveBeenLastCalledWith(14); + }); + + it("displays input when cache_ttl has value", () => { + setup({ value: 4 }); + expect(screen.queryByDisplayValue("4")).toBeInTheDocument(); + expect(screen.queryByText("Custom")).toBeInTheDocument(); + expect( + screen.queryByText("Use instance default (TTL)"), + ).not.toBeInTheDocument(); + }); + + it("can reset cache_ttl to instance default", () => { + const { onChange } = setup({ value: 48 }); + selectMode("instance-default"); + expect(onChange).toHaveBeenLastCalledWith(null); + }); +}); diff --git 
a/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/index.js b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/index.js new file mode 100644 index 000000000000..0e1d95230ac8 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/index.js @@ -0,0 +1 @@ +export * from "./DatabaseCacheTTLField"; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.jsx b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.jsx new file mode 100644 index 000000000000..51ab91308c92 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.jsx @@ -0,0 +1,75 @@ +import React, { useEffect, useMemo, useState } from "react"; +import { t } from "ttag"; +import PropTypes from "prop-types"; +import { duration } from "metabase/lib/formatting"; +import { getQuestionsImplicitCacheTTL } from "../../utils"; +import { + CacheTTLInput, + CacheTTLExpandedField, + StyledRadio, +} from "./QuestionCacheTTLField.styled"; + +const propTypes = { + field: PropTypes.shape({ + value: PropTypes.number, + onChange: PropTypes.func.isRequired, + }).isRequired, + question: PropTypes.object.isRequired, // metabase-lib's Question instance +}; + +const DEFAULT_CACHE_TTL = null; + +const MODE = { + DEFAULT: "default", + CUSTOM: "custom", +}; + +function getInitialMode(question, implicitCacheTTL) { + if (question.card().cache_ttl > 0 || !implicitCacheTTL) { + return MODE.CUSTOM; + } + return MODE.DEFAULT; +} + +export function QuestionCacheTTLField({ field, question, ...props }) { + const implicitCacheTTL = useMemo( + () => getQuestionsImplicitCacheTTL(question), + [question], + ); + + const [mode, setMode] = useState(getInitialMode(question, implicitCacheTTL)); + + useEffect(() => { + if (mode === MODE.DEFAULT) { + 
field.onChange(DEFAULT_CACHE_TTL); + } + }, [field, mode]); + + if (!implicitCacheTTL) { + return ; + } + + // implicitCacheTTL is in seconds and duration works with milliseconds + const defaultCachingLabel = duration(implicitCacheTTL * 1000); + + return ( +
+ setMode(val)} + options={[ + { + name: t`Use default` + ` (${defaultCachingLabel})`, + value: MODE.DEFAULT, + }, + { name: t`Custom`, value: MODE.CUSTOM }, + ]} + vertical + showButtons + /> + {mode === MODE.CUSTOM && } +
+ ); +} + +QuestionCacheTTLField.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.styled.jsx b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.styled.jsx new file mode 100644 index 000000000000..1d00df64c42d --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.styled.jsx @@ -0,0 +1,21 @@ +import React from "react"; +import { t } from "ttag"; +import styled from "styled-components"; +import { space } from "metabase/styled-components/theme"; +import Radio from "metabase/components/Radio"; +import { CacheTTLField } from "../CacheTTLField"; + +export function CacheTTLInput(props) { + return ; +} + +export const CacheTTLExpandedField = styled(CacheTTLInput)` + margin-left: 1.3rem; +`; + +export const StyledRadio = styled(Radio)` + li { + margin-top: ${space(0)}; + font-weight: bold; + } +`; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.unit.spec.js new file mode 100644 index 000000000000..dda8007c75e5 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.unit.spec.js @@ -0,0 +1,149 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { msToMinutes, msToHours } from "metabase/lib/time"; +import MetabaseSettings from "metabase/lib/settings"; +import { QuestionCacheTTLField } from "./QuestionCacheTTLField"; + +const TEN_MINUTES = 10 * 60 * 1000; + +function setup({ + value = null, + avgQueryDuration, + databaseCacheTTL = null, + cacheTTLMultiplier, + minCacheThreshold, +} = {}) { + const 
onChange = jest.fn(); + + const spy = jest.spyOn(MetabaseSettings, "get"); + spy.mockImplementation(key => { + if (key === "enable-query-caching") { + return true; + } + if (key === "query-caching-ttl-ratio") { + return cacheTTLMultiplier; + } + if (key === "query-caching-min-ttl") { + return minCacheThreshold; + } + }); + + const question = { + card: () => ({ + average_query_time: avgQueryDuration, + cache_ttl: value, + }), + database: () => ({ + cache_ttl: databaseCacheTTL, + }), + }; + + render( +
+ Label + + , + ); + return { onChange, avgQueryDuration }; +} + +const DEFAULT_MODE_REGEXP = /Use default \([.0-9]+ hours\)/; + +function selectMode(nextMode) { + const currentModeLabel = + nextMode === "custom" ? DEFAULT_MODE_REGEXP : "Custom"; + const nextModeLabel = nextMode === "default" ? DEFAULT_MODE_REGEXP : "Custom"; + + userEvent.click(screen.getByText(currentModeLabel)); + userEvent.click(screen.getByText(nextModeLabel)); +} + +function fillValue(input, value) { + userEvent.clear(input); + userEvent.type(input, String(value)); + input.blur(); +} + +const DEFAULT_MODE_TEXT_TEST_ID = /radio-[0-9]+-default-name/; + +describe("QuestionCacheTTLField", () => { + it("displays a placeholder if question is not cached", () => { + setup(); + expect(screen.getByLabelText("Label")).toHaveAttribute("placeholder", "24"); + }); + + it("displays question's cache TTL value", () => { + setup({ value: 21 }); + expect(screen.getByLabelText("Label")).toHaveValue("21"); + }); + + it("displays default caching value if question is cached on a db level", () => { + setup({ databaseCacheTTL: 32 }); + expect(screen.queryByTestId(DEFAULT_MODE_TEXT_TEST_ID)).toHaveTextContent( + "Use default (32 hours)", + ); + }); + + it("displays default caching value if question is cached on an instance level", () => { + setup({ + avgQueryDuration: TEN_MINUTES, + minCacheThreshold: 0, + cacheTTLMultiplier: 100, + }); + const expectedTTL = Math.round(msToHours(TEN_MINUTES * 100)); + expect(screen.queryByTestId(DEFAULT_MODE_TEXT_TEST_ID)).toHaveTextContent( + `Use default (${expectedTTL} hours)`, + ); + }); + + it("handles if cache duration is in minutes", () => { + setup({ + avgQueryDuration: 14400, + minCacheThreshold: 0, + cacheTTLMultiplier: 100, + }); + const expectedTTL = Math.round(msToMinutes(14400 * 100)); + expect(screen.queryByTestId(DEFAULT_MODE_TEXT_TEST_ID)).toHaveTextContent( + `Use default (${expectedTTL} minutes)`, + ); + }); + + it("calls onChange correctly when filling the input", 
() => { + const { onChange } = setup(); + fillValue(screen.getByLabelText("Label"), 48); + expect(onChange).toHaveBeenLastCalledWith(48); + }); + + it("offers to provide custom cache TTL when question is cached on a db level", () => { + setup({ databaseCacheTTL: 32 }); + + expect(screen.queryByLabelText("Use default (32 hours)")).toBeChecked(); + expect(screen.queryByLabelText("Custom")).not.toBeChecked(); + }); + + it("allows to overwrite default caching with custom value", () => { + const { onChange } = setup({ databaseCacheTTL: 32 }); + + selectMode("custom"); + fillValue(screen.getByLabelText("Label"), 24); + + expect(onChange).toHaveBeenLastCalledWith(24); + }); + + it("offers to switch to default caching instead of a custom TTL", () => { + setup({ value: 24, databaseCacheTTL: 32 }); + + expect(screen.queryByLabelText("Use default (32 hours)")).not.toBeChecked(); + expect(screen.queryByLabelText("Custom")).toBeChecked(); + }); + + it("allows to switch to default caching instead of a custom TTL", () => { + const { onChange } = setup({ value: 24, databaseCacheTTL: 32 }); + selectMode("default"); + expect(onChange).toHaveBeenLastCalledWith(null); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/index.js b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/index.js new file mode 100644 index 000000000000..bbc522fae524 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/index.js @@ -0,0 +1 @@ +export * from "./QuestionCacheTTLField"; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/index.js b/enterprise/frontend/src/metabase-enterprise/caching/index.js new file mode 100644 index 000000000000..68af6b6f0f18 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/index.js @@ -0,0 +1,51 @@ +import React from "react"; +import { t, jt } from "ttag"; +import { hasPremiumFeature } from 
"metabase-enterprise/settings"; +import { PLUGIN_CACHING, PLUGIN_FORM_WIDGETS } from "metabase/plugins"; +import Link from "metabase/components/Link"; +import { CacheTTLField } from "./components/CacheTTLField"; +import { DatabaseCacheTTLField } from "./components/DatabaseCacheTTLField"; +import { QuestionCacheTTLField } from "./components/QuestionCacheTTLField"; +import { + getQuestionsImplicitCacheTTL, + validateCacheTTL, + normalizeCacheTTL, +} from "./utils"; + +function getDatabaseCacheTTLFieldDescription() { + return ( + + {jt`How long to keep question results. By default, Metabase will use the value you supply on the ${( + {t`cache settings page`} + )}, but if this database has other factors that influence the freshness of data, it could make sense to set a custom duration. You can also choose custom durations on individual questions or dashboards to help improve performance.`} + + ); +} + +if (hasPremiumFeature("advanced_config")) { + PLUGIN_CACHING.cacheTTLFormField = { + name: "cache_ttl", + validate: validateCacheTTL, + normalize: normalizeCacheTTL, + }; + + PLUGIN_CACHING.databaseCacheTTLFormField = { + name: "cache_ttl", + type: "databaseCacheTTL", + title: t`Default result cache duration`, + description: getDatabaseCacheTTLFieldDescription(), + descriptionPosition: "bottom", + validate: validateCacheTTL, + normalize: normalizeCacheTTL, + }; + + PLUGIN_FORM_WIDGETS.dashboardCacheTTL = CacheTTLField; + PLUGIN_FORM_WIDGETS.databaseCacheTTL = DatabaseCacheTTLField; + PLUGIN_FORM_WIDGETS.questionCacheTTL = QuestionCacheTTLField; + + PLUGIN_CACHING.getQuestionsImplicitCacheTTL = getQuestionsImplicitCacheTTL; +} diff --git a/enterprise/frontend/src/metabase-enterprise/caching/utils.js b/enterprise/frontend/src/metabase-enterprise/caching/utils.js new file mode 100644 index 000000000000..525ff74b75e1 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/utils.js @@ -0,0 +1,56 @@ +import { t } from "ttag"; +import { msToSeconds } from 
"metabase/lib/time"; +import MetabaseSettings from "metabase/lib/settings"; + +/** + * If a question doesn't have an explicitly set cache TTL, + * its results can still be cached with a db-level cache TTL + * or with an instance level setting + * + * More on caching: + * https://www.metabase.com/docs/latest/administration-guide/14-caching.html + * + * @param {Question} metabase-lib Question instance + * @returns {number} — cache TTL value in seconds (from db or instance default) that will be used + */ +export function getQuestionsImplicitCacheTTL(question) { + if (!MetabaseSettings.get("enable-query-caching")) { + return null; + } + if (question.database().cache_ttl) { + // Database's cache TTL is in hours, need to convert that to seconds + return question.database().cache_ttl * 60 * 60; + } + const avgQueryDurationInSeconds = msToSeconds( + question.card().average_query_time, + ); + if (checkQuestionWillBeCached(avgQueryDurationInSeconds)) { + return calcQuestionMagicCacheDuration(avgQueryDurationInSeconds); + } + return null; +} + +function checkQuestionWillBeCached(avgQueryDurationInSeconds) { + const minQueryDurationThresholdSeconds = MetabaseSettings.get( + "query-caching-min-ttl", + ); + return avgQueryDurationInSeconds > minQueryDurationThresholdSeconds; +} + +function calcQuestionMagicCacheDuration(avgQueryDurationInSeconds) { + const cacheTTLMultiplier = MetabaseSettings.get("query-caching-ttl-ratio"); + return avgQueryDurationInSeconds * cacheTTLMultiplier; +} + +export function validateCacheTTL(value) { + if (value === null) { + return; + } + if (!Number.isSafeInteger(value) || value < 0) { + return t`Must be a positive integer value`; + } +} + +export function normalizeCacheTTL(value) { + return value === 0 ? 
null : value; +} diff --git a/enterprise/frontend/src/metabase-enterprise/caching/utils.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/caching/utils.unit.spec.js new file mode 100644 index 000000000000..9b73ebab8701 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/utils.unit.spec.js @@ -0,0 +1,94 @@ +import { msToSeconds, hoursToSeconds } from "metabase/lib/time"; +import MetabaseSettings from "metabase/lib/settings"; +import { getQuestionsImplicitCacheTTL, validateCacheTTL } from "./utils"; + +describe("validateCacheTTL", () => { + const validTestCases = [null, 0, 1, 6, 42]; + const invalidTestCases = [-1, -1.2, 0.5, 4.3]; + + validTestCases.forEach(value => { + it(`should be valid for ${value}`, () => { + expect(validateCacheTTL(value)).toBe(undefined); + }); + }); + + invalidTestCases.forEach(value => { + it(`should return error for ${value}`, () => { + expect(validateCacheTTL(value)).toBe("Must be a positive integer value"); + }); + }); +}); + +describe("getQuestionsImplicitCacheTTL", () => { + const TEN_MINUTES = 10 * 60 * 1000; + const DEFAULT_CACHE_TTL_MULTIPLIER = 10; + + function setup({ + cachingEnabled = true, + avgQueryTime = null, + databaseCacheTTL = null, + cacheTTLMultiplier = DEFAULT_CACHE_TTL_MULTIPLIER, + minCacheThreshold = 60, + } = {}) { + const spy = jest.spyOn(MetabaseSettings, "get"); + spy.mockImplementation(key => { + if (key === "enable-query-caching") { + return cachingEnabled; + } + if (key === "query-caching-ttl-ratio") { + return cachingEnabled ? cacheTTLMultiplier : null; + } + if (key === "query-caching-min-ttl") { + return cachingEnabled ? 
minCacheThreshold : null; + } + }); + + return { + card: () => ({ + average_query_time: avgQueryTime, + }), + database: () => ({ + cache_ttl: databaseCacheTTL, + }), + }; + } + + it("returns database's cache TTL if set", () => { + const question = setup({ databaseCacheTTL: 10 }); + expect(getQuestionsImplicitCacheTTL(question)).toBe(hoursToSeconds(10)); + }); + + it("returns 'magic TTL' if there is no prior caching strategy", () => { + const question = setup({ avgQueryTime: TEN_MINUTES }); + + expect(getQuestionsImplicitCacheTTL(question)).toBe( + msToSeconds(TEN_MINUTES * DEFAULT_CACHE_TTL_MULTIPLIER), + ); + }); + + it("returns null if instance-level caching enabled, but the query doesn't pass the min exec time threshold", () => { + const question = setup({ + avgQueryTime: TEN_MINUTES, + minCacheThreshold: TEN_MINUTES * 2, + }); + expect(getQuestionsImplicitCacheTTL(question)).toBe(null); + }); + + it("prefers database cache TTL over instance-level one", () => { + const question = setup({ databaseCacheTTL: 10, avgQueryTime: TEN_MINUTES }); + expect(getQuestionsImplicitCacheTTL(question)).toBe(hoursToSeconds(10)); + }); + + it("returns null if caching disabled, but instance level caching parameters are present", () => { + const question = setup({ + avgQueryTime: TEN_MINUTES, + cachingEnabled: false, + }); + expect(getQuestionsImplicitCacheTTL(question)).toBe(null); + }); + + it("returns null if caching disabled, but database has a cache ttl", () => { + const question = setup({ databaseCacheTTL: 10, cachingEnabled: false }); + expect(getQuestionsImplicitCacheTTL(question)).toBe(null); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.jsx b/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.jsx new file mode 100644 index 000000000000..adfa981181be --- /dev/null +++ 
b/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.jsx @@ -0,0 +1,37 @@ +import React from "react"; +import PropTypes from "prop-types"; + +import Icon from "metabase/components/Icon"; +import { color } from "metabase/lib/colors"; + +import { AUTHORITY_LEVELS } from "../constants"; +import { isRegularCollection } from "../utils"; + +const propTypes = { + tooltip: PropTypes.string, + collection: PropTypes.shape({ + authority_level: PropTypes.oneOf(["official"]), + }), +}; + +export function CollectionAuthorityLevelIcon({ + collection, + tooltip = "default", + ...iconProps +}) { + if (isRegularCollection(collection)) { + return null; + } + const level = AUTHORITY_LEVELS[collection.authority_level]; + return ( + + ); +} + +CollectionAuthorityLevelIcon.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.unit.spec.js new file mode 100644 index 000000000000..778512bf6ee9 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.unit.spec.js @@ -0,0 +1,78 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { CollectionAuthorityLevelIcon } from "./CollectionAuthorityLevelIcon"; + +describe("CollectionAuthorityLevelIcon", () => { + describe("regular collections", () => { + [ + { + name: "collection without authority level", + collection: {}, + }, + { + name: "regular collection", + collection: { + authority_level: null, + }, + }, + ].forEach(({ collection, name }) => { + it(`doesn't render for ${name}`, () => { + render(); + expect(screen.queryByLabelText("folder icon")).toBeNull(); + }); + }); + }); + + describe("official collections", () => { + const OFFICIAL_COLLECTION = { + 
authority_level: "official", + }; + + function renderOfficialCollection({ + collection = OFFICIAL_COLLECTION, + ...props + } = {}) { + render( + , + ); + } + + function queryOfficialIcon() { + return screen.queryByLabelText("badge icon"); + } + + it(`renders correctly`, () => { + renderOfficialCollection(); + expect(queryOfficialIcon()).toBeInTheDocument(); + }); + + it(`displays a tooltip by default`, () => { + renderOfficialCollection(); + userEvent.hover(queryOfficialIcon()); + expect(screen.getByRole("tooltip")).toHaveTextContent( + "Official collection", + ); + }); + + it(`can display different tooltip`, () => { + renderOfficialCollection({ tooltip: "belonging" }); + userEvent.hover(queryOfficialIcon()); + expect(screen.getByRole("tooltip")).toHaveTextContent( + "Belongs to an Official collection", + ); + }); + + it(`can display custom tooltip text`, () => { + renderOfficialCollection({ tooltip: "Hello" }); + userEvent.hover(queryOfficialIcon()); + expect(screen.getByRole("tooltip")).toHaveTextContent("Hello"); + }); + + it(`can hide tooltip`, () => { + renderOfficialCollection({ tooltip: null }); + userEvent.hover(queryOfficialIcon()); + expect(screen.queryByLabelText("tooltip")).toBeNull(); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.jsx b/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.jsx new file mode 100644 index 000000000000..d1b83d5378e3 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.jsx @@ -0,0 +1,67 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; + +import CheckBox from "metabase/components/CheckBox"; +import { + SegmentedControl, + optionShape, +} from "metabase/components/SegmentedControl"; + +import { AUTHORITY_LEVELS } from "../constants"; +import { FormFieldRoot, Label } from 
"./FormCollectionAuthorityLevel.styled"; + +const propTypes = { + field: PropTypes.shape({ + value: PropTypes.any, + initialValue: PropTypes.any, + onChange: PropTypes.func.isRequired, + }).isRequired, + options: PropTypes.arrayOf(optionShape).isRequired, + values: PropTypes.shape({ + id: PropTypes.number, + authority_level: PropTypes.oneOf(["official"]), + update_collection_tree_authority_level: PropTypes.bool, + }), + onChangeField: PropTypes.func.isRequired, +}; + +export function FormCollectionAuthorityLevel({ + field, + options, + values, + onChangeField, +}) { + const isNewCollection = !values.id; + const selectedAuthorityLevel = + AUTHORITY_LEVELS[field.value] || AUTHORITY_LEVELS.regular; + const shouldSuggestToUpdateChildren = + !isNewCollection && field.initialValue !== field.value; + return ( + + + {shouldSuggestToUpdateChildren && ( + {t`Make all sub-collections ${selectedAuthorityLevel.name}, too.`} + } + checked={values.update_collection_tree_authority_level} + onChange={e => + onChangeField( + "update_collection_tree_authority_level", + e.target.checked, + ) + } + /> + )} + + ); +} + +FormCollectionAuthorityLevel.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.styled.jsx b/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.styled.jsx new file mode 100644 index 000000000000..9af08dd82608 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.styled.jsx @@ -0,0 +1,16 @@ +import styled from "styled-components"; +import CheckBox from "metabase/components/CheckBox"; +import { color } from "metabase/lib/colors"; + +export const FormFieldRoot = styled.div` + display: flex; + align-items: center; + justify-content: space-between; +`; + +export const Label = styled(CheckBox.Label)` + color: ${color("text-dark")}; + font-size: 1em; + font-weight: bold; + margin-bottom: 1px; 
+`; diff --git a/enterprise/frontend/src/metabase-enterprise/collections/constants.js b/enterprise/frontend/src/metabase-enterprise/collections/constants.js new file mode 100644 index 000000000000..d511493f3848 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/constants.js @@ -0,0 +1,23 @@ +import { t } from "ttag"; + +export const REGULAR_COLLECTION = { + type: null, + name: t`Regular`, + icon: "folder", +}; + +export const OFFICIAL_COLLECTION = { + type: "official", + name: t`Official`, + icon: "badge", + color: "saturated-yellow", + tooltips: { + default: t`Official collection`, + belonging: t`Belongs to an Official collection`, + }, +}; + +export const AUTHORITY_LEVELS = { + [OFFICIAL_COLLECTION.type]: OFFICIAL_COLLECTION, + [REGULAR_COLLECTION.type]: REGULAR_COLLECTION, +}; diff --git a/enterprise/frontend/src/metabase-enterprise/collections/index.js b/enterprise/frontend/src/metabase-enterprise/collections/index.js new file mode 100644 index 000000000000..4d9a2a0a166c --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/index.js @@ -0,0 +1,50 @@ +import { t } from "ttag"; +import { + PLUGIN_FORM_WIDGETS, + PLUGIN_COLLECTIONS, + PLUGIN_COLLECTION_COMPONENTS, +} from "metabase/plugins"; +import { FormCollectionAuthorityLevel } from "./components/FormCollectionAuthorityLevel"; +import { CollectionAuthorityLevelIcon } from "./components/CollectionAuthorityLevelIcon"; +import { + AUTHORITY_LEVELS, + REGULAR_COLLECTION, + OFFICIAL_COLLECTION, +} from "./constants"; +import { isRegularCollection } from "./utils"; + +PLUGIN_COLLECTIONS.isRegularCollection = isRegularCollection; + +PLUGIN_COLLECTIONS.REGULAR_COLLECTION = REGULAR_COLLECTION; + +PLUGIN_COLLECTIONS.AUTHORITY_LEVEL = AUTHORITY_LEVELS; + +PLUGIN_COLLECTIONS.authorityLevelFormFields = [ + { + name: "authority_level", + title: t`Collection type`, + info: t`The contents of Official collections will get a badge by their name and will be more likely to show up in 
search results.`, + type: "collectionAuthorityLevel", + options: [ + { + name: REGULAR_COLLECTION.name, + value: REGULAR_COLLECTION.type, + icon: REGULAR_COLLECTION.icon, + }, + { + name: OFFICIAL_COLLECTION.name, + value: OFFICIAL_COLLECTION.type, + icon: OFFICIAL_COLLECTION.icon, + selectedColor: OFFICIAL_COLLECTION.color, + }, + ], + }, + { + name: "update_collection_tree_authority_level", + type: "hidden", + }, +]; + +PLUGIN_FORM_WIDGETS.collectionAuthorityLevel = FormCollectionAuthorityLevel; + +PLUGIN_COLLECTION_COMPONENTS.CollectionAuthorityLevelIcon = CollectionAuthorityLevelIcon; diff --git a/enterprise/frontend/src/metabase-enterprise/collections/utils.js b/enterprise/frontend/src/metabase-enterprise/collections/utils.js new file mode 100644 index 000000000000..2b880fa8eabb --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/utils.js @@ -0,0 +1,6 @@ +import { REGULAR_COLLECTION } from "./constants"; + +export function isRegularCollection({ authority_level }) { + // Root, personal collections don't have `authority_level` + return !authority_level || authority_level === REGULAR_COLLECTION.type; +} diff --git a/enterprise/frontend/src/metabase-enterprise/collections/utils.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/collections/utils.unit.spec.js new file mode 100644 index 000000000000..4b9b5732836e --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/utils.unit.spec.js @@ -0,0 +1,33 @@ +import { isRegularCollection } from "./utils"; + +describe("Collections plugin utils", () => { + const COLLECTION = { + NO_AUTHORITY_LEVEL: { + id: "root", + name: "Our analytics", + }, + REGULAR: { + authority_level: null, + }, + OFFICIAL: { + authority_level: "official", + }, + }; + + describe("isRegularCollection", () => { + it("returns 'true' if collection is missing an authority level", () => { + const collection = COLLECTION.NO_AUTHORITY_LEVEL; + expect(isRegularCollection(collection)).toBe(true); + }); + + 
it("returns 'true' for regular collections", () => { + const collection = COLLECTION.REGULAR; + expect(isRegularCollection(collection)).toBe(true); + }); + + it("returns 'false' for official collections", () => { + const collection = COLLECTION.OFFICIAL; + expect(isRegularCollection(collection)).toBe(false); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/actions.js b/enterprise/frontend/src/metabase-enterprise/moderation/actions.js new file mode 100644 index 000000000000..3d51a9b36bb3 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/actions.js @@ -0,0 +1,31 @@ +import { createThunkAction } from "metabase/lib/redux"; +import { verifyItem, removeReview } from "./service"; +import { softReloadCard } from "metabase/query_builder/actions"; + +export const VERIFY_CARD = "metabase-enterprise/moderation/VERIFY_CARD"; +export const verifyCard = createThunkAction( + VERIFY_CARD, + (cardId, text) => async (dispatch, getState) => { + await verifyItem({ + itemId: cardId, + itemType: "card", + text, + }); + + return dispatch(softReloadCard()); + }, +); + +export const REMOVE_CARD_REVIEW = + "metabase-enterprise/moderation/REMOVE_CARD_REVIEW"; +export const removeCardReview = createThunkAction( + REMOVE_CARD_REVIEW, + cardId => async (dispatch, getState) => { + await removeReview({ + itemId: cardId, + itemType: "card", + }); + + return dispatch(softReloadCard()); + }, +); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.jsx new file mode 100644 index 000000000000..ee9502b85309 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.jsx @@ -0,0 +1,30 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; + +import { isItemVerified } from 
"metabase-enterprise/moderation/service"; + +import { Container, VerifyButton } from "./ModerationActions.styled"; + +export default ModerationActions; + +ModerationActions.propTypes = { + className: PropTypes.string, + onVerify: PropTypes.func, + moderationReview: PropTypes.object, +}; + +function ModerationActions({ moderationReview, className, onVerify }) { + const isVerified = isItemVerified(moderationReview); + const hasActions = !!onVerify; + + return hasActions ? ( + + {!isVerified && ( + + {t`Verify this question`} + + )} + + ) : null; +} diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.styled.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.styled.jsx new file mode 100644 index 000000000000..dd17fc80d11b --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.styled.jsx @@ -0,0 +1,38 @@ +import styled from "styled-components"; + +import { color } from "metabase/lib/colors"; +import { + MODERATION_STATUS, + getStatusIcon, +} from "metabase-enterprise/moderation/service"; + +const { name: verifiedIconName, color: verifiedIconColor } = getStatusIcon( + MODERATION_STATUS.verified, +); + +import Button from "metabase/components/Button"; + +export const Container = styled.div` + display: flex; + align-items: center; + justify-content: space-between; +`; + +export const Label = styled.h5` + font-size: 14px; + color: ${color("text-medium")}; + flex: 1; +`; + +export const VerifyButton = styled(Button).attrs({ + icon: verifiedIconName, + iconSize: 20, +})` + border: none; + color: ${color(verifiedIconColor)}; + padding: 8px; + + &:disabled { + color: ${color("text-medium")}; + } +`; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.unit.spec.js 
b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.unit.spec.js new file mode 100644 index 000000000000..2110349e7834 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.unit.spec.js @@ -0,0 +1,28 @@ +import React from "react"; +import ModerationActions from "./ModerationActions"; +import { render, screen } from "@testing-library/react"; + +describe("ModerationActions", () => { + describe("when the user is not a moderator", () => { + it("should not render", () => { + const { queryByTestId } = render( + , + ); + expect(queryByTestId("moderation-verify-action")).toBeNull(); + expect(screen.queryByText("Moderation")).toBeNull(); + }); + }); + + describe("when a moderator clicks on the verify button", () => { + it("should call the onVerify prop", () => { + const onVerify = jest.fn(); + const { getByTestId } = render( + , + ); + + getByTestId("moderation-verify-action").click(); + + expect(onVerify).toHaveBeenCalled(); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.jsx new file mode 100644 index 000000000000..1cfefdf6e491 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.jsx @@ -0,0 +1,100 @@ +import React from "react"; +import PropTypes from "prop-types"; +import _ from "underscore"; +import { connect } from "react-redux"; + +import { color, alpha } from "metabase/lib/colors"; +import { getUser } from "metabase/selectors/user"; +import { getRelativeTimeAbbreviated } from "metabase/lib/time"; +import { + getTextForReviewBanner, + getIconForReview, +} from "metabase-enterprise/moderation/service"; +import User from "metabase/entities/users"; + +import { + 
Container, + Text, + Time, + IconButton, + StatusIcon, +} from "./ModerationReviewBanner.styled"; +import Tooltip from "metabase/components/Tooltip"; + +const ICON_BUTTON_SIZE = 20; +const TOOLTIP_X_OFFSET = ICON_BUTTON_SIZE / 4; + +const mapStateToProps = (state, props) => ({ + currentUser: getUser(state), +}); + +export default _.compose( + User.load({ + id: (state, props) => props.moderationReview.moderator_id, + loadingAndErrorWrapper: false, + }), + connect(mapStateToProps), +)(ModerationReviewBanner); + +ModerationReviewBanner.propTypes = { + moderationReview: PropTypes.object.isRequired, + user: PropTypes.object, + currentUser: PropTypes.object.isRequired, + onRemove: PropTypes.func, +}; + +export function ModerationReviewBanner({ + moderationReview, + user: moderator, + currentUser, + onRemove, +}) { + const [isHovering, setIsHovering] = React.useState(false); + const [isActive, setIsActive] = React.useState(false); + + const { bannerText, tooltipText } = getTextForReviewBanner( + moderationReview, + moderator, + currentUser, + ); + const relativeCreationTime = getRelativeTimeAbbreviated( + moderationReview.created_at, + ); + const { name: iconName, color: iconColor } = getIconForReview( + moderationReview, + ); + const showClose = isHovering || isActive; + + return ( + setIsHovering(true)} + onMouseLeave={() => setIsHovering(false)} + > + + {onRemove ? ( + setIsActive(true)} + onBlur={() => setIsActive(false)} + icon={showClose ? "close" : iconName} + color={color(showClose ? 
"text-medium" : iconColor)} + onClick={onRemove} + iconSize={ICON_BUTTON_SIZE} + /> + ) : ( + + )} + + {bannerText} + + + ); +} diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.styled.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.styled.jsx new file mode 100644 index 000000000000..60439334ad7a --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.styled.jsx @@ -0,0 +1,40 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import Button from "metabase/components/Button"; +import Icon from "metabase/components/Icon"; + +export const Container = styled.div` + padding: 1rem 1rem 1rem 0.5rem; + background-color: ${props => props.backgroundColor}; + display: flex; + justify-content: space-between; + align-items: center; + column-gap: 0.5rem; + border-radius: 8px; +`; + +export const Text = styled.span` + flex: 1; + font-size: 14px; + font-weight: 700; +`; + +export const Time = styled.time` + color: ${color("text-medium")}; + font-size: 12px; +`; + +export const IconButton = styled(Button)` + padding: 0 0 0 0.5rem !important; + border: none; + background-color: transparent; + + &:hover { + background-color: transparent; + color: ${color("danger")}; + } +`; + +export const StatusIcon = styled(Icon)` + padding: 0 0.5rem; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.unit.spec.js new file mode 100644 index 000000000000..3a53401a3c16 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.unit.spec.js @@ -0,0 +1,104 @@ +import React 
from "react"; +import { ModerationReviewBanner } from "./ModerationReviewBanner"; +import { render, fireEvent } from "@testing-library/react"; + +const VERIFIED_ICON_SELECTOR = ".Icon-verified"; +const CLOSE_ICON_SELECTOR = ".Icon-close"; + +const moderationReview = { + status: "verified", + moderator_id: 1, + created_at: Date.now(), +}; +const moderator = { id: 1, common_name: "Foo" }; +const currentUser = { id: 2, common_name: "Bar" }; + +describe("ModerationReviewBanner", () => { + it("should show text concerning the given review", () => { + const { getByText } = render( + , + ); + expect(getByText("Foo verified this")).toBeTruthy(); + }); + + describe("when not provided an onRemove prop", () => { + let getByRole; + let container; + beforeEach(() => { + const wrapper = render( + , + ); + + getByRole = wrapper.getByRole; + container = wrapper.container; + }); + + it("should render a status icon, not a button", () => { + expect(() => getByRole("button")).toThrow(); + }); + + it("should render with the icon relevant to the review's status", () => { + expect(container.querySelector(VERIFIED_ICON_SELECTOR)).toBeTruthy(); + }); + }); + + describe("when provided an onRemove callback prop", () => { + let onRemove; + let container; + let getByRole; + beforeEach(() => { + onRemove = jest.fn(); + const wrapper = render( + , + ); + + container = wrapper.container; + getByRole = wrapper.getByRole; + }); + + it("should render a button", () => { + expect(getByRole("button")).toBeTruthy(); + }); + + it("should render the button with the icon relevant to the review's status", () => { + expect(container.querySelector(VERIFIED_ICON_SELECTOR)).toBeTruthy(); + }); + + it("should render the button as a close icon when the user is hovering their mouse over the banner", () => { + const banner = container.firstChild; + fireEvent.mouseEnter(banner); + expect(container.querySelector(CLOSE_ICON_SELECTOR)).toBeTruthy(); + fireEvent.mouseLeave(banner); + 
expect(container.querySelector(VERIFIED_ICON_SELECTOR)).toBeTruthy(); + }); + + it("should render the button as a close icon when the user focuses the button", () => { + fireEvent.focus(getByRole("button")); + expect(container.querySelector(CLOSE_ICON_SELECTOR)).toBeTruthy(); + fireEvent.blur(getByRole("button")); + expect(container.querySelector(VERIFIED_ICON_SELECTOR)).toBeTruthy(); + }); + + it("should render the button as a close icon when focused, even when the mouse leaves the banner", () => { + const banner = container.firstChild; + fireEvent.mouseEnter(banner); + fireEvent.focus(getByRole("button")); + expect(container.querySelector(CLOSE_ICON_SELECTOR)).toBeTruthy(); + fireEvent.mouseLeave(banner); + expect(container.querySelector(CLOSE_ICON_SELECTOR)).toBeTruthy(); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.jsx new file mode 100644 index 000000000000..6c9cfff6796f --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.jsx @@ -0,0 +1,20 @@ +import React from "react"; +import PropTypes from "prop-types"; + +import { color } from "metabase/lib/colors"; +import { getStatusIcon } from "metabase-enterprise/moderation/service"; + +import Icon from "metabase/components/Icon"; + +ModerationStatusIcon.propTypes = { + status: PropTypes.string, +}; + +function ModerationStatusIcon({ status, ...iconProps }) { + const { name: iconName, color: iconColor } = getStatusIcon(status); + return iconName ? 
( + + ) : null; +} + +export default ModerationStatusIcon; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.unit.spec.js new file mode 100644 index 000000000000..e0616400c2dd --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.unit.spec.js @@ -0,0 +1,25 @@ +import React from "react"; +import ModerationStatusIcon from "./ModerationStatusIcon"; +import { render } from "@testing-library/react"; + +const VERIFIED_ICON_SELECTOR = ".Icon-verified"; + +describe("ModerationReviewBanner", () => { + it("should show an icon when given a real moderation status", () => { + render(); + + expect(document.querySelector(VERIFIED_ICON_SELECTOR)).toBeTruthy(); + }); + + it("should not show an icon when given an undefined status", () => { + render(); + + expect(document.querySelector(VERIFIED_ICON_SELECTOR)).toBeNull(); + }); + + it("should not show an icon when given a status that does not match any existing moderation status", () => { + render(); + + expect(document.querySelector(VERIFIED_ICON_SELECTOR)).toBeNull(); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.jsx new file mode 100644 index 000000000000..aafe91711f88 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.jsx @@ -0,0 +1,69 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { connect } from "react-redux"; + +import { getLatestModerationReview } from "metabase-enterprise/moderation/service"; +import { getIsModerator } from 
"metabase-enterprise/moderation/selectors"; +import { + verifyCard, + removeCardReview, +} from "metabase-enterprise/moderation/actions"; + +import { BorderedModerationActions } from "./QuestionModerationSection.styled"; +import ModerationReviewBanner from "../ModerationReviewBanner/ModerationReviewBanner"; + +const mapStateToProps = (state, props) => ({ + isModerator: getIsModerator(state, props), +}); +const mapDispatchToProps = { + verifyCard, + removeCardReview, +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(QuestionModerationSection); + +QuestionModerationSection.propTypes = { + question: PropTypes.object.isRequired, + verifyCard: PropTypes.func.isRequired, + removeCardReview: PropTypes.func.isRequired, + isModerator: PropTypes.bool.isRequired, +}; + +function QuestionModerationSection({ + question, + verifyCard, + removeCardReview, + isModerator, +}) { + const latestModerationReview = getLatestModerationReview( + question.getModerationReviews(), + ); + + const onVerify = () => { + const id = question.id(); + verifyCard(id); + }; + + const onRemoveModerationReview = () => { + const id = question.id(); + removeCardReview(id); + }; + + return ( + + + {latestModerationReview && ( + + )} + + ); +} diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.styled.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.styled.jsx new file mode 100644 index 000000000000..373a7c285356 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.styled.jsx @@ -0,0 +1,8 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import ModerationActions from "../ModerationActions/ModerationActions"; + +export const BorderedModerationActions = styled(ModerationActions)` + border-top: 1px solid 
${color("border")}; + padding-top: 1rem; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/constants.js b/enterprise/frontend/src/metabase-enterprise/moderation/constants.js new file mode 100644 index 000000000000..3e4417c28b91 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/constants.js @@ -0,0 +1,14 @@ +export const MODERATION_STATUS = { + verified: "verified", +}; + +export const MODERATION_STATUS_ICONS = { + verified: { + name: "verified", + color: "brand", + }, + null: { + name: "close", + color: "text-light", + }, +}; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/index.js b/enterprise/frontend/src/metabase-enterprise/moderation/index.js new file mode 100644 index 000000000000..f471a7f5bcc1 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/index.js @@ -0,0 +1,17 @@ +import { PLUGIN_MODERATION } from "metabase/plugins"; +import QuestionModerationSection from "./components/QuestionModerationSection/QuestionModerationSection"; +import ModerationStatusIcon from "./components/ModerationStatusIcon/ModerationStatusIcon"; + +import { + getStatusIconForQuestion, + getStatusIcon, + getModerationTimelineEvents, +} from "./service"; + +Object.assign(PLUGIN_MODERATION, { + QuestionModerationSection, + ModerationStatusIcon, + getStatusIconForQuestion, + getStatusIcon, + getModerationTimelineEvents, +}); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/selectors.js b/enterprise/frontend/src/metabase-enterprise/moderation/selectors.js new file mode 100644 index 000000000000..c0b8f5778727 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/selectors.js @@ -0,0 +1,5 @@ +import { getUserIsAdmin } from "metabase/selectors/user"; + +export const getIsModerator = (state, props) => { + return getUserIsAdmin(state, props); +}; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/service.js 
b/enterprise/frontend/src/metabase-enterprise/moderation/service.js new file mode 100644 index 000000000000..2d6639f5b2ac --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/service.js @@ -0,0 +1,128 @@ +import { t } from "ttag"; +import _ from "underscore"; + +import { ModerationReviewApi } from "metabase/services"; +import { MODERATION_STATUS_ICONS } from "./constants"; + +export { MODERATION_STATUS } from "./constants"; + +export function verifyItem({ text, itemId, itemType }) { + return ModerationReviewApi.create({ + status: "verified", + moderated_item_id: itemId, + moderated_item_type: itemType, + text, + }); +} + +export function removeReview({ itemId, itemType }) { + return ModerationReviewApi.create({ + status: null, + moderated_item_id: itemId, + moderated_item_type: itemType, + }); +} + +const noIcon = {}; +export function getStatusIcon(status) { + if (isRemovedReviewStatus(status)) { + return noIcon; + } + + return MODERATION_STATUS_ICONS[status] || noIcon; +} + +export function getIconForReview(review, options) { + return getStatusIcon(review?.status, options); +} + +// we only want the icon that represents the removal of a review in special cases, +// so you must ask for the icon explicitly +export function getRemovedReviewStatusIcon() { + return MODERATION_STATUS_ICONS[null]; +} + +export function getLatestModerationReview(reviews) { + const maybeReview = _.findWhere(reviews, { + most_recent: true, + }); + + // since we can't delete reviews, consider a most recent review with a status of null to mean there is no review + return isRemovedReviewStatus(maybeReview?.status) ? 
undefined : maybeReview; +} + +export function getStatusIconForQuestion(question) { + const reviews = question.getModerationReviews(); + const review = getLatestModerationReview(reviews); + return getIconForReview(review); +} + +export function getTextForReviewBanner( + moderationReview, + moderator, + currentUser, +) { + const moderatorName = getModeratorDisplayName(moderator, currentUser); + const { status } = moderationReview; + + if (status === "verified") { + const bannerText = t`${moderatorName} verified this`; + const tooltipText = t`Remove verification`; + return { bannerText, tooltipText }; + } + + return {}; +} + +function getModeratorDisplayName(user, currentUser) { + const { id: userId, common_name } = user || {}; + const { id: currentUserId } = currentUser || {}; + + if (currentUserId != null && userId === currentUserId) { + return t`You`; + } else if (userId != null) { + return common_name; + } else { + return t`A moderator`; + } +} + +// a `status` of `null` represents the removal of a review, since we can't delete reviews +export function isRemovedReviewStatus(status) { + return String(status) === "null"; +} + +export function isItemVerified(review) { + return review != null && review.status === "verified"; +} + +function getModerationReviewEventText(review, moderatorDisplayName) { + switch (review.status) { + case "verified": + return t`${moderatorDisplayName} verified this`; + case null: + return t`${moderatorDisplayName} removed verification`; + default: + return t`${moderatorDisplayName} changed status to ${review.status}`; + } +} + +export function getModerationTimelineEvents(reviews, usersById, currentUser) { + return reviews.map((review, index) => { + const moderator = usersById[review.moderator_id]; + const moderatorDisplayName = getModeratorDisplayName( + moderator, + currentUser, + ); + const text = getModerationReviewEventText(review, moderatorDisplayName); + const icon = isRemovedReviewStatus(review.status) + ? 
getRemovedReviewStatusIcon() + : getIconForReview(review); + + return { + timestamp: new Date(review.created_at).valueOf(), + icon, + title: text, + }; + }); +} diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/service.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/moderation/service.unit.spec.js new file mode 100644 index 000000000000..5016f46befee --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/service.unit.spec.js @@ -0,0 +1,275 @@ +import { + verifyItem, + removeReview, + getIconForReview, + getTextForReviewBanner, + isItemVerified, + getLatestModerationReview, + getStatusIconForQuestion, + getModerationTimelineEvents, + getStatusIcon, + getRemovedReviewStatusIcon, +} from "./service"; + +jest.mock("metabase/services", () => ({ + ModerationReviewApi: { + create: jest.fn(() => Promise.resolve({ id: 123 })), + }, +})); + +import { ModerationReviewApi } from "metabase/services"; + +describe("moderation/service", () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe("verifyItem", () => { + it("should create a new moderation review", async () => { + const review = await verifyItem({ + itemId: 123, + itemType: "card", + text: "bar", + }); + + expect(ModerationReviewApi.create).toHaveBeenCalledWith({ + status: "verified", + moderated_item_id: 123, + moderated_item_type: "card", + text: "bar", + }); + + expect(review).toEqual({ id: 123 }); + }); + }); + + describe("removeReview", () => { + it("should create a new moderation review with a null status", async () => { + const review = await removeReview({ + itemId: 123, + itemType: "card", + }); + + expect(ModerationReviewApi.create).toHaveBeenCalledWith({ + status: null, + moderated_item_id: 123, + moderated_item_type: "card", + }); + + expect(review).toEqual({ id: 123 }); + }); + }); + + describe("getStatusIcon", () => { + it("should return an empty icon if there is no matching status", () => { + expect(getStatusIcon("foo")).toEqual({}); + }); + 
+ it("should return an icon if there is a matching status", () => { + expect(getStatusIcon("verified")).toEqual({ + name: "verified", + color: "brand", + }); + }); + + it("should not return an icon for a status of null, which represents the removal of a review and is a special case", () => { + const removedReviewStatus = null; + const accidentallyStringCoercedRemvovedReviewStatus = "null"; + expect(getStatusIcon(removedReviewStatus)).toEqual({}); + expect( + getStatusIcon(accidentallyStringCoercedRemvovedReviewStatus), + ).toEqual({}); + }); + }); + + describe("getRemovedReviewStatusIcon", () => { + it("should return an icon for a removed review", () => { + expect(getRemovedReviewStatusIcon()).toEqual({ + name: "close", + color: "text-light", + }); + }); + }); + + describe("getIconForReview", () => { + it("should return icon name/color for given review", () => { + expect(getIconForReview({ status: "verified" })).toEqual( + getStatusIcon("verified"), + ); + }); + }); + + describe("getTextForReviewBanner", () => { + it("should return text for a verified review", () => { + expect(getTextForReviewBanner({ status: "verified" })).toEqual({ + bannerText: "A moderator verified this", + tooltipText: "Remove verification", + }); + }); + + it("should include the moderator name", () => { + expect( + getTextForReviewBanner( + { status: "verified" }, + { + common_name: "Foo", + id: 1, + }, + { id: 2 }, + ), + ).toEqual({ + bannerText: "Foo verified this", + tooltipText: "Remove verification", + }); + }); + + it("should handle the moderator being the current user", () => { + expect( + getTextForReviewBanner( + { status: "verified" }, + { + common_name: "Foo", + id: 1, + }, + { id: 1 }, + ), + ).toEqual({ + bannerText: "You verified this", + tooltipText: "Remove verification", + }); + }); + }); + + describe("isItemVerified", () => { + it("should return true for a verified review", () => { + expect(isItemVerified({ status: "verified" })).toBe(true); + }); + + it("should return 
false for a null review", () => { + expect(isItemVerified({ status: null })).toBe(false); + }); + + it("should return false for no review", () => { + expect(isItemVerified()).toBe(false); + }); + }); + + describe("getLatestModerationReview", () => { + it("should return the review flagged as most recent", () => { + const reviews = [ + { id: 1, status: "verified" }, + { id: 2, status: "verified", most_recent: true }, + { id: 3, status: null }, + ]; + + expect(getLatestModerationReview(reviews)).toEqual({ + id: 2, + status: "verified", + most_recent: true, + }); + }); + + it("should return undefined when there is no review flagged as most recent", () => { + const reviews = [ + { id: 1, status: "verified" }, + { id: 2, status: "verified" }, + { id: 3, status: null }, + ]; + + expect(getLatestModerationReview(reviews)).toEqual(undefined); + expect(getLatestModerationReview([])).toEqual(undefined); + }); + + it("should return undefined when there is a review with a status of null flagged as most recent", () => { + const reviews = [ + { id: 1, status: "verified" }, + { id: 2, status: "verified" }, + { id: 3, status: null, most_recent: true }, + ]; + + expect(getLatestModerationReview(reviews)).toEqual(undefined); + }); + }); + + describe("getStatusIconForQuestion", () => { + it('should return the status icon for the most recent "real" review', () => { + const questionWithReviews = { + getModerationReviews: () => [ + { id: 1, status: "verified" }, + { id: 2, status: "verified", most_recent: true }, + { id: 3, status: null }, + ], + }; + + expect(getStatusIconForQuestion(questionWithReviews)).toEqual( + getStatusIcon("verified"), + ); + }); + + it("should return undefined vals for no review", () => { + const questionWithNoMostRecentReview = { + getModerationReviews: () => [ + { id: 1, status: "verified" }, + { id: 2, status: "verified" }, + { id: 3, status: null, most_recent: true }, + ], + }; + + const questionWithNoReviews = { + getModerationReviews: () => [], + }; + + 
const questionWithUndefinedReviews = { + getModerationReviews: () => undefined, + }; + + const noIcon = { name: undefined, color: undefined }; + + expect(getStatusIconForQuestion(questionWithNoMostRecentReview)).toEqual( + noIcon, + ); + expect(getStatusIconForQuestion(questionWithNoReviews)).toEqual(noIcon); + expect(getStatusIconForQuestion(questionWithUndefinedReviews)).toEqual( + noIcon, + ); + }); + }); + + describe("getModerationTimelineEvents", () => { + it("should return the moderation timeline events", () => { + const reviews = [ + { + id: 1, + status: "verified", + created_at: "2018-01-01T00:00:00.000Z", + moderator_id: 1, + }, + { + id: 2, + status: null, + created_at: "2018-01-02T00:00:00.000Z", + moderator_id: 123, + }, + ]; + const usersById = { + 1: { + id: 1, + common_name: "Foo", + }, + }; + + expect(getModerationTimelineEvents(reviews, usersById)).toEqual([ + { + timestamp: expect.any(Number), + icon: getStatusIcon("verified"), + title: "Foo verified this", + }, + { + timestamp: expect.any(Number), + icon: getRemovedReviewStatusIcon(), + title: "A moderator removed verification", + }, + ]); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/plugins.js b/enterprise/frontend/src/metabase-enterprise/plugins.js index 8feaedee4ea1..ab5d012de734 100644 --- a/enterprise/frontend/src/metabase-enterprise/plugins.js +++ b/enterprise/frontend/src/metabase-enterprise/plugins.js @@ -5,15 +5,20 @@ import MetabaseSettings from "metabase/lib/settings"; // NOTE: temporarily use "latest" for Enterprise Edition docs MetabaseSettings.docsTag = () => "latest"; MetabaseSettings.isEnterprise = () => true; -// PLUGINS: -// import "./management"; +// PLUGINS: -import "./audit_app"; +import "./tools"; import "./sandboxes"; import "./auth"; +import "./caching"; +import "./collections"; import "./whitelabel"; import "./embedding"; import "./store"; import "./snippets"; import "./sharing"; +import "./moderation"; +import "./advanced_config"; +import 
"./advanced_permissions"; +import "./audit_app"; diff --git a/enterprise/frontend/src/metabase-enterprise/sandboxes/actions.js b/enterprise/frontend/src/metabase-enterprise/sandboxes/actions.js new file mode 100644 index 000000000000..6e31d23599c8 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/sandboxes/actions.js @@ -0,0 +1,19 @@ +import { updateDataPermission } from "metabase/admin/permissions/permissions"; +import { createThunkAction } from "metabase/lib/redux"; + +export const UPDATE_TABLE_SANDBOXING_PERMISSION = + "metabase-enterprise/sandboxes/UPDATE_TABLE_SANDBOXING_PERMISSION"; +export const updateTableSandboxingPermission = createThunkAction( + UPDATE_TABLE_SANDBOXING_PERMISSION, + params => async dispatch => { + const { groupId, ...entityId } = params; + return dispatch( + updateDataPermission({ + groupId, + permission: { name: "access" }, + value: "controlled", + entityId, + }), + ); + }, +); diff --git a/enterprise/frontend/src/metabase-enterprise/sandboxes/components/GTAPModal.jsx b/enterprise/frontend/src/metabase-enterprise/sandboxes/components/GTAPModal.jsx index c7588543fc85..6c7e994aa2ea 100644 --- a/enterprise/frontend/src/metabase-enterprise/sandboxes/components/GTAPModal.jsx +++ b/enterprise/frontend/src/metabase-enterprise/sandboxes/components/GTAPModal.jsx @@ -1,6 +1,7 @@ /* eslint-disable react/prop-types */ import React from "react"; - +import _ from "underscore"; +import { jt, t } from "ttag"; import { withRouter } from "react-router"; import { connect } from "react-redux"; import { push } from "react-router-redux"; @@ -25,12 +26,13 @@ import QuestionLoader from "metabase/containers/QuestionLoader"; import Dimension from "metabase-lib/lib/Dimension"; -import _ from "underscore"; -import { jt, t } from "ttag"; +import { getParentPath } from "metabase/hoc/ModalRoute"; +import { updateTableSandboxingPermission } from "../actions"; const mapStateToProps = () => ({}); const mapDispatchToProps = { push, + 
updateTableSandboxingPermission, }; type GTAP = { @@ -91,15 +93,8 @@ export default class GTAPModal extends React.Component { } close = () => { - const { - push, - params: { databaseId, schemaName }, - } = this.props; - push( - `/admin/permissions/databases/${databaseId}` + - (schemaName ? `/schemas/${encodeURIComponent(schemaName)}` : ``) + - `/tables`, - ); + const { push, route, location } = this.props; + return push(getParentPath(route, location)); }; _getCanonicalGTAP() { @@ -128,6 +123,8 @@ export default class GTAPModal extends React.Component { } else { await GTAPApi.create(gtap); } + this.props.updateTableSandboxingPermission(this.props.params); + this.close(); } catch (error) { console.error("Error saving GTAP", error); const message = error @@ -138,7 +135,6 @@ export default class GTAPModal extends React.Component { this.setState({ error: message }); throw new Error(message); } - this.close(); }; isValid() { @@ -483,7 +479,7 @@ const AttributeMappingEditor = ({ - + } diff --git a/enterprise/frontend/src/metabase-enterprise/sandboxes/index.js b/enterprise/frontend/src/metabase-enterprise/sandboxes/index.js index cd73c894d2e2..cd998bebc355 100644 --- a/enterprise/frontend/src/metabase-enterprise/sandboxes/index.js +++ b/enterprise/frontend/src/metabase-enterprise/sandboxes/index.js @@ -1,6 +1,7 @@ import { PLUGIN_ADMIN_USER_FORM_FIELDS, PLUGIN_ADMIN_PERMISSIONS_TABLE_ROUTES, + PLUGIN_ADMIN_PERMISSIONS_TABLE_GROUP_ROUTES, PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_OPTIONS, PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_ACTIONS, PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_POST_ACTION, @@ -12,43 +13,42 @@ import { push } from "react-router-redux"; import { t } from "ttag"; import { hasPremiumFeature } from "metabase-enterprise/settings"; -import { color, alpha } from "metabase/lib/colors"; - +import { + getDatabaseFocusPermissionsUrl, + getGroupFocusPermissionsUrl, +} from "metabase/admin/permissions/utils/urls"; import { ModalRoute } from "metabase/hoc/ModalRoute"; + import 
LoginAttributesWidget from "./components/LoginAttributesWidget"; import GTAPModal from "./components/GTAPModal"; -const OPTION_BLUE = { - iconColor: color("brand"), - bgColor: alpha(color("brand"), 0.15), -}; - const OPTION_SEGMENTED = { - ...OPTION_BLUE, + label: t`Sandboxed`, value: "controlled", - title: t`Grant sandboxed access`, - tooltip: t`Sandboxed access`, icon: "permissions_limited", + iconColor: "brand", +}; + +const getDatabaseViewSandboxModalUrl = (entityId, groupId) => { + const baseUrl = getDatabaseFocusPermissionsUrl(entityId, groupId); + return `${baseUrl}/segmented/group/${groupId}`; }; -const getEditSegementedAccessUrl = ( - groupId, - { databaseId, schemaName, tableId }, -) => - `/admin/permissions` + - `/databases/${databaseId}` + - (schemaName ? `/schemas/${encodeURIComponent(schemaName)}` : "") + - `/tables/${tableId}/segmented/group/${groupId}`; +const getGroupViewSandboxModalUrl = (entityId, groupId) => { + const baseUrl = getGroupFocusPermissionsUrl(groupId, { + ...entityId, + tableId: null, + }); + return `${baseUrl}/${entityId.tableId}/segmented`; +}; -const getEditSegementedAccessAction = (groupId, entityId) => ({ - ...OPTION_BLUE, - title: t`Edit sandboxed access`, - icon: "pencil", - value: push(getEditSegementedAccessUrl(groupId, entityId)), -}); +const getEditSegementedAccessUrl = (entityId, groupId, view) => + view === "database" + ? 
getDatabaseViewSandboxModalUrl(entityId, groupId) + : getGroupViewSandboxModalUrl(entityId, groupId); -const getEditSegmentedAcessPostAction = (groupId, entityId) => - push(getEditSegementedAccessUrl(groupId, entityId)); +const getEditSegmentedAcessPostAction = (entityId, groupId, view) => + push(getEditSegementedAccessUrl(entityId, groupId, view)); if (hasPremiumFeature("sandboxes")) { PLUGIN_ADMIN_USER_FORM_FIELDS.push({ @@ -57,12 +57,27 @@ if (hasPremiumFeature("sandboxes")) { type: LoginAttributesWidget, }); PLUGIN_ADMIN_PERMISSIONS_TABLE_ROUTES.push( - , + , ); - PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_OPTIONS.push(OPTION_SEGMENTED); - PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_ACTIONS["controlled"].push( - getEditSegementedAccessAction, + PLUGIN_ADMIN_PERMISSIONS_TABLE_GROUP_ROUTES.push( + , ); + PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_OPTIONS.push(OPTION_SEGMENTED); + PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_ACTIONS["controlled"].push({ + label: t`Edit sandboxed access`, + iconColor: "brand", + icon: "pencil", + actionCreator: (entityId, groupId, view) => + push(getEditSegementedAccessUrl(entityId, groupId, view)), + }); PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_POST_ACTION[ "controlled" ] = getEditSegmentedAcessPostAction; diff --git a/enterprise/frontend/src/metabase-enterprise/sharing/components/MutableParametersSection.jsx b/enterprise/frontend/src/metabase-enterprise/sharing/components/MutableParametersSection.jsx index bb37b3fc8ffb..ceb9e8b40b90 100644 --- a/enterprise/frontend/src/metabase-enterprise/sharing/components/MutableParametersSection.jsx +++ b/enterprise/frontend/src/metabase-enterprise/sharing/components/MutableParametersSection.jsx @@ -10,7 +10,7 @@ import { t } from "ttag"; import CollapseSection from "metabase/components/CollapseSection"; import ParametersList from "metabase/parameters/components/ParametersList"; -import { collateParametersWithValues } from "metabase/meta/Parameter"; +import { getValuePopulatedParameters } from 
"metabase/meta/Parameter"; import { getPulseParameters, getActivePulseParameters, @@ -31,7 +31,7 @@ function MutableParametersSection({ return map; }, {}); - const collatedParameters = collateParametersWithValues( + const valuePopulatedParameters = getValuePopulatedParameters( parameters, pulseParamValuesById, ); @@ -63,7 +63,7 @@ function MutableParametersSection({ className="align-stretch row-gap-1" vertical dashboard={dashboard} - parameters={collatedParameters} + parameters={valuePopulatedParameters} setParameterValue={setParameterValue} /> diff --git a/enterprise/frontend/src/metabase-enterprise/snippets/index.js b/enterprise/frontend/src/metabase-enterprise/snippets/index.js index 011b246ec211..ec09fca5febf 100644 --- a/enterprise/frontend/src/metabase-enterprise/snippets/index.js +++ b/enterprise/frontend/src/metabase-enterprise/snippets/index.js @@ -9,7 +9,7 @@ import { } from "metabase/plugins"; import MetabaseSettings from "metabase/lib/settings"; -import CollectionPermissionsModal from "metabase/admin/permissions/containers/CollectionPermissionsModal"; +import CollectionPermissionsModal from "metabase/admin/permissions/components/CollectionPermissionsModal/CollectionPermissionsModal"; import Modal from "metabase/components/Modal"; import CollectionRow from "./components/CollectionRow"; diff --git a/enterprise/frontend/src/metabase-enterprise/store/components/StoreIcon.jsx b/enterprise/frontend/src/metabase-enterprise/store/components/StoreIcon.jsx index 5136282a7b0d..b70fc33cd933 100644 --- a/enterprise/frontend/src/metabase-enterprise/store/components/StoreIcon.jsx +++ b/enterprise/frontend/src/metabase-enterprise/store/components/StoreIcon.jsx @@ -14,7 +14,7 @@ const StoreIconWrapper = ({ children, color }) => ( p={2} bg={color || colors["brand"]} color="white" - w={WRAPPER_SIZE} + width={WRAPPER_SIZE} style={{ borderRadius: 99, height: WRAPPER_SIZE }} > {children} diff --git 
a/enterprise/frontend/src/metabase-enterprise/store/containers/StoreAccount.jsx b/enterprise/frontend/src/metabase-enterprise/store/containers/StoreAccount.jsx index bdacf1690381..ebd9add4f70a 100644 --- a/enterprise/frontend/src/metabase-enterprise/store/containers/StoreAccount.jsx +++ b/enterprise/frontend/src/metabase-enterprise/store/containers/StoreAccount.jsx @@ -209,7 +209,7 @@ const AccountStatus = ({ flexDirection="column" className={className} p={[2, 4]} - w="100%" + width="100%" >

{title}

@@ -219,7 +219,7 @@ const AccountStatus = ({ {subtitle}
)} - + {featuresOrdered.map(([id, feature]) => ( ( ); const Feature = ({ feature, included, expired, preview }) => ( - + + diff --git a/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorDetail.jsx b/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorDetail.jsx new file mode 100644 index 000000000000..e43f26d25324 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorDetail.jsx @@ -0,0 +1,184 @@ +import React from "react"; + +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import { getMetadata } from "metabase/selectors/metadata"; + +import { t } from "ttag"; +import PropTypes from "prop-types"; +import { getIn } from "icepick"; + +import { formatColumn, formatValue } from "metabase/lib/formatting"; +import { CardApi } from "metabase/services"; +import Button from "metabase/components/Button"; +import Link from "metabase/components/Link"; +import Question from "metabase-lib/lib/Question"; +import { QuestionResultLoader } from "metabase/containers/QuestionResultLoader"; +import { columnNameToUrl } from "../../audit_app/lib/mode"; + +function idxToUrl(resRow, resCols, nameToResCol, colName) { + const idVal = resRow[nameToResCol[colName]]; + const urlVal = colName && idVal ? columnNameToUrl[colName](idVal) : ""; + const linkClass = urlVal === "" ? 
"" : "text-brand"; + return [urlVal, linkClass]; +} + +function ErrorDetailDisplay(props) { + const { result } = props; + const resRow = getIn(result, ["data", "rows", 0]); + const resCols = getIn(result, ["data", "cols"]); + if (resRow && resCols) { + const nameToResCol = resCols.reduce( + (obj, x, idx) => Object.assign(obj, { [x.name]: idx }), + {}, + ); + + const linkColumns = [ + null, + "collection_id", + "database_id", + null, + "table_id", + null, + "user_id", + null, + ]; + + const ordinaryRows = [ + "last_run_at", + "collection_name", + "database_name", + "schema_name", + "table_name", + "total_runs", + "user_name", + "updated_at", + ].map((x, idx) => { + const [urlVal, linkClass] = idxToUrl( + resRow, + resCols, + nameToResCol, + linkColumns[idx], + ); + const formattedVal = formatValue(resRow[nameToResCol[x]], { + column: resCols[nameToResCol[x]], + jsx: true, + rich: true, + type: "cell", + local: true, + }); + return ( +
+ + + + ); + }); + + const dashIdRows = resRow[nameToResCol.dash_name_str] + ?.split("|") + ?.map((x, idx) => ( + + + + + )); + + const [cardUrlVal, cardLinkClass] = idxToUrl( + resRow, + resCols, + nameToResCol, + "card_id", + ); + + return [ +

+ { + + {resRow[nameToResCol.card_name]} + + } +

, +
+ {resRow[nameToResCol.error_str]} +
, +
+ elem)} + onChange={e => this.handleAllSelectClick(e, rows)} + /> +
+ + this.handleRowSelectClick( + { ...e, originRow: rowIndex }, + row, + rowIndex, + ) + } + /> + onVisualizationClick(clicked) : null } > - {formatValue(value, { - ...columnSettings, - type: "cell", - jsx: true, - rich: true, - clicked: clicked, - // always show timestamps in local time for the audit app - local: true, - })} +
+ {formatValue(value, { + ...columnSettings, + type: "cell", + jsx: true, + rich: true, + clicked: clicked, + // always show timestamps in local time for the audit app + local: true, + })} +
+ this.handleRemoveRowClick(row, cols)} + > + + +
+ {formatColumn(resCols[nameToResCol[x]])} + + { + + {formattedVal} + + } +
+ {idx === 0 && formatColumn(resCols[nameToResCol.dash_name_str])} + + {formatValue(x, { column: resCols[nameToResCol.dash_name_str] })} +
+ {[ordinaryRows, dashIdRows]} +
, + ]; + } else { + return null; + } +} + +function ErrorDetail(props) { + const { params, errorRetry } = props; + const cardId = parseInt(params.cardId); + + // below card is not the card in question, but + // the card we're creating to query for the error details + const card = { + name: "Card Errors", + dataset_query: { + type: "internal", + fn: "metabase-enterprise.audit-app.pages.query-detail/bad-card", + args: [cardId], + }, + }; + const question = new Question(card, null); + + return ( +
+ + + {({ rawSeries, result }) => } + +
+ ); +} + +const mapStateToProps = (state, props) => ({ + metadata: getMetadata(state), +}); + +const mapDispatchToProps = { + errorRetry: async cardId => { + await CardApi.query({ cardId: cardId }); + // we're imagining that we successfully reran, in which case we want to go back to overall table + return push("/admin/tools/errors/"); + }, +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(ErrorDetail); + +ErrorDetail.propTypes = { + params: PropTypes.object, + errorRetry: PropTypes.func, +}; +ErrorDetailDisplay.propTypes = { + result: PropTypes.object, +}; diff --git a/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorOverview.jsx b/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorOverview.jsx new file mode 100644 index 000000000000..eddf2d8108d4 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorOverview.jsx @@ -0,0 +1,114 @@ +import React, { useState, useRef } from "react"; +import { t } from "ttag"; + +import _ from "underscore"; + +import { CardApi } from "metabase/services"; + +import * as Queries from "../../audit_app/lib/cards/queries"; +import AuditTable from "../../audit_app/containers/AuditTable"; +import AuditParameters from "../../audit_app/components/AuditParameters"; +import { ErrorMode } from "../mode"; + +const getSortOrder = isAscending => (isAscending ? 
"asc" : "desc"); + +const CARD_ID_COL = 0; + +export default function ErrorOverview(props) { + const reloadRef = useRef(null); + // TODO: use isReloading to display a loading overlay + // eslint-disable-next-line no-unused-vars + const [isReloading, setIsReloading] = useState(false); + const [hasResults, setHasResults] = useState(false); + const [sorting, setSorting] = useState({ + column: "last_run_at", + isAscending: false, + }); + + const [rowChecked, setRowChecked] = useState({}); + + const handleAllSelectClick = e => { + const newRowChecked = { ...rowChecked }; + const noRowChecked = Object.values(rowChecked).every(v => !v); + for (const rowIndex of Array(e.rows.length).keys()) { + const cardIndex = e.rows[rowIndex][CARD_ID_COL]; + if (noRowChecked) { + newRowChecked[cardIndex] = true; + } else { + newRowChecked[cardIndex] = false; + } + } + setRowChecked(newRowChecked); + }; + + const handleRowSelectClick = e => { + const newRowChecked = { ...rowChecked }; + const cardIndex = e.row[CARD_ID_COL]; + newRowChecked[cardIndex] = !(rowChecked[cardIndex] || false); + setRowChecked(newRowChecked); + }; + + const handleReloadSelected = async () => { + const checkedCardIds = Object.keys(_.pick(rowChecked, _.identity)); + + await Promise.all( + checkedCardIds.map( + async member => await CardApi.query({ cardId: member }), + ), + ); + setRowChecked({}); + setIsReloading(true); + reloadRef.current?.(); + }; + + const handleSortingChange = sorting => setSorting(sorting); + + const handleLoad = result => { + setHasResults(result[0].row_count !== 0); + setIsReloading(false); + }; + + return ( + !isChecked), + onClick: handleReloadSelected, + }, + ]} + hasResults={hasResults} + > + {({ errorFilter, dbFilter, collectionFilter }) => ( + + )} + + ); +} diff --git a/enterprise/frontend/src/metabase-enterprise/tools/containers/ToolsApp.jsx b/enterprise/frontend/src/metabase-enterprise/tools/containers/ToolsApp.jsx new file mode 100644 index 000000000000..4048d5a3c6f2 --- /dev/null 
+++ b/enterprise/frontend/src/metabase-enterprise/tools/containers/ToolsApp.jsx @@ -0,0 +1,20 @@ +import React, { Component } from "react"; +import PropTypes from "prop-types"; + +import AdminLayout from "metabase/components/AdminLayout"; + +export default class ToolsApp extends Component { + static propTypes = { + children: PropTypes.node, + }; + + render() { + const { children } = this.props; + return ( + +

Questions that errored when last run

+ {children} +
+ ); + } +} diff --git a/enterprise/frontend/src/metabase-enterprise/tools/index.js b/enterprise/frontend/src/metabase-enterprise/tools/index.js new file mode 100644 index 000000000000..4220bb437927 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/tools/index.js @@ -0,0 +1,10 @@ +import { t } from "ttag"; + +import { PLUGIN_ADMIN_NAV_ITEMS, PLUGIN_ADMIN_ROUTES } from "metabase/plugins"; +import { hasPremiumFeature } from "metabase-enterprise/settings"; +import getToolsRoutes from "./routes"; + +if (hasPremiumFeature("audit_app")) { + PLUGIN_ADMIN_NAV_ITEMS.push({ name: t`Tools`, path: "/admin/tools" }); + PLUGIN_ADMIN_ROUTES.push(getToolsRoutes); +} diff --git a/enterprise/frontend/src/metabase-enterprise/tools/mode.js b/enterprise/frontend/src/metabase-enterprise/tools/mode.js new file mode 100644 index 000000000000..f59162335c32 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/tools/mode.js @@ -0,0 +1,28 @@ +import { t } from "ttag"; +import { push } from "react-router-redux"; + +const CARD_ID_ROW_IDX = 0; + +const ErrorDrill = ({ clicked }) => { + if (!clicked) { + return []; + } + + const cardId = clicked.origin.row[CARD_ID_ROW_IDX]; + + return [ + { + name: "detail", + title: t`View this`, + default: true, + action() { + return push(`/admin/tools/errors/${cardId}`); + }, + }, + ]; +}; + +export const ErrorMode = { + name: "error", + drills: () => [ErrorDrill], +}; diff --git a/enterprise/frontend/src/metabase-enterprise/tools/routes.jsx b/enterprise/frontend/src/metabase-enterprise/tools/routes.jsx new file mode 100644 index 000000000000..18533bb63bd6 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/tools/routes.jsx @@ -0,0 +1,22 @@ +import React from "react"; + +import { Route } from "metabase/hoc/Title"; +import { IndexRedirect } from "react-router"; +import { t } from "ttag"; +import ToolsApp from "./containers/ToolsApp"; +import ErrorOverview from "./containers/ErrorOverview"; +import ErrorDetail from 
"./containers/ErrorDetail"; + +const getRoutes = (store: any) => ( + + + + + +); + +export default getRoutes; diff --git a/enterprise/frontend/src/metabase-enterprise/whitelabel/components/ColorSchemeWidget.jsx b/enterprise/frontend/src/metabase-enterprise/whitelabel/components/ColorSchemeWidget.jsx index 91134f9acb05..fcbaec193674 100644 --- a/enterprise/frontend/src/metabase-enterprise/whitelabel/components/ColorSchemeWidget.jsx +++ b/enterprise/frontend/src/metabase-enterprise/whitelabel/components/ColorSchemeWidget.jsx @@ -1,6 +1,8 @@ /* eslint-disable react/prop-types */ import React from "react"; +import { t } from "ttag"; + import ColorPicker from "metabase/components/ColorPicker"; import Icon from "metabase/components/Icon"; import { humanize } from "metabase/lib/formatting"; @@ -15,39 +17,35 @@ const THEMEABLE_COLORS = [ const COLOR_DISPLAY_PROPERTIES = { brand: { - name: "Primary color", - description: - "The main color used throughout the app for buttons, links, and the default chart color.", + name: t`Primary color`, + description: t`The main color used throughout the app for buttons, links, and the default chart color.`, }, nav: { - name: "Navigation bar color", - description: - "The top nav bar of Metabase. Defaults to the Primary Color if not set.", + name: t`Navigation bar color`, + description: t`The top nav bar of Metabase. 
Defaults to the Primary Color if not set.`, }, accent1: { - name: "Accent 1", - description: - "The color of aggregations and breakouts in the graphical query builder.", + name: t`Accent 1`, + description: t`The color of aggregations and breakouts in the graphical query builder.`, }, accent2: { - name: "Accent 2", - description: - "The color of filters in the query builder and buttons and links in filter widgets.", + name: t`Accent 2`, + description: t`The color of filters in the query builder and buttons and links in filter widgets.`, }, accent3: { - name: "Additional chart color", + name: t`Additional chart color`, }, accent4: { - name: "Additional chart color", + name: t`Additional chart color`, }, accent5: { - name: "Additional chart color", + name: t`Additional chart color`, }, accent6: { - name: "Additional chart color", + name: t`Additional chart color`, }, accent7: { - name: "Additional chart color", + name: t`Additional chart color`, }, }; diff --git a/enterprise/frontend/src/metabase-enterprise/whitelabel/lib/whitelabel.js b/enterprise/frontend/src/metabase-enterprise/whitelabel/lib/whitelabel.js index 66eff9931ef9..30b5c45b2acb 100644 --- a/enterprise/frontend/src/metabase-enterprise/whitelabel/lib/whitelabel.js +++ b/enterprise/frontend/src/metabase-enterprise/whitelabel/lib/whitelabel.js @@ -49,7 +49,9 @@ function walkStyleSheets(sheets, fn) { } if (rule.style) { for (const prop of rule.style) { - fn(rule.style, prop, rule.style[prop]); + const cssValue = rule.style.getPropertyValue(prop); + const cssPriority = rule.style.getPropertyPriority(prop); + fn(rule.style, prop, cssValue, cssPriority); } } } @@ -74,12 +76,15 @@ const replaceColors = (cssValue, matchColor, replacementColor) => { const getColorStyleProperties = memoize(function() { const properties = []; - walkStyleSheets(document.styleSheets, (style, cssProperty, cssValue) => { - // don't bother with checking if there are no colors - if (COLOR_REGEX.test(cssValue)) { - properties.push({ style, 
cssProperty, cssValue }); - } - }); + walkStyleSheets( + document.styleSheets, + (style, cssProperty, cssValue, cssPriority) => { + // don't bother with checking if there are no colors + if (COLOR_REGEX.test(cssValue)) { + properties.push({ style, cssProperty, cssValue, cssPriority }); + } + }, + ); return properties; }); @@ -96,11 +101,17 @@ function initColorCSS(colorName) { const originalColor = Color(originalColors[colorName]); // look for CSS rules which have colors matching the brand colors or very light or desaturated - for (const { style, cssProperty, cssValue } of getColorStyleProperties()) { + for (const { + style, + cssProperty, + cssValue, + cssPriority, + } of getColorStyleProperties()) { // try replacing with a random color to see if we actually need to if (cssValue !== replaceColors(cssValue, originalColor, RANDOM_COLOR)) { CSS_COLOR_UPDATORS_BY_COLOR_NAME[colorName].push(themeColor => { - style[cssProperty] = replaceColors(cssValue, originalColor, themeColor); + const newCssValue = replaceColors(cssValue, originalColor, themeColor); + style.setProperty(cssProperty, newCssValue, cssPriority); }); } } diff --git a/frontend/interfaces/underscore.js b/frontend/interfaces/underscore.js index ca19feec88e9..27e7fe0ed5df 100755 --- a/frontend/interfaces/underscore.js +++ b/frontend/interfaces/underscore.js @@ -56,6 +56,8 @@ declare module "underscore" { declare function any(a: Array, pred: (val: T) => boolean): boolean; declare function contains(a: Array, val: T): boolean; + declare function head(a: Array, n?: number): Array; + declare function tail(a: Array, n?: number): Array; declare function initial(a: Array, n?: number): Array; declare function rest(a: Array, index?: number): Array; diff --git a/frontend/parse-deps.js b/frontend/parse-deps.js new file mode 100644 index 000000000000..0524c39d9ea3 --- /dev/null +++ b/frontend/parse-deps.js @@ -0,0 +1,258 @@ +#!/usr/bin/env node + +const fs = require("fs"); +const path = require("path"); + +const glob = 
require("glob");
+const minimatch = require("minimatch");
+const parser = require("@babel/parser");
+const traverse = require("@babel/traverse").default;
+const readline = require("readline");
+
+const PATTERN = "{enterprise/,}frontend/src/**/*.{js,jsx}";
+
+// after webpack.config.js
+const ALIAS = {
+  metabase: "frontend/src/metabase",
+  "metabase-lib": "frontend/src/metabase-lib",
+  "metabase-enterprise": "enterprise/frontend/src/metabase-enterprise",
+  "metabase-types": "frontend/src/metabase-types",
+};
+
+function files() {
+  return glob.sync(PATTERN);
+}
+
+function dependencies() {
+  const deps = files().map(fileName => {
+    const contents = fs.readFileSync(fileName, "utf-8");
+    const options = {
+      allowImportExportEverywhere: true,
+      allowReturnOutsideFunction: true,
+      decoratorsBeforeExport: true,
+      sourceType: "unambiguous",
+      plugins: ["jsx", "flow", "decorators-legacy", "exportDefaultFrom"],
+    };
+    const importList = [];
+    try {
+      const ast = parser.parse(contents, options);
+      traverse(ast, {
+        enter(path) {
+          if (path.node.type === "ImportDeclaration") {
+            importList.push(path.node.source.value);
+          }
+          if (path.node.type === "CallExpression") {
+            const callee = path.node.callee;
+            const args = path.node.arguments;
+            if (callee.type === "Identifier" && callee.name === "require") {
+              if (args.length === 1 && args[0].type === "StringLiteral") {
+                importList.push(args[0].value);
+              }
+            }
+          }
+        },
+      });
+    } catch (e) {
+      console.error(fileName, e.toString());
+      process.exit(-1);
+
+    }
+    const base = path.dirname(fileName) + path.sep;
+    const absoluteImportList = importList
+      .map(name => {
+        const absName = name[0] === "." ? path.normalize(base + name) : name;
+        const parts = absName.split(path.sep);
+        const realPath = ALIAS[parts[0]];
+        parts[0] = realPath ? 
realPath : parts[0]; + const realName = parts.join(path.sep); + return realName; + }) + .map(name => { + if (fs.existsSync(name)) { + if ( + fs.lstatSync(name).isDirectory() && + fs.existsSync(name + "/index.js") + ) { + return name + "/index.js"; + } + return name; + } else if (fs.existsSync(name + ".js")) { + return name + ".js"; + } else if (fs.existsSync(name + ".jsx")) { + return name + ".jsx"; + } + return name; + }) + .filter(name => minimatch(name, PATTERN)); + + return { source: fileName, dependencies: absoluteImportList.sort() }; + }); + return deps; +} + +function dependents() { + let dependents = {}; + dependencies().forEach(dep => { + const { source, dependencies } = dep; + dependencies.forEach(d => { + if (!dependents[d]) { + dependents[d] = []; + } + dependents[d].push(source); + }); + }); + return dependents; +} + +function getDependents(sources) { + const allDependents = dependents(); + let filteredDependents = []; + + sources.forEach(name => { + const list = allDependents[name]; + if (list && Array.isArray(list) && list.length > 0) { + filteredDependents.push(...list); + } + }); + + return Array.from(new Set(filteredDependents)).sort(); // unique +} + +function filterDependents() { + const rl = readline.createInterface({ input: process.stdin }); + + const start = async () => { + let sources = []; + for await (const line of rl) { + const name = line.trim(); + if (name.length > 0) { + sources.push(name); + } + } + const filteredDependents = getDependents(sources); + console.log(filteredDependents.join("\n")); + }; + start(); +} + +function filterAllDependents() { + const rl = readline.createInterface({ input: process.stdin }); + + const start = async () => { + let sources = []; + for await (const line of rl) { + const name = line.trim(); + if (name.length > 0) { + sources.push(name); + } + } + let filteredDependents = getDependents(sources); + + const allDependents = dependents(); + for (let i = 0; i < filteredDependents.length; ++i) { + const name 
= filteredDependents[i]; + const list = allDependents[name]; + if (list && Array.isArray(list) && list.length > 0) { + const newAddition = list.filter(e => filteredDependents.indexOf(e) < 0); + filteredDependents.push(...newAddition); + } + } + console.log(filteredDependents.sort().join("\n")); + }; + start(); +} + +function countDependents() { + const allDependents = dependents(); + const sources = Object.keys(allDependents).sort(); + const tally = sources.map(name => { + return { name, count: allDependents[name].length }; + }); + console.log(tally.map(({ name, count }) => `${count} ${name}`).join("\n")); +} + +function countAllDependents() { + const allDependents = dependents(); + const sources = Object.keys(allDependents).sort(); + const tally = sources.map(name => { + const list = allDependents[name]; + for (let i = 0; i < list.length; ++i) { + const deps = allDependents[list[i]]; + if (deps && Array.isArray(deps) && deps.length > 1) { + const newAddition = deps.filter(e => list.indexOf(e) < 0); + list.push(...newAddition); + } + } + return { name, count: list.length }; + }); + console.log(tally.map(({ name, count }) => `${count} ${name}`).join("\n")); +} + +function matrix() { + const allDependents = dependents(); + const sources = Object.keys(allDependents).sort(); + const width = Math.max(...sources.map(s => s.length)); + const rows = sources.map(name => { + const list = allDependents[name]; + const checks = sources.map(dep => (list.indexOf(dep) < 0 ? 
" " : "x")); + return name.padEnd(width) + " | " + checks.join(""); + }); + console.log(rows.join("\n")); +} + +const USAGE = ` +parse-deps cmd + +cmd must be one of: + + files Display list of source files + dependencies Show the dependencies of each source file + dependents Show the dependents of each source file + filter-dependents Filter direct dependents based on stdin +filter-all-dependents Filter all indirect and direct dependents based on stdin + count-dependents List the total count of direct dependents + count-all-dependents List the total count of its direct and indirect dependents + matrix Display 2-D matrix of dependent relationship +`; + +function main(args) { + const cmd = args[0]; + if (cmd) { + switch (cmd.toLowerCase()) { + case "files": + console.log(files().join("\n")); + break; + case "dependencies": + console.log(JSON.stringify(dependencies(), null, 2)); + break; + case "dependents": + console.log(JSON.stringify(dependents(), null, 2)); + break; + case "filter-dependents": + filterDependents(); + break; + case "filter-all-dependents": + filterAllDependents(); + break; + case "count-dependents": + countDependents(); + break; + case "count-all-dependents": + countAllDependents(); + break; + case "matrix": + matrix(); + break; + default: + console.log(USAGE); + break; + } + } else { + console.log(USAGE); + } +} + +let args = process.argv; +args.shift(); +args.shift(); +main(args); diff --git a/frontend/src/metabase-lib/lib/Question.js b/frontend/src/metabase-lib/lib/Question.js index f90c67333b25..6927ab47c5d9 100644 --- a/frontend/src/metabase-lib/lib/Question.js +++ b/frontend/src/metabase-lib/lib/Question.js @@ -1,5 +1,5 @@ import _ from "underscore"; -import { chain, assoc, dissoc, assocIn } from "icepick"; +import { chain, assoc, dissoc, assocIn, getIn } from "icepick"; // NOTE: the order of these matters due to circular dependency issues import StructuredQuery, { @@ -35,7 +35,10 @@ import { findColumnIndexForColumnSetting, 
syncTableColumnsToQuery, } from "metabase/lib/dataset"; -import { getParametersWithExtras, isTransientId } from "metabase/meta/Card"; +import { + getValueAndFieldIdPopulatedParametersFromCard, + isTransientId, +} from "metabase/meta/Card"; import { parameterToMBQLFilter, normalizeParameterValue, @@ -301,10 +304,16 @@ export default class Question { return this._card && this._card.displayIsLocked; } - // If we're locked to a display that is no longer "sensible", unlock it. - maybeUnlockDisplay(sensibleDisplays): Question { - const locked = - this.displayIsLocked() && sensibleDisplays.includes(this.display()); + // If we're locked to a display that is no longer "sensible", unlock it + // unless it was locked in unsensible + maybeUnlockDisplay(sensibleDisplays, previousSensibleDisplays): Question { + const wasSensible = + previousSensibleDisplays == null || + previousSensibleDisplays.includes(this.display()); + const isSensible = sensibleDisplays.includes(this.display()); + + const shouldUnlock = wasSensible && !isSensible; + const locked = this.displayIsLocked() && !shouldUnlock; return this.setDisplayIsLocked(locked); } @@ -535,13 +544,38 @@ export default class Question { drillPK(field: Field, value: Value): ?Question { const query = this.query(); - if (query instanceof StructuredQuery) { - return query - .reset() - .setTable(field.table) - .filter(["=", ["field", field.id, null], value]) - .question(); + + if (!(query instanceof StructuredQuery)) { + return; } + + const otherPKFilters = query + .filters() + ?.filter(filter => { + const filterField = filter?.field(); + if (!filterField) { + return false; + } + + const isNotSameField = filterField.id !== field.id; + const isPKEqualsFilter = + filterField.isPK() && filter.operatorName() === "="; + const isFromSameTable = filterField.table.id === field.table.id; + + return isPKEqualsFilter && isNotSameField && isFromSameTable; + }) + .map(filter => filter.raw()); + + const filtersToApply = [ + ["=", ["field", 
field.id, null], value], + ...otherPKFilters, + ]; + + const resultedQuery = filtersToApply.reduce((query, filter) => { + return query.addFilter(filter); + }, query.reset().setTable(field.table)); + + return resultedQuery.question(); } _syncStructuredQueryColumnsAndSettings(previousQuestion, previousQuery) { @@ -703,6 +737,11 @@ export default class Question { return Mode.forQuestion(this); } + /** + * Returns true if, based on filters and table columns, the expected result is a single row. + * However, it might not be true when a PK column is not unique, leading to multiple rows. + * Because of that, always check query results in addition to this property. + */ isObjectDetail(): boolean { const mode = this.mode(); return mode ? mode.name() === "object" : false; @@ -785,16 +824,22 @@ export default class Question { originalQuestion, clean = true, query, + includeDisplayIsLocked, }: { originalQuestion?: Question, clean?: boolean, query?: { [string]: any }, + includeDisplayIsLocked?: boolean, } = {}): string { if ( !this.id() || (originalQuestion && this.isDirtyComparedTo(originalQuestion)) ) { - return Urls.question(null, this._serializeForUrl({ clean }), query); + return Urls.question( + null, + this._serializeForUrl({ clean, includeDisplayIsLocked }), + query, + ); } else { return Urls.question(this.card(), "", query); } @@ -969,7 +1014,10 @@ export default class Question { // TODO: Fix incorrect Flow signature parameters(): ParameterObject[] { - return getParametersWithExtras(this.card(), this._parameterValues); + return getValueAndFieldIdPopulatedParametersFromCard( + this.card(), + this._parameterValues, + ); } parametersList(): ParameterObject[] { @@ -1003,7 +1051,11 @@ export default class Question { } // Internal methods - _serializeForUrl({ includeOriginalCardId = true, clean = true } = {}) { + _serializeForUrl({ + includeOriginalCardId = true, + clean = true, + includeDisplayIsLocked = false, + } = {}) { const query = clean ? 
this.query().clean() : this.query(); const cardCopy = { @@ -1019,6 +1071,11 @@ export default class Question { ...(includeOriginalCardId ? { original_card_id: this._card.original_card_id } : {}), + ...(includeDisplayIsLocked + ? { + displayIsLocked: this._card.displayIsLocked, + } + : {}), }; return Card_DEPRECATED.utf8_to_b64url(JSON.stringify(sortObject(cardCopy))); @@ -1039,11 +1096,17 @@ export default class Question { } getUrlWithParameters() { - const question = this.query().isEditable() - ? this.convertParametersToFilters() - : this.markDirty(); // forces use of serialized question url + const question = this.convertParametersToFilters().markDirty(); const query = this.isNative() ? this._parameterValues : undefined; - return question.getUrl({ originalQuestion: this, query }); + return question.getUrl({ + originalQuestion: this, + query, + includeDisplayIsLocked: true, + }); + } + + getModerationReviews() { + return getIn(this, ["_card", "moderation_reviews"]) || []; } } diff --git a/frontend/src/metabase-lib/lib/metadata/Base.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Base.unit.spec.js new file mode 100644 index 000000000000..b7d32dd87636 --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Base.unit.spec.js @@ -0,0 +1,53 @@ +import Base from "./Base"; + +describe("Base", () => { + describe("instantiation", () => { + it("should set properties from `object` on the Base instance", () => { + const instance = new Base({ abc: 123 }); + expect(instance.abc).toEqual(123); + }); + + it("should set ALL enumerable properties of `object`, including properties down the prototype chain", () => { + const object = { + abc: 123, + }; + + Object.defineProperty(object, "anEnumerableProperty", { + enumerable: false, + value: false, + }); + + object.__proto__ = { + secretPrototypeValue: true, + }; + + const instance = new Base(object); + + expect(instance.abc).toEqual(123); + expect(instance.secretPrototypeValue).toBe(true); + 
expect(instance.anEnumerableProperty).toBeUndefined(); + }); + }); + + describe("getPlainObject", () => { + it("returns whatever `object` was provided during instantiation", () => { + const obj = { + abc: 123, + }; + + const instance = new Base(obj); + + expect(instance.getPlainObject()).toBe(obj); + }); + + it("returns whatever `_plainObject` is set to", () => { + const obj1 = {}; + const obj2 = {}; + + const instance = new Base(obj1); + instance._plainObject = obj2; + + expect(instance.getPlainObject()).toBe(obj2); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Database.js b/frontend/src/metabase-lib/lib/metadata/Database.js index ef5d61ec437c..697d2d4475e0 100644 --- a/frontend/src/metabase-lib/lib/metadata/Database.js +++ b/frontend/src/metabase-lib/lib/metadata/Database.js @@ -1,17 +1,14 @@ import Question from "../Question"; import Base from "./Base"; -import Table from "./Table"; -import Schema from "./Schema"; import { memoize, createLookupByProperty } from "metabase-lib/lib/utils"; import { generateSchemaId } from "metabase/schema"; -import type { SchemaName } from "metabase-types/types/Table"; -import type { DatabaseFeature } from "metabase-types/types/Database"; - -type VirtualDatabaseFeature = "join"; +/** + * @typedef { import("./metadata").SchemaName } SchemaName + */ /** * Wrapper class for database metadata objects. Contains {@link Schema}s, {@link Table}s, {@link Metric}s, {@link Segment}s. 
@@ -21,28 +18,35 @@ type VirtualDatabaseFeature = "join"; export default class Database extends Base { // TODO Atte Keinänen 6/11/17: List all fields here (currently only in types/Database) - name: string; - description: ?string; - - tables: Table[]; - schemas: Schema[]; - - auto_run_queries: boolean; - - displayName(): string { + displayName() { return this.name; } // SCHEMAS - schema(schemaName: ?SchemaName) { + /** + * @param {SchemaName} [schemaName] + */ + schema(schemaName) { return this.metadata.schema(generateSchemaId(this.id, schemaName)); } - schemaNames(): SchemaName[] { + schemaNames() { return this.schemas.map(s => s.name).sort((a, b) => a.localeCompare(b)); } + getSchemas() { + return this.schemas; + } + + schemasCount() { + return this.schemas.length; + } + + getTables() { + return this.tables; + } + // TABLES @memoize @@ -57,9 +61,12 @@ export default class Database extends Base { // FEATURES - hasFeature( - feature: null | DatabaseFeature | VirtualDatabaseFeature, - ): boolean { + /** + * @typedef {import("./metadata").DatabaseFeature} DatabaseFeature + * @typedef {"join"} VirtualDatabaseFeature + * @param {DatabaseFeature | VirtualDatabaseFeature} [feature] + */ + hasFeature(feature) { if (!feature) { return true; } @@ -82,13 +89,13 @@ export default class Database extends Base { // QUESTIONS - newQuestion(): Question { + newQuestion() { return this.question() .setDefaultQuery() .setDefaultDisplay(); } - question(query = { "source-table": null }): Question { + question(query = { "source-table": null }) { return Question.create({ metadata: this.metadata, dataset_query: { @@ -99,7 +106,7 @@ export default class Database extends Base { }); } - nativeQuestion(native = {}): Question { + nativeQuestion(native = {}) { return Question.create({ metadata: this.metadata, dataset_query: { @@ -119,7 +126,36 @@ export default class Database extends Base { } /** Returns a database containing only the saved questions from the same database, if any */ - 
savedQuestionsDatabase(): ?Database { + savedQuestionsDatabase() { return this.metadata.databasesList().find(db => db.is_saved_questions); } + + /** + * @private + * @param {number} id + * @param {string} name + * @param {?string} description + * @param {Table[]} tables + * @param {Schema[]} schemas + * @param {Metadata} metadata + * @param {boolean} auto_run_queries + */ + /* istanbul ignore next */ + _constructor( + id, + name, + description, + tables, + schemas, + metadata, + auto_run_queries, + ) { + this.id = id; + this.name = name; + this.description = description; + this.tables = tables; + this.schemas = schemas; + this.metadata = metadata; + this.auto_run_queries = auto_run_queries; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Database.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Database.unit.spec.js new file mode 100644 index 000000000000..599eb61da842 --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Database.unit.spec.js @@ -0,0 +1,247 @@ +import Database from "./Database"; +import Schema from "./Schema"; +import Metadata from "./Metadata"; +import Table from "./Table"; +import Base from "./Base"; +import Question from "../Question"; + +describe("Database", () => { + describe("instantiation", () => { + it("should create an instance of Schema", () => { + expect(new Database()).toBeInstanceOf(Database); + }); + + it("should add `object` props to the instance (because it extends Base)", () => { + expect(new Database()).toBeInstanceOf(Base); + expect(new Database({ foo: "bar" })).toHaveProperty("foo", "bar"); + }); + }); + + describe("displayName", () => { + it("should return the name prop", () => { + expect(new Database({ name: "foo" }).displayName()).toBe("foo"); + }); + }); + + describe("schema", () => { + let schema; + let database; + beforeEach(() => { + schema = new Schema({ id: "123:foo" }); + const metadata = new Metadata({ + schemas: { + "123:foo": schema, + }, + }); + database = new Database({ + id: 123, + metadata, 
+      });
+    });
+
+    it("should return the schema with the given name", () => {
+      expect(database.schema("foo")).toBe(schema);
+    });
+
+    it("should return null when the given schema name does not match a schema", () => {
+      expect(database.schema("bar")).toBe(null);
+    });
+  });
+
+  describe("schemaNames", () => {
+    it("should return a list of schemaNames", () => {
+      const database = new Database({
+        id: 123,
+        schemas: [
+          new Schema({ id: "123:foo", name: "foo" }),
+          new Schema({ id: "123:bar", name: "bar" }),
+        ],
+      });
+      expect(database.schemaNames()).toEqual(["bar", "foo"]);
+    });
+  });
+
+  describe("tablesLookup", () => {
+    it("should return a map of tables keyed by id", () => {
+      const table1 = new Table({ id: 1 });
+      const table2 = new Table({ id: 2 });
+
+      expect(
+        new Database({
+          tables: [],
+        }).tablesLookup(),
+      ).toEqual({});
+
+      expect(
+        new Database({
+          tables: [table1, table2],
+        }).tablesLookup(),
+      ).toEqual({
+        1: table1,
+        2: table2,
+      });
+    });
+  });
+
+  describe("hasFeature", () => {
+    beforeEach(() => {});
+
+    it("returns true when given a falsy `feature`", () => {
+      expect(new Database({}).hasFeature(null)).toBe(true);
+      expect(new Database({}).hasFeature("")).toBe(true);
+    });
+
+    it("should return true when given `feature` is found within the `features` on the instance", () => {
+      expect(new Database({ features: ["foo"] }).hasFeature("foo")).toBe(true);
+    });
+
+    it("should return false when given `feature` is not found within the `features` on the instance", () => {
+      expect(new Database({ features: ["foo"] }).hasFeature("bar")).toBe(false);
+    });
+
+    it("should return false for 'join' even when it exists in `features`", () => {
+      expect(new Database({ features: ["join"] }).hasFeature("join")).toBe(
+        false,
+      );
+    });
+
+    it("should return true for 'join' for a set of other values", () => {
+      ["left-join", "right-join", "inner-join", "full-join"].forEach(
+        feature => {
+          expect(new Database({ features: [feature] 
}).hasFeature("join")).toBe( + true, + ); + }, + ); + }); + }); + + describe("supportsPivots", () => { + it("returns true when `expressions` and `left-join` exist in `features`", () => { + expect( + new Database({ + features: ["foo", "left-join"], + }).supportsPivots(), + ).toBe(false); + + expect( + new Database({ + features: ["expressions", "right-join"], + }).supportsPivots(), + ).toBe(false); + + expect( + new Database({ + features: ["expressions", "left-join"], + }).supportsPivots(), + ).toBe(true); + }); + }); + + describe("question", () => { + it("should create a question using the `metadata` found on the Database instance", () => { + const metadata = new Metadata(); + const database = new Database({ + metadata, + }); + const question = database.question(); + expect(question.metadata()).toBe(metadata); + }); + + it("should create a question using the given Database instance's id in the question's query", () => { + const database = new Database({ + id: 123, + }); + + expect(database.question().datasetQuery()).toEqual({ + database: 123, + query: { + "source-table": null, + }, + type: "query", + }); + + expect(database.question({ foo: "bar" }).datasetQuery()).toEqual({ + database: 123, + query: { + foo: "bar", + }, + type: "query", + }); + }); + }); + + describe("nativeQuestion", () => { + it("should create a native question using the `metadata` found on the Database instance", () => { + const metadata = new Metadata(); + const database = new Database({ + metadata, + }); + const question = database.nativeQuestion(); + expect(question.metadata()).toBe(metadata); + }); + + it("should create a native question using the given Database instance's id in the question's query", () => { + const database = new Database({ + id: 123, + }); + + expect(database.nativeQuestion().datasetQuery()).toEqual({ + database: 123, + native: { + query: "", + "template-tags": {}, + }, + type: "native", + }); + + expect(database.nativeQuestion({ foo: "bar" }).datasetQuery()).toEqual({ + 
database: 123, + native: { + query: "", + "template-tags": {}, + foo: "bar", + }, + type: "native", + }); + }); + }); + + describe("newQuestion", () => { + it("should return new question with defaulted query and display", () => { + const database = new Database({ + id: 123, + }); + + Question.prototype.setDefaultQuery = jest.fn(function() { + return this; + }); + Question.prototype.setDefaultDisplay = jest.fn(function() { + return this; + }); + + const question = database.newQuestion(); + + expect(question).toBeInstanceOf(Question); + expect(Question.prototype.setDefaultDisplay).toHaveBeenCalled(); + expect(Question.prototype.setDefaultQuery).toHaveBeenCalled(); + }); + }); + + describe("savedQuestionsDatabase", () => { + it("should return the 'fake' saved questions database", () => { + const database1 = new Database({ id: 1 }); + const database2 = new Database({ id: 2, is_saved_questions: true }); + const metadata = new Metadata({ + databases: { + 1: database1, + 2: database2, + }, + }); + + database1.metadata = metadata; + + expect(database1.savedQuestionsDatabase()).toBe(database2); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Field.js b/frontend/src/metabase-lib/lib/metadata/Field.js index b8dd19b6336e..2c3cd484b44a 100644 --- a/frontend/src/metabase-lib/lib/metadata/Field.js +++ b/frontend/src/metabase-lib/lib/metadata/Field.js @@ -1,5 +1,4 @@ import Base from "./Base"; -import Table from "./Table"; import moment from "moment"; @@ -34,19 +33,14 @@ import { getFilterOperators, } from "metabase/lib/schema_metadata"; -import type { FieldValues } from "metabase-types/types/Field"; +/** + * @typedef { import("./metadata").FieldValues } FieldValues + */ /** * Wrapper class for field metadata objects. Belongs to a Table. */ export default class Field extends Base { - name: string; - display_name: string; - description: string; - - table: Table; - name_field: ?Field; - parent() { return this.metadata ? 
this.metadata.field(this.parent_id) : null; } @@ -153,7 +147,10 @@ export default class Field extends Base { return isEntityName(this); } - isCompatibleWith(field: Field) { + /** + * @param {Field} field + */ + isCompatibleWith(field) { return ( this.isDate() === field.isDate() || this.isNumeric() === field.isNumeric() || @@ -161,7 +158,10 @@ export default class Field extends Base { ); } - fieldValues(): FieldValues { + /** + * @returns {FieldValues} + */ + fieldValues() { return getFieldValues(this._object); } @@ -282,8 +282,9 @@ export default class Field extends Base { /** * Returns the remapped field, if any + * @return {?Field} */ - remappedField(): ?Field { + remappedField() { const displayFieldId = this.dimensions && this.dimensions.human_readable_field_id; if (displayFieldId != null) { @@ -299,8 +300,9 @@ export default class Field extends Base { /** * Returns the human readable remapped value, if any + * @returns {?string} */ - remappedValue(value): ?string { + remappedValue(value) { // TODO: Ugh. Should this be handled further up by the parameter widget? if (this.isNumeric() && typeof value !== "number") { value = parseFloat(value); @@ -310,8 +312,9 @@ export default class Field extends Base { /** * Returns whether the field has a human readable remapped value for this value + * @returns {?string} */ - hasRemappedValue(value): ?string { + hasRemappedValue(value) { // TODO: Ugh. Should this be handled further up by the parameter widget? if (this.isNumeric() && typeof value !== "number") { value = parseFloat(value); @@ -321,8 +324,9 @@ export default class Field extends Base { /** * Returns true if this field can be searched, e.x. in filter or parameter widgets + * @returns {boolean} */ - isSearchable(): boolean { + isSearchable() { // TODO: ...? 
return this.isString(); } @@ -333,8 +337,39 @@ export default class Field extends Base { /** * Returns a FKDimension for this field and the provided field + * @param {Field} foreignField + * @return {Dimension} */ - foreign(foreignField: Field): Dimension { + foreign(foreignField) { return this.dimension().foreign(foreignField.dimension()); } + + /** + * @private + * @param {number} id + * @param {string} name + * @param {string} display_name + * @param {string} description + * @param {Table} table + * @param {?Field} name_field + * @param {Metadata} metadata + */ + /* istanbul ignore next */ + _constructor( + id, + name, + display_name, + description, + table, + name_field, + metadata, + ) { + this.id = id; + this.name = name; + this.display_name = display_name; + this.description = description; + this.table = table; + this.name_field = name_field; + this.metadata = metadata; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Field.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Field.unit.spec.js new file mode 100644 index 000000000000..6eaddf76dc27 --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Field.unit.spec.js @@ -0,0 +1,363 @@ +import Field from "./Field"; +import Table from "./Table"; +import Schema from "./Schema"; +import Metadata from "./Metadata"; +import Base from "./Base"; +import Dimension from "../Dimension"; + +describe("Field", () => { + describe("instantiation", () => { + it("should create an instance of Schema", () => { + expect(new Field()).toBeInstanceOf(Field); + }); + + it("should add `object` props to the instance (because it extends Base)", () => { + expect(new Field()).toBeInstanceOf(Base); + expect(new Field({ foo: "bar" })).toHaveProperty("foo", "bar"); + }); + }); + + describe("parent", () => { + it("should return null when `metadata` does not exist on instance", () => { + expect(new Field().parent()).toBeNull(); + }); + + it("should return the field that matches the instance's `parent_id` when `metadata` 
exists on the instance", () => { + const parentField = new Field({ + id: 1, + }); + + const metadata = new Metadata({ + fields: { + 1: parentField, + }, + }); + + const field = new Field({ + parent_id: 1, + id: 2, + metadata, + }); + + expect(field.parent()).toBe(parentField); + }); + }); + + describe("path", () => { + it("should return list of fields starting with instance, ending with root parent", () => { + const rootField = new Field({ + id: 1, + }); + + const parentField = new Field({ + id: 2, + parent_id: 1, + }); + + const metadata = new Metadata({ + fields: { + 1: rootField, + 2: parentField, + }, + }); + + parentField.metadata = metadata; + rootField.metadata = metadata; + + const field = new Field({ + parent_id: 2, + id: 3, + metadata, + }); + + expect(field.path()).toEqual([rootField, parentField, field]); + }); + }); + + describe("displayName", () => { + it("should return a field's display name", () => { + expect(new Field({ name: "foo" }).displayName()).toBe("foo"); + }); + + it("should prioritize the `display_name` field over `name`", () => { + expect( + new Field({ display_name: "bar", name: "foo" }).displayName(), + ).toBe("bar"); + }); + + it("should prioritize the name in the field's `dimensions` property if it has one", () => { + const field = new Field({ + dimensions: { name: "dimensions" }, + display_name: "display", + }); + + expect(field.displayName()).toBe("dimensions"); + }); + + describe("includePath flag", () => { + let field; + beforeEach(() => { + const rootField = new Field({ + id: 1, + name: "rootField", + }); + + const parentField = new Field({ + id: 2, + parent_id: 1, + name: "parentField", + }); + + const metadata = new Metadata({ + fields: { + 1: rootField, + 2: parentField, + }, + }); + + parentField.metadata = metadata; + rootField.metadata = metadata; + + field = new Field({ + parent_id: 2, + id: 3, + metadata, + name: "field", + }); + }); + + it("should add parent field display names to the field's display name when enabled", 
() => { + expect(field.displayName({ includePath: true })).toBe( + "rootField: parentField: field", + ); + }); + + it("should be enabled by default", () => { + expect(field.displayName({ includePath: true })).toBe( + field.displayName(), + ); + }); + + it("should exclude parent field display names when disabled", () => { + expect(field.displayName({ includePath: false })).toBe("field"); + }); + }); + + describe("includeTable flag", () => { + let field; + beforeEach(() => { + field = new Field({ + id: 1, + name: "field", + }); + }); + + it("should do nothing when there is no table on the field instance", () => { + expect(field.displayName({ includeTable: true })).toBe("field"); + }); + + it("should add the table name to the start of the field name", () => { + field.table = new Table({ + display_name: "table", + }); + + expect(field.displayName({ includeTable: true })).toBe("table → field"); + }); + }); + + describe("includeSchema flag", () => { + let field; + beforeEach(() => { + field = new Field({ + id: 1, + name: "field", + }); + }); + + it("won't do anything if enabled and includeTable is not enabled", () => { + expect(field.displayName({ includeSchema: true })).toBe("field"); + }); + + it("should add a combined schema + table display name to the start of the field name", () => { + field.table = new Table({ + display_name: "table", + schema: new Schema({ + name: "schema", + }), + }); + + expect( + field.displayName({ includeTable: true, includeSchema: true }), + ).toBe("Schema.table → field"); + }); + }); + }); + + describe("targetObjectName", () => { + it("should return the display name of the field stripped of an appended id", () => { + const field = new Field({ + name: "field id", + }); + + expect(field.targetObjectName()).toBe("field"); + }); + }); + + describe("dimension", () => { + it("should return the field's dimension when the id is an mbql field", () => { + const field = new Field({ + id: ["field", 123, null], + }); + + const dimension = 
field.dimension(); + + expect(dimension).toBeInstanceOf(Dimension); + expect(dimension.fieldIdOrName()).toBe(123); + }); + + it("should return the field's dimension when the id is not an mbql field", () => { + const field = new Field({ + id: 123, + }); + + const dimension = field.dimension(); + + expect(dimension).toBeInstanceOf(Dimension); + expect(dimension.fieldIdOrName()).toBe(123); + }); + }); + + describe("getDefaultDateTimeUnit", () => { + describe("when the field is of type `type/DateTime`", () => { + it("should return 'day'", () => { + const field = new Field({ + fingerprint: { + type: { + "type/Number": {}, + }, + }, + }); + + expect(field.getDefaultDateTimeUnit()).toBe("day"); + }); + }); + }); + + describe("when field is of type `type/DateTime`", () => { + it("should return a time unit depending on the number of days in the 'fingerprint'", () => { + const field = new Field({ + fingerprint: { + type: { + "type/DateTime": { + earliest: "2019-03-01T00:00:00Z", + latest: "2021-01-01T00:00:00Z", + }, + }, + }, + }); + + expect(field.getDefaultDateTimeUnit()).toBe("month"); + }); + }); + + describe("remappedField", () => { + it("should return the 'human readable' field tied to the field's dimension", () => { + const field1 = new Field({ id: 1 }); + const field2 = new Field({ id: 2 }); + const metadata = new Metadata({ + fields: { + 1: field1, + 2: field2, + }, + }); + + const field = new Field({ + id: 3, + dimensions: { + human_readable_field_id: 1, + }, + }); + field.metadata = metadata; + + expect(field.remappedField()).toBe(field1); + }); + + it("should return the field's name_field", () => { + const nameField = new Field(); + const field = new Field({ + id: 3, + name_field: nameField, + }); + + expect(field.remappedField()).toBe(nameField); + }); + + it("should return null when the field has no name_field or no dimension with a 'human readable' field", () => { + expect(new Field().remappedField()).toBe(null); + }); + }); + + describe("remappedValue", () 
=> { + it("should call a given value using the instance's remapping property", () => { + const field = new Field({ + remapping: { + get: () => 1, + }, + }); + + expect(field.remappedValue(2)).toBe(1); + }); + + it("should convert a numeric field into a number if it is not a number", () => { + const field = new Field({ + isNumeric: () => true, + remapping: { + get: num => num, + }, + }); + + expect(field.remappedValue("2.5rem")).toBe(2.5); + }); + }); + + describe("hasRemappedValue", () => { + it("should call a given value using the instance's remapping property", () => { + const field = new Field({ + remapping: { + has: () => true, + }, + }); + + expect(field.hasRemappedValue(2)).toBe(true); + }); + + it("should convert a numeric field into a number if it is not a number", () => { + const field = new Field({ + isNumeric: () => true, + remapping: { + has: num => typeof num === "number", + }, + }); + + expect(field.hasRemappedValue("2.5rem")).toBe(true); + }); + }); + + describe("isSearchable", () => { + it("should be true when the field is a string", () => { + const field = new Field({ + isString: () => true, + }); + + expect(field.isSearchable()).toBe(true); + }); + + it("should be false when the field is not a string", () => { + const field = new Field({ + isString: () => false, + }); + + expect(field.isSearchable()).toBe(false); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Metadata.js b/frontend/src/metabase-lib/lib/metadata/Metadata.js index 6551d86603e1..03d16d05c7f7 100644 --- a/frontend/src/metabase-lib/lib/metadata/Metadata.js +++ b/frontend/src/metabase-lib/lib/metadata/Metadata.js @@ -2,33 +2,26 @@ import _ from "underscore"; import Base from "./Base"; -import Database from "./Database"; -import Table from "./Table"; -import Schema from "./Schema"; -import Field from "./Field"; -import Segment from "./Segment"; -import Metric from "./Metric"; - import Question from "../Question"; -import type { DatabaseId } from 
"metabase-types/types/Database"; -import type { TableId } from "metabase-types/types/Table"; -import type { FieldId } from "metabase-types/types/Field"; -import type { MetricId } from "metabase-types/types/Metric"; -import type { SegmentId } from "metabase-types/types/Segment"; +/** + * @typedef { import("./metadata").DatabaseId } DatabaseId + * @typedef { import("./metadata").SchemaId } SchemaId + * @typedef { import("./metadata").TableId } TableId + * @typedef { import("./metadata").FieldId } FieldId + * @typedef { import("./metadata").MetricId } MetricId + * @typedef { import("./metadata").SegmentId } SegmentId + */ /** * Wrapper class for the entire metadata store */ export default class Metadata extends Base { - databases: { [id: DatabaseId]: Database }; - tables: { [id: TableId]: Table }; - fields: { [id: FieldId]: Field }; - metrics: { [id: MetricId]: Metric }; - segments: { [id: SegmentId]: Segment }; - - // DEPRECATED: this won't be sorted or filtered in a meaningful way - databasesList({ savedQuestions = true } = {}): Database[] { + /** + * @deprecated this won't be sorted or filtered in a meaningful way + * @returns {Database[]} + */ + databasesList({ savedQuestions = true } = {}) { return _.chain(this.databases) .values() .filter(db => savedQuestions || !db.is_saved_questions) @@ -36,46 +29,98 @@ export default class Metadata extends Base { .value(); } - // DEPRECATED: this won't be sorted or filtered in a meaningful way - tablesList(): Database[] { - return (Object.values(this.tables): Database[]); + /** + * @deprecated this won't be sorted or filtered in a meaningful way + * @returns {Table[]} + */ + tablesList() { + return Object.values(this.tables); } - // DEPRECATED: this won't be sorted or filtered in a meaningful way - metricsList(): Metric[] { - return (Object.values(this.metrics): Metric[]); + /** + * @deprecated this won't be sorted or filtered in a meaningful way + * @returns {Metric[]} + */ + metricsList() { + return 
Object.values(this.metrics); } - // DEPRECATED: this won't be sorted or filtered in a meaningful way - segmentsList(): Metric[] { - return (Object.values(this.segments): Segment[]); + /** + * @deprecated this won't be sorted or filtered in a meaningful way + * @returns {Segment[]} + */ + segmentsList() { + return Object.values(this.segments); } - segment(segmentId): ?Segment { + /** + * @param {SegmentId} segmentId + * @returns {?Segment} + */ + + segment(segmentId) { return (segmentId != null && this.segments[segmentId]) || null; } - metric(metricId): ?Metric { + /** + * @param {MetricId} metricId + * @returns {?Metric} + */ + metric(metricId) { return (metricId != null && this.metrics[metricId]) || null; } - database(databaseId): ?Database { + /** + * @param {DatabaseId} databaseId + * @returns {?Database} + */ + database(databaseId) { return (databaseId != null && this.databases[databaseId]) || null; } - schema(schemaId): ?Schema { + /** + * @param {SchemaId} schemaId + * @returns {Schema} + */ + schema(schemaId) { return (schemaId != null && this.schemas[schemaId]) || null; } - table(tableId): ?Table { + /** + * + * @param {TableId} tableId + * @returns {?Table} + */ + table(tableId) { return (tableId != null && this.tables[tableId]) || null; } - field(fieldId): ?Field { + /** + * @param {FieldId} fieldId + * @returns {?Field} + */ + field(fieldId) { return (fieldId != null && this.fields[fieldId]) || null; } question(card) { return new Question(card, this); } + + /** + * @private + * @param {Object.} databases + * @param {Object.} tables + * @param {Object.} fields + * @param {Object.} metrics + * @param {Object.} segments + */ + /* istanbul ignore next */ + _constructor(databases, tables, fields, metrics, segments) { + this.databases = databases; + this.tables = tables; + this.fields = fields; + this.metrics = metrics; + this.segments = segments; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Metadata.unit.spec.js 
b/frontend/src/metabase-lib/lib/metadata/Metadata.unit.spec.js new file mode 100644 index 000000000000..36475e355b6c --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Metadata.unit.spec.js @@ -0,0 +1,185 @@ +import Metadata from "./Metadata"; +import Base from "./Base"; + +import Database from "./Database"; +import Table from "./Table"; +import Schema from "./Schema"; +import Field from "./Field"; +import Segment from "./Segment"; +import Metric from "./Metric"; +import Question from "../Question"; + +describe("Metadata", () => { + describe("instantiation", () => { + it("should create an instance of Metadata", () => { + expect(new Metadata()).toBeInstanceOf(Metadata); + }); + + it("should add `object` props to the instance (because it extends Base)", () => { + expect(new Metadata()).toBeInstanceOf(Base); + expect(new Metadata({ foo: "bar" })).toHaveProperty("foo", "bar"); + }); + }); + + describe("databasesList (deprecated)", () => { + let databases; + let databaseA; + let databaseB; + let databaseC; + + beforeEach(() => { + databaseA = new Database({ id: 2, name: "A", is_saved_questions: true }); + databaseB = new Database({ id: 3, name: "B" }); + databaseC = new Database({ id: 1, name: "C" }); + + databases = { + 1: databaseC, + 2: databaseA, + 3: databaseB, + }; + }); + + it("should return a sorted list of database objects found on the metadata instance", () => { + const metadata = new Metadata({ + databases, + }); + + expect(metadata.databasesList()).toEqual([ + databaseA, + databaseB, + databaseC, + ]); + }); + + it("should return all databases when the `savedQuestions` flag is true", () => { + const metadata = new Metadata({ + databases, + }); + + expect( + metadata.databasesList({ + savedQuestions: true, + }), + ).toEqual(metadata.databasesList()); + }); + + it("should exclude the 'is_saved_questions' db when the `savedQuestions` flag is false", () => { + const metadata = new Metadata({ + databases, + }); + + expect( + metadata.databasesList({ + 
savedQuestions: false, + }), + ).toEqual([databaseB, databaseC]); + }); + }); + + describe("tablesList (deprecated)", () => { + it("should return a list of table objects found on the instance", () => { + const tableA = new Table({ id: 1, name: "A" }); + const tableB = new Table({ id: 2, name: "B" }); + + const tables = { + 1: tableA, + 2: tableB, + }; + + const metadata = new Metadata({ + tables, + }); + + expect(metadata.tablesList()).toEqual([tableA, tableB]); + }); + }); + + describe("metricsList (deprecated)", () => { + it("should return a list of metric objects found on the instance", () => { + const metricA = new Metric({ id: 1, name: "A" }); + const metricB = new Metric({ id: 2, name: "B" }); + + const metrics = { + 1: metricA, + 2: metricB, + }; + + const metadata = new Metadata({ + metrics, + }); + + expect(metadata.metricsList()).toEqual([metricA, metricB]); + }); + }); + + describe("segmentsList (deprecated)", () => { + it("should return a list of segment objects found on the instance", () => { + const segmentA = new Segment({ id: 1, name: "A" }); + const segmentB = new Segment({ id: 2, name: "B" }); + + const segments = { + 1: segmentA, + 2: segmentB, + }; + + const metadata = new Metadata({ + segments, + }); + + expect(metadata.segmentsList()).toEqual([segmentA, segmentB]); + }); + }); + + describe("question", () => { + it("should return a new question using the metadata instance", () => { + const card = { name: "Question", id: 1 }; + const metadata = new Metadata(); + const question = metadata.question(card); + + expect(question).toBeInstanceOf(Question); + expect(question.card()).toBe(card); + expect(question.metadata()).toBe(metadata); + }); + }); + + [ + ["segment", obj => new Segment(obj)], + ["metric", obj => new Metric(obj)], + ["database", obj => new Database(obj)], + ["schema", obj => new Schema(obj)], + ["table", obj => new Table(obj)], + ["field", obj => new Field(obj)], + ].forEach(([fnName, instantiate]) => { + describe(fnName, () => { + 
let instanceA; + let instanceB; + let metadata; + beforeEach(() => { + instanceA = instantiate({ id: 1, name: "A" }); + instanceB = instantiate({ id: 2, name: "B" }); + + const instances = { + 1: instanceA, + 2: instanceB, + }; + + metadata = new Metadata({ + [`${fnName}s`]: instances, + }); + }); + + it(`should return the ${fnName} with the given id`, () => { + expect(metadata[fnName](1)).toBe(instanceA); + expect(metadata[fnName](2)).toBe(instanceB); + }); + + it("should return null when the id matches nothing", () => { + expect(metadata[fnName](3)).toBeNull(); + }); + + it("should return null when the id is nil", () => { + expect(metadata[fnName]()).toBeNull(); + }); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Metric.js b/frontend/src/metabase-lib/lib/metadata/Metric.js index 76a80de261a5..e7e221133755 100644 --- a/frontend/src/metabase-lib/lib/metadata/Metric.js +++ b/frontend/src/metabase-lib/lib/metadata/Metric.js @@ -1,23 +1,21 @@ import Base from "./Base"; -import Database from "./Database"; -import Table from "./Table"; -import type { Aggregation } from "metabase-types/types/Query"; + +/** + * @typedef { import("./metadata").Aggregation } Aggregation + */ /** * Wrapper class for a metric. 
Belongs to a {@link Database} and possibly a {@link Table} */ export default class Metric extends Base { - name: string; - description: string; - - database: Database; - table: Table; - - displayName(): string { + displayName() { return this.name; } - aggregationClause(): Aggregation { + /** + * @returns {Aggregation} + */ + aggregationClause() { return ["metric", this.id]; } @@ -49,7 +47,28 @@ export default class Metric extends Base { } } - isActive(): boolean { + isActive() { return !this.archived; } + + /** + * @private + * @param {string} name + * @param {string} description + * @param {Database} database + * @param {Table} table + * @param {number} id + * @param {StructuredQuery} definition + * @param {boolean} archived + */ + /* istanbul ignore next */ + _constructor(name, description, database, table, id, definition, archived) { + this.name = name; + this.description = description; + this.database = database; + this.table = table; + this.id = id; + this.definition = definition; + this.archived = archived; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Schema.js b/frontend/src/metabase-lib/lib/metadata/Schema.js index 3f7aea7d497f..db1b0bf90c57 100644 --- a/frontend/src/metabase-lib/lib/metadata/Schema.js +++ b/frontend/src/metabase-lib/lib/metadata/Schema.js @@ -1,6 +1,4 @@ import Base from "./Base"; -import Database from "./Database"; -import Table from "./Table"; import { titleize, humanize } from "metabase/lib/formatting"; @@ -8,10 +6,24 @@ import { titleize, humanize } from "metabase/lib/formatting"; * Wrapper class for a {@link Database} schema. Contains {@link Table}s. 
*/ export default class Schema extends Base { - database: Database; - tables: Table[]; - displayName() { return titleize(humanize(this.name)); } + + getTables() { + return this.tables; + } + + /** + * @private + * @param {string} name + * @param {Database} database + * @param {Table[]} tables + */ + /* istanbul ignore next */ + _constructor(name, database, tables) { + this.name = name; + this.database = database; + this.tables = tables; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Schema.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Schema.unit.spec.js new file mode 100644 index 000000000000..4a9afea2b1cd --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Schema.unit.spec.js @@ -0,0 +1,22 @@ +import Schema from "./Schema"; +import Base from "./Base"; + +describe("Schema", () => { + describe("instantiation", () => { + it("should create an instance of Schema", () => { + expect(new Schema()).toBeInstanceOf(Schema); + }); + + it("should add `object` props to the instance (because it extends Base)", () => { + expect(new Schema()).toBeInstanceOf(Base); + expect(new Schema({ foo: "bar" })).toHaveProperty("foo", "bar"); + }); + }); + + describe("displayName", () => { + it("should return a formatted `name` string", () => { + const schema = new Schema({ name: "foo_bar" }); + expect(schema.displayName()).toBe("Foo Bar"); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Segment.js b/frontend/src/metabase-lib/lib/metadata/Segment.js index 697f55edadc6..7a6ceb5cb07b 100644 --- a/frontend/src/metabase-lib/lib/metadata/Segment.js +++ b/frontend/src/metabase-lib/lib/metadata/Segment.js @@ -1,27 +1,44 @@ import Base from "./Base"; -import Database from "./Database"; -import Table from "./Table"; -import type { FilterClause } from "metabase-types/types/Query"; + +/** + * @typedef { import("./metadata").FilterClause } FilterClause + */ /** * Wrapper class for a segment. 
Belongs to a {@link Database} and possibly a {@link Table} */ export default class Segment extends Base { - name: string; - description: string; - - database: Database; - table: Table; - - displayName(): string { + displayName() { return this.name; } - filterClause(): FilterClause { + /** + * @returns {FilterClause} + */ + filterClause() { return ["segment", this.id]; } - isActive(): boolean { + isActive() { return !this.archived; } + + /** + * @private + * @param {string} name + * @param {string} description + * @param {Database} database + * @param {Table} table + * @param {number} id + * @param {boolean} archived + */ + /* istanbul ignore next */ + _constructor(name, description, database, table, id, archived) { + this.name = name; + this.description = description; + this.database = database; + this.table = table; + this.id = id; + this.archived = archived; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Segment.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Segment.unit.spec.js new file mode 100644 index 000000000000..d26d110654be --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Segment.unit.spec.js @@ -0,0 +1,37 @@ +import Segment from "./Segment"; +import Base from "./Base"; + +describe("Segment", () => { + describe("instantiation", () => { + it("should create an instance of Segment", () => { + expect(new Segment()).toBeInstanceOf(Segment); + }); + + it("should add `object` props to the instance (because it extends Base)", () => { + expect(new Segment()).toBeInstanceOf(Base); + expect(new Segment({ foo: "bar" })).toHaveProperty("foo", "bar"); + }); + }); + + describe("displayName", () => { + it("should return the `name` property found on the instance", () => { + expect(new Segment({ name: "foo" }).displayName()).toBe("foo"); + }); + }); + + describe("filterClause", () => { + it("should return a filter clause", () => { + expect(new Segment({ id: 123 }).filterClause()).toEqual(["segment", 123]); + }); + }); + + describe("isActive", () 
=> { + it("should return true if the segment is not archived", () => { + expect(new Segment({ archived: false }).isActive()).toBe(true); + }); + + it("should return false if the segment is archived", () => { + expect(new Segment({ archived: true }).isActive()).toBe(false); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Table.js b/frontend/src/metabase-lib/lib/metadata/Table.js index 27cff3e45c71..628ed812128d 100644 --- a/frontend/src/metabase-lib/lib/metadata/Table.js +++ b/frontend/src/metabase-lib/lib/metadata/Table.js @@ -2,36 +2,20 @@ import Question from "../Question"; import Base from "./Base"; -import Database from "./Database"; -import Schema from "./Schema"; -import Field from "./Field"; - -import Dimension from "../Dimension"; import { singularize } from "metabase/lib/formatting"; import { getAggregationOperatorsWithFields } from "metabase/lib/schema_metadata"; import { memoize, createLookupByProperty } from "metabase-lib/lib/utils"; -import type { SchemaName } from "metabase-types/types/Table"; -import type StructuredQuery from "metabase-lib/lib/queries/StructuredQuery"; - -type EntityType = string; // TODO: move somewhere central +/** + * @typedef { import("./metadata").SchemaName } SchemaName + * @typedef { import("./metadata").EntityType } EntityType + * @typedef { import("./metadata").StructuredQuery } StructuredQuery + */ /** This is the primary way people interact with tables */ export default class Table extends Base { - description: string; - - db: Database; - - schema: ?Schema; - // @deprecated: use schema.name (all tables should have a schema object, in theory) - schema_name: ?SchemaName; - - fields: Field[]; - - entity_type: ?EntityType; - - hasSchema(): boolean { + hasSchema() { return (this.schema_name && this.db && this.db.schemas.length > 1) || false; } @@ -40,13 +24,13 @@ export default class Table extends Base { return this.db; } - newQuestion(): Question { + newQuestion() { return this.question() 
.setDefaultQuery() .setDefaultDisplay(); } - question(): Question { + question() { return Question.create({ databaseId: this.db && this.db.id, tableId: this.id, @@ -54,7 +38,7 @@ export default class Table extends Base { }); } - isSavedQuestion(): boolean { + isSavedQuestion() { return this.savedQuestionId() !== null; } @@ -63,13 +47,16 @@ export default class Table extends Base { return match ? parseInt(match[1]) : null; } - query(query = {}): StructuredQuery { + /** + * @returns {StructuredQuery} + */ + query(query = {}) { return this.question() .query() .updateQuery(q => ({ ...q, ...query })); } - dimensions(): Dimension[] { + dimensions() { return this.fields.map(field => field.dimension()); } @@ -89,7 +76,7 @@ export default class Table extends Base { return singularize(this.displayName()); } - dateFields(): Field[] { + dateFields() { return this.fields.filter(field => field.isDate()); } @@ -130,4 +117,23 @@ export default class Table extends Base { get fields_lookup() { return this.fieldsLookup(); } + + /** + * @private + * @param {string} description + * @param {Database} db + * @param {Schema?} schema + * @param {SchemaName} [schema_name] + * @param {Field[]} fields + * @param {EntityType} entity_type + */ + /* istanbul ignore next */ + _constructor(description, db, schema, schema_name, fields, entity_type) { + this.description = description; + this.db = db; + this.schema = schema; + this.schema_name = schema_name; + this.fields = fields; + this.entity_type = entity_type; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/metadata.d.ts b/frontend/src/metabase-lib/lib/metadata/metadata.d.ts new file mode 100644 index 000000000000..d5df84f67ca0 --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/metadata.d.ts @@ -0,0 +1,39 @@ +// to help declaring nominal types +interface Flavoring { + _type?: FlavorT; +} +export type Flavor = T & Flavoring; + + +export type EntityType = Flavor; +export type SchemaName = Flavor; + + +// TODO: move to types.d.ts 
+ +export type DatabaseId = Flavor; +export type TableId = Flavor; +export type FieldId = Flavor; +export type MetricId = Flavor; +export type SegmentId = Flavor; + +export type SchemaId = Flavor; + +export type DatabaseFeature = + | "basic-aggregations" + | "standard-deviation-aggregations" + | "expression-aggregations" + | "foreign-keys" + | "native-parameters" + | "nested-queries" + | "expressions" + | "case-sensitivity-string-filter-options" + | "binning"; + +export type FieldValues = Flavor; + + +// TODO: move to query.d.ts +export type Aggregation = Flavor; +export type FilterClause = Flavor; +export type StructuredQuery = Flavor; \ No newline at end of file diff --git a/frontend/src/metabase-lib/lib/queries/InternalQuery.js b/frontend/src/metabase-lib/lib/queries/InternalQuery.js index 3dac6aa7f40b..e4829a804c36 100644 --- a/frontend/src/metabase-lib/lib/queries/InternalQuery.js +++ b/frontend/src/metabase-lib/lib/queries/InternalQuery.js @@ -9,7 +9,7 @@ import AtomicQuery from "metabase-lib/lib/queries/AtomicQuery"; // args: [], // } export default class InternalQuery extends AtomicQuery { - static isDatasetQueryType(datasetQuery: DatasetQuery): boolean { + static isDatasetQueryType(datasetQuery: DatasetQuery) { return datasetQuery.type === "internal"; } } diff --git a/frontend/src/metabase-lib/lib/queries/NativeQuery.js b/frontend/src/metabase-lib/lib/queries/NativeQuery.js index 87b7592b20a0..94a1abb66055 100644 --- a/frontend/src/metabase-lib/lib/queries/NativeQuery.js +++ b/frontend/src/metabase-lib/lib/queries/NativeQuery.js @@ -24,7 +24,7 @@ import type { DatabaseEngine, DatabaseId } from "metabase-types/types/Database"; import AtomicQuery from "metabase-lib/lib/queries/AtomicQuery"; -import Dimension, { TemplateTagDimension } from "../Dimension"; +import Dimension, { TemplateTagDimension, FieldDimension } from "../Dimension"; import Variable, { TemplateTagVariable } from "../Variable"; import DimensionOptions from "../DimensionOptions"; @@ -76,7 
+76,7 @@ export default class NativeQuery extends AtomicQuery { this._nativeDatasetQuery = (datasetQuery: NativeDatasetQuery); } - static isDatasetQueryType(datasetQuery: DatasetQuery): boolean { + static isDatasetQueryType(datasetQuery: DatasetQuery) { return datasetQuery && datasetQuery.type === NATIVE_QUERY_TEMPLATE.type; } @@ -133,7 +133,7 @@ export default class NativeQuery extends AtomicQuery { /** * Returns true if the database metadata (or lack thererof indicates the user can modify and run this query */ - readOnly(): boolean { + readOnly() { const database = this.database(); return !database || database.native_permissions !== "write"; } @@ -168,12 +168,12 @@ export default class NativeQuery extends AtomicQuery { return this; } - hasWritePermission(): boolean { + hasWritePermission() { const database = this.database(); return database != null && database.native_permissions === "write"; } - supportsNativeParameters(): boolean { + supportsNativeParameters() { const database = this.database(); return ( database != null && _.contains(database.features, "native-parameters") @@ -270,7 +270,7 @@ export default class NativeQuery extends AtomicQuery { templateTagsMap(): TemplateTags { return getIn(this.datasetQuery(), ["native", "template-tags"]) || {}; } - allTemplateTagsAreValid(): boolean { + allTemplateTagsAreValid() { return this.templateTags().every( // field filters require a field t => !(t.type === "dimension" && t.dimension == null), @@ -458,4 +458,26 @@ export default class NativeQuery extends AtomicQuery { } return {}; } + + dependentMetadata() { + const templateTags = this.templateTags(); + + return templateTags + .filter( + tag => + tag.type === "dimension" && + FieldDimension.isFieldClause(tag.dimension), + ) + .map(tag => { + const dimension = FieldDimension.parseMBQL( + tag.dimension, + this.metadata(), + ); + + return { + type: "field", + id: dimension.field().id, + }; + }); + } } diff --git a/frontend/src/metabase-lib/lib/queries/StructuredQuery.js 
b/frontend/src/metabase-lib/lib/queries/StructuredQuery.js index 82d6f5c8d646..fc467a71254e 100644 --- a/frontend/src/metabase-lib/lib/queries/StructuredQuery.js +++ b/frontend/src/metabase-lib/lib/queries/StructuredQuery.js @@ -72,7 +72,7 @@ export const STRUCTURED_QUERY_TEMPLATE = { * A wrapper around an MBQL (`query` type @type {DatasetQuery}) object */ export default class StructuredQuery extends AtomicQuery { - static isDatasetQueryType(datasetQuery: DatasetQuery): boolean { + static isDatasetQueryType(datasetQuery: DatasetQuery) { return datasetQuery && datasetQuery.type === STRUCTURED_QUERY_TEMPLATE.type; } @@ -117,7 +117,7 @@ export default class StructuredQuery extends AtomicQuery { /** * @returns true if this query is in a state where it can be edited. Must have database and table set, and metadata for the table loaded. */ - isEditable(): boolean { + isEditable() { return this.hasMetadata(); } @@ -158,7 +158,7 @@ export default class StructuredQuery extends AtomicQuery { /** * Returns true if the database metadata (or lack thererof indicates the user can modify and run this query */ - readOnly(): boolean { + readOnly() { return !this.database(); } @@ -361,7 +361,11 @@ export default class StructuredQuery extends AtomicQuery { } cleanJoins(): StructuredQuery { - return this._cleanClauseList("joins"); + let query = this; + this.joins().forEach((join, index) => { + query = query.updateJoin(index, join.clean()); + }); + return query._cleanClauseList("joins"); } cleanExpressions(): StructuredQuery { @@ -404,7 +408,7 @@ export default class StructuredQuery extends AtomicQuery { } } - isValid(): boolean { + isValid() { if (!this.hasData()) { return false; } @@ -647,21 +651,21 @@ export default class StructuredQuery extends AtomicQuery { /** * @returns true if the aggregation can be removed */ - canRemoveAggregation(): boolean { + canRemoveAggregation() { return this.aggregations().length > 1; } /** * @returns true if the query has no aggregation */ - 
isBareRows(): boolean { + isBareRows() { return !this.hasAggregations(); } /** * @returns true if the query has no aggregation or breakouts */ - isRaw(): boolean { + isRaw() { return !this.hasAggregations() && !this.hasBreakouts(); } @@ -734,14 +738,14 @@ export default class StructuredQuery extends AtomicQuery { /** * @returns whether a new breakout can be added or not */ - canAddBreakout(): boolean { + canAddBreakout() { return this.breakoutOptions().count > 0; } /** * @returns whether the current query has a valid breakout */ - hasValidBreakout(): boolean { + hasValidBreakout() { const breakouts = this.breakouts(); return breakouts.length > 0 && breakouts[0].isValid(); } @@ -878,7 +882,7 @@ export default class StructuredQuery extends AtomicQuery { /** * @returns whether a new filter can be added or not */ - canAddFilter(): boolean { + canAddFilter() { return ( Q.canAddFilter(this.query()) && (this.filterDimensionOptions().count > 0 || @@ -952,7 +956,7 @@ export default class StructuredQuery extends AtomicQuery { return new DimensionOptions(sortOptions); } } - canAddSort(): boolean { + canAddSort() { const sorts = this.sorts(); return ( this.sortOptions().count > 0 && @@ -1087,6 +1091,32 @@ export default class StructuredQuery extends AtomicQuery { // DIMENSION OPTIONS + _keyForFK(source, destination) { + if (source && destination) { + return `${source.id},${destination.id}`; + } + return null; + } + + _getExplicitJoinsSet(joins) { + const joinDimensionPairs = joins.map(join => { + const dimensionPairs = join.getDimensions(); + return dimensionPairs.map(pair => { + const [parentDimension, joinDimension] = pair; + return this._keyForFK( + parentDimension && parentDimension.field(), + joinDimension && joinDimension.field(), + ); + }); + }); + + const flatJoinDimensions = _.flatten(joinDimensionPairs); + const explicitJoins = new Set(flatJoinDimensions); + explicitJoins.delete(null); + + return explicitJoins; + } + // TODO Atte Keinänen 6/18/17: Refactor to 
dimensionOptions which takes a dimensionFilter // See aggregationFieldOptions for an explanation why that covers more use cases dimensionOptions( @@ -1121,21 +1151,12 @@ export default class StructuredQuery extends AtomicQuery { } // de-duplicate explicit and implicit joined tables - const keyForFk = (src, dst) => - src && dst ? `${src.id},${dst.id}` : null; - const explicitJoins = new Set( - joins.map(join => { - const p = join.parentDimension(); - const j = join.joinDimension(); - return keyForFk(p && p.field(), j && j.field()); - }), - ); - explicitJoins.delete(null); + const explicitJoins = this._getExplicitJoinsSet(joins); const fkDimensions = this.dimensions().filter(dimensionIsFKReference); for (const dimension of fkDimensions) { const field = dimension.field(); - if (field && explicitJoins.has(keyForFk(field, field.target))) { + if (field && explicitJoins.has(this._keyForFK(field, field.target))) { continue; } diff --git a/frontend/src/metabase-lib/lib/queries/structured/Aggregation.js b/frontend/src/metabase-lib/lib/queries/structured/Aggregation.js index 06ddfc2f2c10..91fb2b7b1da1 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/Aggregation.js +++ b/frontend/src/metabase-lib/lib/queries/structured/Aggregation.js @@ -44,7 +44,7 @@ export default class Aggregation extends MBQLClause { return this._query.removeAggregation(this._index); } - canRemove(): boolean { + canRemove() { return this.remove() .clean() .isValid(); @@ -131,7 +131,7 @@ export default class Aggregation extends MBQLClause { /** * Predicate function to test if a given aggregation clause is valid */ - isValid(): boolean { + isValid() { if (this.hasOptions()) { return this.aggregation().isValid(); } else if (this.isStandard() && this.dimension()) { @@ -159,21 +159,21 @@ export default class Aggregation extends MBQLClause { /** * Returns true if this is a "standard" metric */ - isStandard(): boolean { + isStandard() { return AGGREGATION.isStandard(this); } /** * Returns true if 
this is a metric */ - isMetric(): boolean { + isMetric() { return AGGREGATION.isMetric(this); } /** * Returns true if this is custom expression created with the expression editor */ - isCustom(): boolean { + isCustom() { return AGGREGATION.isCustom(this); } diff --git a/frontend/src/metabase-lib/lib/queries/structured/Breakout.js b/frontend/src/metabase-lib/lib/queries/structured/Breakout.js index 3652e60533ab..50f0fd6b4c14 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/Breakout.js +++ b/frontend/src/metabase-lib/lib/queries/structured/Breakout.js @@ -43,7 +43,7 @@ export default class Breakout extends MBQLClause { /** * Predicate function to test if a given breakout clause is valid */ - isValid(): boolean { + isValid() { const query = this.query(); return !query || query.breakoutOptions(this).hasDimension(this.dimension()); } diff --git a/frontend/src/metabase-lib/lib/queries/structured/Filter.js b/frontend/src/metabase-lib/lib/queries/structured/Filter.js index 14136bec5bb4..51497cdc9e89 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/Filter.js +++ b/frontend/src/metabase-lib/lib/queries/structured/Filter.js @@ -115,28 +115,28 @@ export default class Filter extends MBQLClause { /** * Returns true if this is a "standard" filter */ - isStandard(): boolean { + isStandard() { return isStandard(this); } /** * Returns true if this is a segment */ - isSegment(): boolean { + isSegment() { return isSegment(this); } /** * Returns true if this is custom filter created with the expression editor */ - isCustom(): boolean { + isCustom() { return isCustom(this); } /** * Returns true for filters where the first argument is a field */ - isFieldFilter(): boolean { + isFieldFilter() { return isFieldFilter(this); } @@ -308,7 +308,7 @@ export default class Filter extends MBQLClause { } } - isDimension(otherDimension: Dimension): boolean { + isDimension(otherDimension: Dimension) { const dimension = this.dimension(); return dimension ? 
dimension.isEqual(otherDimension) : false; } diff --git a/frontend/src/metabase-lib/lib/queries/structured/Join.js b/frontend/src/metabase-lib/lib/queries/structured/Join.js index 255dbf21e92c..708f48eb0750 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/Join.js +++ b/frontend/src/metabase-lib/lib/queries/structured/Join.js @@ -6,6 +6,7 @@ import Dimension, { FieldDimension } from "metabase-lib/lib/Dimension"; import DimensionOptions from "metabase-lib/lib/DimensionOptions"; import { pluralize } from "metabase/lib/formatting"; +import { getDatetimeUnit, isDateTimeField } from "metabase/lib/query/field_ref"; import { TableId } from "metabase-types/types/Table"; import type { @@ -28,6 +29,9 @@ const JOIN_STRATEGY_OPTIONS = [ { value: "full-join", name: t`Full outer join`, icon: "join_full_outer" }, ]; +const PARENT_DIMENSION_INDEX = 1; +const JOIN_DIMENSION_INDEX = 2; + export default class Join extends MBQLObjectClause { strategy: ?JoinStrategy; alias: ?JoinAlias; @@ -131,23 +135,32 @@ export default class Join extends MBQLObjectClause { setAlias(alias: JoinAlias) { alias = this._uniqueAlias(alias); if (alias !== this.alias) { - const join = this.set({ ...this, alias }); + let join = this.set({ ...this, alias }); // propagate alias change to join dimension - const joinDimension = join.joinDimension(); - if ( - joinDimension instanceof FieldDimension && - joinDimension.joinAlias() && - joinDimension.joinAlias() === this.alias - ) { - const newDimension = joinDimension.withJoinAlias(alias); - return join.setJoinDimension(newDimension); - } else { - return join; - } + const joinDimensions = join.joinDimensions(); + + joinDimensions.forEach((joinDimension, i) => { + if ( + joinDimension instanceof FieldDimension && + joinDimension.joinAlias() && + joinDimension.joinAlias() === this.alias + ) { + const newDimension = joinDimension.withJoinAlias(alias); + join = join.setJoinDimension({ index: i, dimension: newDimension }); + } + }); + + return join; } return 
this; } + _getParentDimensionForAlias() { + return this.parentDimensions().find( + dimension => dimension && dimension.field().isFK(), + ); + } + setDefaultAlias() { // The Join alias should be "Table - FK Field" if possible. We need both to disamiguate sitatutions where we have // multiple FKs that point to the same Table -- see #8418 and #11452. @@ -164,11 +177,9 @@ export default class Join extends MBQLObjectClause { const tableName = table && table.display_name; - const parentDimension = this.parentDimension(); + const parentDimension = this._getParentDimensionForAlias(); const fieldName = - parentDimension && - parentDimension.field().isFK() && - parentDimension.field().targetObjectName(); + parentDimension && parentDimension.field().targetObjectName(); const similarTableAndFieldNames = tableName && @@ -192,6 +203,17 @@ export default class Join extends MBQLObjectClause { return this.setAlias(alias); } + getConditions() { + if (!this.condition) { + return []; + } + if (this.isSingleConditionJoin()) { + return [this.condition]; + } + const [, ...conditions] = this.condition; + return conditions; + } + // STRATEGY setStrategy(strategy: JoinStrategy) { return this.set({ ...this, strategy }); @@ -211,10 +233,71 @@ export default class Join extends MBQLObjectClause { ); } - // CONDITION + // CONDITIONS + + isSingleConditionJoin() { + const { condition } = this; + return Array.isArray(condition) && condition[0] === "="; + } + + isMultipleConditionsJoin() { + const { condition } = this; + return Array.isArray(condition) && condition[0] === "and"; + } + + getConditionByIndex(index) { + if (!this.condition) { + return null; + } + if (this.isSingleConditionJoin() && !index) { + return this.condition; + } + if (this.isMultipleConditionsJoin()) { + const [, ...conditions] = this.condition; + return conditions[index]; + } + return null; + } + setCondition(condition: JoinCondition): Join { return this.set({ ...this, condition }); } + + setConditionByIndex({ index = 0, 
condition }): Join { + if (!this.condition) { + return this.setCondition(condition); + } + if (this.isSingleConditionJoin()) { + if (index === 0) { + return this.setCondition(condition); + } else { + return this.setCondition(["and", this.condition, condition]); + } + } + const conditions = [...this.condition]; + conditions[index + 1] = condition; + return this.setCondition(conditions); + } + + removeCondition(index) { + if (index == null || !this.getConditionByIndex(index)) { + return this; + } + if (this.isSingleConditionJoin()) { + return this.setCondition(null); + } + const filteredCondition = this.condition.filter((_, i) => { + // Adding 1 because the first element of a condition is an operator ("and") + return i !== index + 1; + }); + const [, ...conditions] = filteredCondition; + const isSingleNewCondition = conditions.length === 1; + if (isSingleNewCondition) { + return this.setCondition(conditions[0]); + } + return this.setCondition(filteredCondition); + } + setDefaultCondition() { const { dimensions } = this.parentDimensionOptions(); // look for foreign keys linking the two tables @@ -225,26 +308,66 @@ export default class Join extends MBQLObjectClause { return target && target.table && target.table.id === joinedTable.id; }); if (fk) { - return this.setParentDimension(fk).setJoinDimension( - this.joinedDimension(fk.field().target.dimension()), - ); + return this.setParentDimension({ + index: 0, + dimension: fk, + }).setJoinDimension({ + index: 0, + dimension: this.joinedDimension(fk.field().target.dimension()), + }); } } return this; } + _convertDimensionIntoMBQL(dimension: Dimension | ConcreteField) { + return dimension instanceof Dimension ? 
dimension.mbql() : dimension; + } + + _getJoinDimensionFromCondition(condition) { + const [, , joinDimension] = condition; + const joinedQuery = this.joinedQuery(); + return ( + joinedQuery && + joinDimension && + joinedQuery.parseFieldReference(joinDimension) + ); + } + + _getJoinDimensionsFromMultipleConditions() { + const [, ...conditions] = this.condition; + return conditions.map(condition => + this._getJoinDimensionFromCondition(condition), + ); + } + // simplified "=" join condition helpers: // NOTE: parentDimension refers to the left-hand side of the join, // and joinDimension refers to the right-hand side // TODO: should we rename them to lhsDimension/rhsDimension etc? - parentDimension() { - const { condition } = this; - if (Array.isArray(condition) && condition[0] === "=" && condition[1]) { - return this.query().parseFieldReference(condition[1]); + _getParentDimensionFromCondition(condition) { + const [, parentDimension] = condition; + return parentDimension && this.query().parseFieldReference(parentDimension); + } + + _getParentDimensionsFromMultipleConditions() { + const [, ...conditions] = this.condition; + return conditions.map(condition => + this._getParentDimensionFromCondition(condition), + ); + } + + parentDimensions() { + if (!this.condition) { + return []; } + return this.isSingleConditionJoin() + ? [this._getParentDimensionFromCondition(this.condition)] + : this._getParentDimensionsFromMultipleConditions(); } + parentDimensionOptions() { const query = this.query(); const dimensions = query.dimensions(); @@ -262,37 +385,77 @@ export default class Join extends MBQLObjectClause { } return new DimensionOptions(options); } - // TODO -- in what way is this setting a "parent dimension"? 
These names make no sense - setParentDimension(dimension: Dimension | ConcreteField): Join { - if (dimension instanceof Dimension) { - dimension = dimension.mbql(); + + joinDimensions() { + if (!this.condition) { + return []; } - const joinDimension = this.joinDimension(); - return this.setCondition([ - "=", - dimension, - joinDimension instanceof Dimension ? joinDimension.mbql() : null, - ]); + + return this.isSingleConditionJoin() + ? [this._getJoinDimensionFromCondition(this.condition)] + : this._getJoinDimensionsFromMultipleConditions(); } - joinDimension() { - const { condition } = this; - if (Array.isArray(condition) && condition[0] === "=" && condition[2]) { - const joinedQuery = this.joinedQuery(); - return joinedQuery && joinedQuery.parseFieldReference(condition[2]); + addEmptyDimensionsPair() { + if (!this.condition) { + return this.setCondition([]); } - } - setJoinDimension(dimension: Dimension | ConcreteField): Join { - if (dimension instanceof Dimension) { - dimension = dimension.mbql(); + if (this.isSingleConditionJoin()) { + return this.setCondition(["and", this.condition, []]); + } else { + return this.setCondition([...this.condition, []]); } - const parentDimension = this.parentDimension(); - return this.setCondition([ - "=", - parentDimension instanceof Dimension ? parentDimension.mbql() : null, - dimension, - ]); } + + _isDateTimeDimensionsJoin(d1, d2) { + return d1 && d2 && isDateTimeField(d1) && isDateTimeField(d2); + } + + _getDateTimeFieldCondition( + parentDimension, + joinDimension, + temporalUnitSource, + ) { + const temporalUnit = getDatetimeUnit( + temporalUnitSource === "parent" ? 
parentDimension : joinDimension, + ); + const parent = setTemporalUnit(parentDimension, temporalUnit); + const join = setTemporalUnit(joinDimension, temporalUnit); + return ["=", parent, join]; + } + + setJoinDimension({ index = 0, dimension, overwriteTemporalUnit = false }) { + const condition = this.getConditionByIndex(index); + const join = this._convertDimensionIntoMBQL(dimension); + const parent = condition ? condition[PARENT_DIMENSION_INDEX] : null; + + const newCondition = this._isDateTimeDimensionsJoin(parent, join) + ? this._getDateTimeFieldCondition( + parent, + join, + overwriteTemporalUnit ? "join" : "parent", + ) + : ["=", parent, join]; + + return this.setConditionByIndex({ index, condition: newCondition }); + } + + setParentDimension({ index = 0, dimension, overwriteTemporalUnit = false }) { + const condition = this.getConditionByIndex(index); + const parent = this._convertDimensionIntoMBQL(dimension); + const join = condition ? condition[JOIN_DIMENSION_INDEX] : null; + + const newCondition = this._isDateTimeDimensionsJoin(parent, join) + ? this._getDateTimeFieldCondition( + parent, + join, + overwriteTemporalUnit ? "parent" : "join", + ) + : ["=", parent, join]; + + return this.setConditionByIndex({ index, condition: newCondition }); + } + joinDimensionOptions() { const dimensions = this.joinedDimensions(); return new DimensionOptions({ @@ -304,6 +467,19 @@ export default class Join extends MBQLObjectClause { // HELPERS + getDimensions() { + const conditions = this.getConditions(); + return conditions.map(condition => { + const [, parentDimension, joinDimension] = condition; + return [ + parentDimension + ? this.query().parseFieldReference(parentDimension) + : null, + joinDimension ? 
this.query().parseFieldReference(joinDimension) : null, + ]; + }); + } + joinedQuery() { const sourceTable = this.joinSourceTableId(); const sourceQuery = this.joinSourceQuery(); @@ -389,11 +565,65 @@ export default class Join extends MBQLObjectClause { return this._query.removeJoin(this._index); } - isValid(): boolean { - return !!( - this.joinedTable() && - this.parentDimension() && - this.joinDimension() + hasGaps() { + if (!this.joinedTable()) { + return true; + } + const parentDimensions = this.parentDimensions(); + const joinDimensions = this.joinDimensions(); + return ( + parentDimensions.length === 0 || + joinDimensions.length === 0 || + parentDimensions.length !== joinDimensions.length || + parentDimensions.some(dimension => dimension == null) || + joinDimensions.some(dimension => dimension == null) + ); + } + + isValid() { + if (this.hasGaps()) { + return false; + } + const dimensionOptions = this.parent().dimensionOptions(); + const dimensions = [...this.parentDimensions(), ...this.joinDimensions()]; + return dimensions.every(dimension => + dimensionOptions.hasDimension(dimension), + ); + } + + clean() { + const invalidAndCantFix = !this.condition || !this.joinedTable(); + if (invalidAndCantFix || this.isValid()) { + return this; + } + let join = this; + + let invalidDimensionIndex = this.parentDimensions().findIndex( + dimension => dimension == null, + ); + if (invalidDimensionIndex >= 0) { + join = this.removeCondition(invalidDimensionIndex); + } + + invalidDimensionIndex = this.joinDimensions().findIndex( + dimension => dimension == null, ); + if (invalidDimensionIndex >= 0) { + join = this.removeCondition(invalidDimensionIndex); + } + + return join.clean(); } } + +function setTemporalUnit(fieldRef, value) { + const [field, id, opts] = fieldRef; + return [ + field, + id, + { + ...opts, + "temporal-unit": value, + }, + ]; +} diff --git a/frontend/src/metabase-lib/lib/queries/structured/MBQLClause.js 
b/frontend/src/metabase-lib/lib/queries/structured/MBQLClause.js index b6515329d740..8152ad58bbdf 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/MBQLClause.js +++ b/frontend/src/metabase-lib/lib/queries/structured/MBQLClause.js @@ -10,6 +10,15 @@ export default class MBQLArrayClause extends Array { _private(this, "_query", query); } + // There is a mismatch between the constructor args for `MBQLArrayClause` and `Array` + // so we need to reconcile things in the MBQLArrayClause[Symbol.species] constructor function + // See https://stackoverflow.com/questions/54522949 + static get [Symbol.species]() { + return Object.assign(function(...items) { + return new MBQLArrayClause(new Array(...items), this._index, this._query); + }, MBQLArrayClause); + } + set(mbql: any[]) { return new this.constructor(mbql, this._index, this._query); } @@ -106,6 +115,14 @@ export class MBQLObjectClause { metadata() { return this._query.metadata(); } + + raw() { + const entriesWithDefinedValue = Object.entries(this).filter(entry => { + const [, value] = entry; + return value !== undefined; + }); + return Object.fromEntries(entriesWithDefinedValue); + } } function _private(object, key, value) { diff --git a/frontend/src/metabase-lib/lib/queries/structured/OrderBy.js b/frontend/src/metabase-lib/lib/queries/structured/OrderBy.js index e4162c4f11f5..fe6cd27e7e2e 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/OrderBy.js +++ b/frontend/src/metabase-lib/lib/queries/structured/OrderBy.js @@ -43,7 +43,7 @@ export default class OrderBy extends MBQLClause { /** * Predicate function to test if a given order-by clause is valid */ - isValid(): boolean { + isValid() { const query = this.query(); return !query || query.sortOptions(this).hasDimension(this.dimension()); } diff --git a/frontend/src/metabase/App.jsx b/frontend/src/metabase/App.jsx index 6bacc9515ca9..dd176e986987 100644 --- a/frontend/src/metabase/App.jsx +++ b/frontend/src/metabase/App.jsx @@ -55,7 +55,6 @@ export 
default class App extends Component { } componentDidCatch(error, errorInfo) { - console.log("COMPONENT DID CATCH LOLE"); this.setState({ errorInfo }); } diff --git a/frontend/src/metabase/user/components/LoginHistoryList.jsx b/frontend/src/metabase/account/login-history/components/LoginHistory/LoginHistory.jsx similarity index 92% rename from frontend/src/metabase/user/components/LoginHistoryList.jsx rename to frontend/src/metabase/account/login-history/components/LoginHistory/LoginHistory.jsx index 82ab3dd4c747..4aa44e5fc737 100644 --- a/frontend/src/metabase/user/components/LoginHistoryList.jsx +++ b/frontend/src/metabase/account/login-history/components/LoginHistory/LoginHistory.jsx @@ -1,12 +1,8 @@ /* eslint-disable react/prop-types */ import React from "react"; import { Box, Flex } from "grid-styled"; - import _ from "underscore"; import moment from "moment"; - -import LoginHistory from "metabase/entities/loginHistory"; - import Card from "metabase/components/Card"; import Label from "metabase/components/type/Label"; import Text from "metabase/components/type/Text"; @@ -60,4 +56,4 @@ function LoginHistoryList({ loginHistory }) { return {_.map(groups, LoginHistoryGroup)}; } -export default LoginHistory.loadList()(LoginHistoryList); +export default LoginHistoryList; diff --git a/frontend/src/metabase/account/login-history/components/LoginHistory/index.js b/frontend/src/metabase/account/login-history/components/LoginHistory/index.js new file mode 100644 index 000000000000..cf0573759df0 --- /dev/null +++ b/frontend/src/metabase/account/login-history/components/LoginHistory/index.js @@ -0,0 +1 @@ +export { default } from "./LoginHistory"; diff --git a/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/LoginHistoryApp.jsx b/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/LoginHistoryApp.jsx new file mode 100644 index 000000000000..27b009e30143 --- /dev/null +++ 
b/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/LoginHistoryApp.jsx @@ -0,0 +1,4 @@ +import LoginHistory from "metabase/entities/loginHistory"; +import LoginHistoryList from "../../components/LoginHistory"; + +export default LoginHistory.loadList()(LoginHistoryList); diff --git a/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/index.js b/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/index.js new file mode 100644 index 000000000000..9df80a402fcd --- /dev/null +++ b/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/index.js @@ -0,0 +1 @@ +export { default } from "./LoginHistoryApp"; diff --git a/frontend/src/metabase/account/notifications/actions.js b/frontend/src/metabase/account/notifications/actions.js new file mode 100644 index 000000000000..cae214bfde78 --- /dev/null +++ b/frontend/src/metabase/account/notifications/actions.js @@ -0,0 +1,16 @@ +import { push } from "react-router-redux"; + +const PREFIX = `/account/notifications`; + +export const navigateToUnsubscribe = (item, type) => { + return push(`${PREFIX}/${type}/${item.id}/unsubscribe`); +}; + +export const navigateToArchive = (item, type, hasUnsubscribed) => { + const query = hasUnsubscribed ? 
"?unsubscribed=true" : ""; + return push(`${PREFIX}/${type}/${item.id}/archive${query}`); +}; + +export const navigateToHelp = () => { + return push(`${PREFIX}/help`); +}; diff --git a/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.jsx b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.jsx new file mode 100644 index 000000000000..bb54bcd5cb6a --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.jsx @@ -0,0 +1,127 @@ +import React, { useCallback, useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Settings from "metabase/lib/settings"; +import { formatDateTimeWithUnit } from "metabase/lib/formatting"; +import { formatChannelRecipients } from "metabase/lib/notifications"; +import Button from "metabase/components/Button"; +import ModalContent from "metabase/components/ModalContent"; +import FormMessage from "metabase/components/form/FormMessage"; +import { ModalMessage } from "./ArchiveModal.styled"; + +const propTypes = { + item: PropTypes.object.isRequired, + type: PropTypes.oneOf(["alert", "pulse"]).isRequired, + user: PropTypes.object, + hasUnsubscribed: PropTypes.bool, + onArchive: PropTypes.func, + onClose: PropTypes.func, +}; + +const ArchiveModal = ({ + item, + type, + user, + hasUnsubscribed, + onArchive, + onClose, +}) => { + const [error, setError] = useState(); + + const handleArchiveClick = useCallback(async () => { + try { + await onArchive(item, true); + onClose(); + } catch (error) { + setError(error); + } + }, [item, onArchive, onClose]); + + return ( + : null, + , + , + ]} + onClose={onClose} + > + {isCreator(item, user) && hasUnsubscribed && ( + + {getCreatorMessage(type, user)} + {t`As the creator you can also choose to delete this if it’s no longer relevant to others as well.`} + + )} + + {getDateMessage(item, type)} + {getRecipientsMessage(item)} + + + ); +}; + +ArchiveModal.propTypes = 
propTypes; + +const isCreator = (item, user) => { + return user != null && user.id === item.creator?.id; +}; + +const getTitleMessage = (type, hasUnsubscribed) => { + switch (type) { + case "alert": + return hasUnsubscribed + ? t`You’re unsubscribed. Delete this alert as well?` + : t`Delete this alert?`; + case "pulse": + return hasUnsubscribed + ? t`You’re unsubscribed. Delete this subscription as well?` + : t`Delete this subscription?`; + } +}; + +const getSubmitMessage = (type, hasUnsubscribed) => { + switch (type) { + case "alert": + return hasUnsubscribed ? t`Delete this alert` : t`Yes, delete this alert`; + case "pulse": + return hasUnsubscribed + ? t`Delete this subscription` + : t`Yes, delete this subscription`; + } +}; + +const getCancelMessage = hasUnsubscribed => { + return hasUnsubscribed ? t`Keep it around` : t`I changed my mind`; +}; + +const getCreatorMessage = (type, user) => { + switch (type) { + case "alert": + return t`You won’t receive this alert at ${user.email} any more. `; + case "pulse": + return t`You won’t receive this subscription at ${user.email} any more. `; + } +}; + +const getDateMessage = (item, type) => { + const options = Settings.formattingOptions(); + const createdAt = formatDateTimeWithUnit(item.created_at, "day", options); + + switch (type) { + case "alert": + return t`You created this alert on ${createdAt}. `; + case "pulse": + return t`You created this subscription on ${createdAt}. 
`; + } +}; + +const getRecipientsMessage = item => { + return t`It’s currently being sent to ${formatChannelRecipients(item)}.`; +}; + +export default ArchiveModal; diff --git a/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.styled.jsx b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.styled.jsx new file mode 100644 index 000000000000..748448f0e643 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.styled.jsx @@ -0,0 +1,7 @@ +import styled from "styled-components"; + +export const ModalMessage = styled.div` + &:not(:last-child) { + margin-bottom: 1rem; + } +`; diff --git a/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.unit.spec.js b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.unit.spec.js new file mode 100644 index 000000000000..faf3ab1f667e --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.unit.spec.js @@ -0,0 +1,137 @@ +import React from "react"; +import { render, screen, waitFor } from "@testing-library/react"; +import ArchiveModal from "./ArchiveModal"; + +const getAlert = ({ creator = getUser(), channels = [getChannel()] } = {}) => ({ + creator, + channels, + created_at: "2021-05-08T02:02:07.441Z", +}); + +const getPulse = ({ creator = getUser(), channels = [getChannel()] } = {}) => ({ + creator, + channels, + created_at: "2021-05-08T02:02:07.441Z", +}); + +const getUser = ({ id = 1 } = {}) => ({ + id, + common_name: "John Doe", +}); + +const getChannel = ({ + channel_type = "email", + schedule_type = "hourly", + recipients = [getUser()], +} = {}) => { + return { + channel_type, + schedule_type, + recipients, + schedule_hour: 8, + schedule_day: "mon", + schedule_frame: "first", + }; +}; + +describe("ArchiveModal", () => { + it("should render an email alert", () => { + const alert = getAlert(); + + render(); + + 
screen.getByText("Delete this alert?"); + screen.getByText("Yes, delete this alert"); + screen.getByText("You created this alert on May 8, 2021", { exact: false }); + screen.getByText("It’s currently being sent to 1 email.", { exact: false }); + }); + + it("should render an email pulse", () => { + const pulse = getPulse(); + + render(); + + screen.getByText("Delete this subscription?"); + screen.getByText("Yes, delete this subscription"); + screen.getByText("May 8, 2021", { exact: false }); + screen.getByText("It’s currently being sent to 1 email.", { exact: false }); + }); + + it("should render a slack pulse", () => { + const pulse = getPulse({ + channels: [getChannel({ channel_type: "slack" })], + }); + + render(); + + screen.getByText("1 Slack channel", { exact: false }); + }); + + it("should render an alert with both email and slack channels", () => { + const alert = getAlert({ + channels: [ + getChannel({ + channel_type: "email", + recipients: [getUser(), getUser()], + }), + getChannel({ + channel_type: "slack", + recipients: [getUser(), getUser(), getUser()], + }), + ], + }); + + render(); + + screen.getByText("2 emails and 3 Slack channels", { exact: false }); + }); + + it("should close on submit", async () => { + const alert = getAlert(); + const onArchive = jest.fn(); + const onClose = jest.fn(); + + onArchive.mockResolvedValue(); + + render( + , + ); + + screen.getByText("Yes, delete this alert").click(); + + waitFor(() => { + expect(onArchive).toHaveBeenCalled(alert, true); + expect(onClose).toHaveBeenCalled(); + }); + }); + + it("should not close on a submit error", async () => { + const alert = getAlert(); + const onArchive = jest.fn(); + const onClose = jest.fn(); + + onArchive.mockRejectedValue({ data: { message: "An error occurred" } }); + + render( + , + ); + + screen.getByText("Yes, delete this alert").click(); + + waitFor(() => { + screen.getByText("An error occurred"); + expect(onArchive).toHaveBeenCalled(alert, true); + 
expect(onClose).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/frontend/src/metabase/account/notifications/components/ArchiveModal/index.js b/frontend/src/metabase/account/notifications/components/ArchiveModal/index.js new file mode 100644 index 000000000000..3eb77a0f1601 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/ArchiveModal/index.js @@ -0,0 +1 @@ +export { default } from "./ArchiveModal"; diff --git a/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.jsx b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.jsx new file mode 100644 index 000000000000..204857bb3156 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.jsx @@ -0,0 +1,50 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { jt, t } from "ttag"; +import Settings from "metabase/lib/settings"; +import Button from "metabase/components/Button"; +import ModalContent from "metabase/components/ModalContent"; +import { ModalLink, ModalMessage } from "./HelpModal.styled"; + +const propTypes = { + onClose: PropTypes.func, +}; + +const HelpModal = ({ onClose }) => { + const email = Settings.get("admin-email"); + + const handleClose = () => onClose(true); + + return ( + + {t`Got it`} + + } + onClose={handleClose} + > + + {t`It’s possible you may also receive emails from Metabase if you’re a member of an email distribution list, like “team@mycompany.com” and that list is used as the recipient for an alert or dashboard subscription instead of your individual email.`} + + + {getAdminMessage(email)} + {t`Hopefully they’ll be able to help you out!`} + + + ); +}; + +HelpModal.propTypes = propTypes; + +const getAdminLink = (email, text) => { + return email ? 
{text} : text; +}; + +const getAdminMessage = email => { + const adminLink = getAdminLink(email, t`your instance administrator`); + return jt`Metabase doesn’t manage those lists, so we’d recommend contacting ${adminLink}. `; +}; + +export default HelpModal; diff --git a/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.styled.jsx b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.styled.jsx new file mode 100644 index 000000000000..56ca70f898b0 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.styled.jsx @@ -0,0 +1,17 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import Link from "metabase/components/Link"; + +export const ModalLink = styled(Link)` + color: ${color("brand")}; + + &:hover { + text-decoration: underline; + } +`; + +export const ModalMessage = styled.div` + &:not(:last-child) { + margin-bottom: 1rem; + } +`; diff --git a/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.unit.spec.js b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.unit.spec.js new file mode 100644 index 000000000000..daea8aeda38f --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.unit.spec.js @@ -0,0 +1,32 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import Settings from "metabase/lib/settings"; +import HelpModal from "./HelpModal"; + +describe("HelpModal", () => { + it("should render with admin email", () => { + Settings.set("admin-email", "admin@example.com"); + + render(); + + const link = screen.getByRole("link"); + expect(link).toHaveProperty("href", "mailto:admin@example.com"); + }); + + it("should render without admin email", () => { + Settings.set("admin-email", null); + + render(); + + screen.getByText("administrator", { exact: false }); + }); + + it("should close on button click", () => { + const 
onClose = jest.fn(); + + render(); + + screen.getByText("Got it").click(); + expect(onClose).toHaveBeenCalled(); + }); +}); diff --git a/frontend/src/metabase/account/notifications/components/HelpModal/index.js b/frontend/src/metabase/account/notifications/components/HelpModal/index.js new file mode 100644 index 000000000000..cb16fb1a5562 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/HelpModal/index.js @@ -0,0 +1 @@ +export { default } from "./HelpModal"; diff --git a/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.jsx b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.jsx new file mode 100644 index 000000000000..f25fac3f66e7 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.jsx @@ -0,0 +1,106 @@ +import React, { useCallback } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Settings from "metabase/lib/settings"; +import { formatDateTimeWithUnit } from "metabase/lib/formatting"; +import { + canArchive, + formatChannel, + formatLink, + formatTitle, +} from "metabase/lib/notifications"; +import { + NotificationContent, + NotificationIcon, + NotificationDescription, + NotificationCardRoot, + NotificationMessage, + NotificationTitle, +} from "./NotificationCard.styled"; + +const propTypes = { + item: PropTypes.object.isRequired, + type: PropTypes.oneOf(["pulse", "alert"]).isRequired, + user: PropTypes.object.isRequired, + onUnsubscribe: PropTypes.func, + onArchive: PropTypes.func, +}; + +const NotificationCard = ({ item, type, user, onUnsubscribe, onArchive }) => { + const hasArchive = canArchive(item, user); + + const onUnsubscribeClick = useCallback(() => { + onUnsubscribe(item, type); + }, [item, type, onUnsubscribe]); + + const onArchiveClick = useCallback(() => { + onArchive(item, type); + }, [item, type, onArchive]); + + return ( + + + + {formatTitle(item, 
type)} + + + {item.channels.map((channel, index) => ( + + {getChannelMessage(channel)} + + ))} + + {getCreatorMessage(item, user)} + + + + {!hasArchive && ( + + )} + {hasArchive && ( + + )} + + ); +}; + +NotificationCard.propTypes = propTypes; + +const getChannelMessage = channel => { + return getCapitalizedMessage(formatChannel(channel)); +}; + +const getCapitalizedMessage = message => { + const [firstLetter, ...otherLetters] = message; + return [firstLetter.toUpperCase(), ...otherLetters].join(""); +}; + +const getCreatorMessage = (item, user) => { + let creatorString = ""; + const options = Settings.formattingOptions(); + + if (user.id === item.creator?.id) { + creatorString += t`Created by you`; + } else if (item.creator?.common_name) { + creatorString += t`Created by ${item.creator.common_name}`; + } else { + creatorString += t`Created`; + } + + if (item.created_at) { + const createdAt = formatDateTimeWithUnit(item.created_at, "day", options); + creatorString += t` on ${createdAt}`; + } + + return creatorString; +}; + +export default NotificationCard; diff --git a/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.styled.jsx b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.styled.jsx new file mode 100644 index 000000000000..7d84acae5991 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.styled.jsx @@ -0,0 +1,58 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import Icon from "metabase/components/Icon"; +import Link from "metabase/components/Link"; + +export const NotificationCardRoot = styled.div` + display: flex; + align-items: center; + padding: 1rem 1.5rem; + border: 1px solid ${color("border")}; + border-radius: 6px; + background-color: ${color("white")}; + + &:not(:last-child) { + margin-bottom: 1.25rem; + } +`; + +export const NotificationContent = styled.div` + flex: 1 1 
auto; +`; + +export const NotificationTitle = styled(Link)` + color: ${color("brand")}; + font-weight: bold; + + &:hover { + text-decoration: underline; + } +`; + +export const NotificationDescription = styled.div` + display: flex; + flex-wrap: wrap; + margin-top: 0.25rem; +`; + +export const NotificationMessage = styled.span` + color: ${color("text-medium")}; + font-size: 0.75rem; + line-height: 0.875rem; + + &:not(:last-child)::after { + content: " · "; + white-space: pre; + } +`; + +export const NotificationIcon = styled(Icon)` + color: ${color("text-light")}; + cursor: pointer; + width: 1rem; + height: 1rem; + + &:hover { + color: ${color("text-medium")}; + } +`; diff --git a/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.unit.spec.js b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.unit.spec.js new file mode 100644 index 000000000000..3f51fde7adbe --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.unit.spec.js @@ -0,0 +1,212 @@ +import React from "react"; +import { render, screen, fireEvent } from "@testing-library/react"; +import NotificationCard from "./NotificationCard"; + +const getAlert = ({ creator = getUser(), channels = [getChannel()] } = {}) => ({ + creator, + channels, + card: { + name: "Alert", + }, + created_at: "2021-05-08T02:02:07.441Z", +}); + +const getPulse = ({ creator = getUser(), channels = [getChannel()] } = {}) => ({ + name: "Pulse", + creator, + channels, + created_at: "2021-05-08T02:02:07.441Z", +}); + +const getUser = ({ id = 1 } = {}) => ({ + id, + common_name: "John Doe", +}); + +const getChannel = ({ + channel_type = "email", + schedule_type = "hourly", + recipients = [], +} = {}) => ({ + channel_type, + schedule_type, + recipients, + schedule_hour: 8, + schedule_day: "mon", + schedule_frame: "first", + details: { + channel: "@channel", + }, +}); + +describe("NotificationCard", () => { + 
it("should render an alert", () => { + const alert = getAlert(); + const user = getUser(); + + render(); + + screen.getByText("Alert"); + screen.getByText("Emailed hourly"); + screen.getByText("Created by you on May 8, 2021"); + }); + + it("should render a pulse", () => { + const pulse = getPulse(); + const user = getUser(); + + render(); + + screen.getByText("Pulse"); + screen.getByText("Emailed hourly"); + screen.getByText("Created by you on May 8, 2021"); + }); + + it("should render a slack alert", () => { + const alert = getAlert({ + channels: [getChannel({ channel_type: "slack" })], + }); + const user = getUser(); + + render(); + + screen.getByText("Slack’d hourly to @channel"); + }); + + it("should render a daily alert", () => { + const alert = getAlert({ + channels: [getChannel({ schedule_type: "daily" })], + }); + const user = getUser(); + + render(); + + screen.getByText("Emailed daily at 8:00 AM"); + }); + + it("should render a weekly alert", () => { + const alert = getAlert({ + channels: [getChannel({ schedule_type: "weekly" })], + }); + const user = getUser(); + + render(); + + screen.getByText("Emailed Monday at 8:00 AM"); + }); + + it("should render a monthly alert", () => { + const alert = getAlert({ + channels: [getChannel({ schedule_type: "monthly" })], + }); + const user = getUser(); + + render(); + + screen.getByText("Emailed monthly on the first Monday at 8:00 AM"); + }); + + it("should render an alert created by another user", () => { + const alert = getAlert(); + const user = getUser({ id: 2 }); + + render(); + + screen.getByText("Created by John Doe on May 8, 2021"); + }); + + it("should unsubscribe when the user is not the creator and subscribed", () => { + const creator = getUser({ id: 1 }); + const user = getUser({ id: 2 }); + const alert = getAlert({ + creator, + channels: [getChannel({ recipients: [user] })], + }); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + + render( + , + ); + + 
fireEvent.click(screen.getByLabelText("close icon")); + expect(onUnsubscribe).toHaveBeenCalledWith(alert, "alert"); + expect(onArchive).not.toHaveBeenCalled(); + }); + + it("should unsubscribe when user user is the creator and subscribed with another user", () => { + const creator = getUser({ id: 1 }); + const recipient = getUser({ id: 2 }); + const alert = getAlert({ + creator, + channels: [getChannel({ recipients: [creator, recipient] })], + }); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + + render( + , + ); + + fireEvent.click(screen.getByLabelText("close icon")); + expect(onUnsubscribe).toHaveBeenCalledWith(alert, "alert"); + expect(onArchive).not.toHaveBeenCalled(); + }); + + it("should archive when the user is the creator and not subscribed", () => { + const creator = getUser(); + const alert = getAlert({ creator }); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + + render( + , + ); + + fireEvent.click(screen.getByLabelText("close icon")); + expect(onUnsubscribe).not.toHaveBeenCalled(); + expect(onArchive).toHaveBeenCalledWith(alert, "alert"); + }); + + it("should archive when the user is the creator and is the only one subscribed", () => { + const creator = getUser(); + const alert = getAlert({ + creator, + channels: [getChannel({ recipients: [creator] })], + }); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + + render( + , + ); + + fireEvent.click(screen.getByLabelText("close icon")); + expect(onUnsubscribe).not.toHaveBeenCalled(); + expect(onArchive).toHaveBeenCalledWith(alert, "alert"); + }); +}); diff --git a/frontend/src/metabase/account/notifications/components/NotificationCard/index.js b/frontend/src/metabase/account/notifications/components/NotificationCard/index.js new file mode 100644 index 000000000000..8c5b82942811 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationCard/index.js @@ -0,0 +1 @@ +export { default } from "./NotificationCard"; diff --git 
a/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.jsx b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.jsx new file mode 100644 index 000000000000..7d14a2cfe770 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.jsx @@ -0,0 +1,71 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import NotificationCard from "../NotificationCard"; +import { + NotificationButton, + NotificationHeader, + NotificationIcon, + NotificationLabel, + NotificationMessage, + NotificationSection, +} from "./NotificationList.styled"; + +const propTypes = { + items: PropTypes.array.isRequired, + user: PropTypes.object.isRequired, + children: PropTypes.node, + onHelp: PropTypes.func, + onUnsubscribe: PropTypes.func, + onArchive: PropTypes.func, +}; + +const NotificationList = ({ + items, + user, + children, + onHelp, + onUnsubscribe, + onArchive, +}) => { + if (!items.length) { + return ; + } + + return ( +
+ + {t`You receive or created these`} + + {t`Not seeing one here?`} + + + {items.map(({ item, type }) => ( + + ))} + {children} +
+ ); +}; + +const NotificationEmptyState = () => { + return ( + + + + {t`If you subscribe or are added to dashboard subscriptions or alerts you’ll be able to manage those here.`} + + + ); +}; + +NotificationList.propTypes = propTypes; + +export default NotificationList; diff --git a/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.styled.jsx b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.styled.jsx new file mode 100644 index 000000000000..241279eb8dc7 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.styled.jsx @@ -0,0 +1,39 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import Label from "metabase/components/type/Label"; +import { TextButton } from "metabase/components/Button.styled"; +import Icon from "metabase/components/Icon"; + +export const NotificationHeader = styled.div` + display: flex; + align-items: center; + margin-bottom: 1.5rem; +`; + +export const NotificationLabel = styled(Label)` + flex: 1 1 auto; + margin: 0; +`; + +export const NotificationButton = styled(TextButton).attrs({ + size: "small", +})``; + +export const NotificationSection = styled.div` + display: flex; + flex-direction: column; + align-items: center; +`; + +export const NotificationIcon = styled(Icon)` + color: ${color("bg-dark")}; + width: 3.25rem; + height: 3.25rem; + margin-top: 4.875rem; + margin-bottom: 1.75rem; +`; + +export const NotificationMessage = styled.div` + max-width: 24rem; + text-align: center; +`; diff --git a/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.unit.spec.js b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.unit.spec.js new file mode 100644 index 000000000000..dc4184986d93 --- /dev/null +++ 
b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.unit.spec.js @@ -0,0 +1,35 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import NotificationList from "./NotificationList"; + +const getPulse = () => ({ + name: "Pulse", + channels: [], + created_at: "2021-05-08T02:02:07.441Z", +}); + +const getUser = () => ({ + id: 1, + common_name: "John Doe", +}); + +describe("NotificationList", () => { + it("should render items", () => { + const pulse = getPulse(); + const user = getUser(); + + render( + , + ); + + screen.getByText("Pulse"); + }); + + it("should render empty state when there are no items", () => { + const user = getUser(); + + render(); + + screen.getByText("you’ll be able to manage those here", { exact: false }); + }); +}); diff --git a/frontend/src/metabase/account/notifications/components/NotificationList/index.js b/frontend/src/metabase/account/notifications/components/NotificationList/index.js new file mode 100644 index 000000000000..886c9d477a29 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationList/index.js @@ -0,0 +1 @@ +export { default } from "./NotificationList"; diff --git a/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.jsx b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.jsx new file mode 100644 index 000000000000..b78f58e32eb6 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.jsx @@ -0,0 +1,78 @@ +import React, { useCallback, useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Button from "metabase/components/Button"; +import ModalContent from "metabase/components/ModalContent"; +import FormMessage from "metabase/components/form/FormMessage"; + +const propTypes = { + item: PropTypes.object.isRequired, + type: PropTypes.oneOf(["alert", 
"pulse"]).isRequired, + user: PropTypes.object, + onUnsubscribe: PropTypes.func, + onArchive: PropTypes.func, + onClose: PropTypes.func, +}; + +const UnsubscribeModal = ({ + item, + type, + user, + onUnsubscribe, + onArchive, + onClose, +}) => { + const [error, setError] = useState(); + + const handleUnsubscribeClick = useCallback(async () => { + try { + await onUnsubscribe(item); + + if (isCreator(item, user)) { + onArchive(item, type, true); + } else { + onClose(); + } + } catch (error) { + setError(error); + } + }, [item, type, user, onUnsubscribe, onArchive, onClose]); + + return ( + : null, + , + , + ]} + onClose={onClose} + > +

+ {getUnsubscribeMessage(type)} + {t`Depending on your organization’s permissions you might need to ask a moderator to be re-added in the future.`} +

+
+ ); +}; + +UnsubscribeModal.propTypes = propTypes; + +const isCreator = (item, user) => { + return user != null && user.id === item.creator?.id; +}; + +const getUnsubscribeMessage = type => { + switch (type) { + case "alert": + return t`You’ll stop receiving this alert from now on. `; + case "pulse": + return t`You’ll stop receiving this subscription from now on. `; + } +}; + +export default UnsubscribeModal; diff --git a/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.unit.spec.js b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.unit.spec.js new file mode 100644 index 000000000000..4fd016450182 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.unit.spec.js @@ -0,0 +1,121 @@ +import React from "react"; +import { render, screen, waitFor } from "@testing-library/react"; +import UnsubscribeModal from "./UnsubscribeModal"; + +const getAlert = ({ creator = getUser({ id: 1 }) } = {}) => ({ + name: "Alert", + creator: creator, +}); + +const getPulse = ({ creator = getUser({ id: 1 }) } = {}) => ({ + name: "Pulse", + creator: creator, +}); + +const getUser = ({ id = 2 } = {}) => ({ + id, +}); + +describe("UnsubscribeModal", () => { + it("should render an alert", () => { + const alert = getAlert(); + + render(); + + screen.getByText("this alert", { exact: false }); + }); + + it("should render a pulse", () => { + const pulse = getPulse(); + + render(); + + screen.getByText("this subscription", { exact: false }); + }); + + it("should close if unsubscribed successfully", () => { + const alert = getAlert(); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + const onClose = jest.fn(); + + onUnsubscribe.mockResolvedValue(); + + render( + , + ); + + screen.getByText("Unsubscribe").click(); + + waitFor(() => { + expect(onUnsubscribe).toHaveBeenCalledWith(alert); + expect(onArchive).not.toHaveBeenCalled(); + 
expect(onClose).toHaveBeenCalled(); + }); + }); + + it("should proceed with archiving if the notification is created by the user", () => { + const user = getUser(); + const alert = getAlert({ creator: user }); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + const onClose = jest.fn(); + + onUnsubscribe.mockResolvedValue(); + + render( + , + ); + + screen.getByText("Unsubscribe").click(); + + waitFor(() => { + expect(onUnsubscribe).toHaveBeenCalledWith(alert); + expect(onArchive).toHaveBeenCalledWith(alert, "alert", true); + expect(onClose).not.toHaveBeenCalled(); + }); + }); + + it("should not close on a submit error", () => { + const user = getUser(); + const alert = getAlert(); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + const onClose = jest.fn(); + + onUnsubscribe.mockRejectedValue({ data: { message: "An error occurred" } }); + + render( + , + ); + + screen.getByText("Unsubscribe").click(); + + waitFor(() => { + screen.getByText("An error occurred"); + expect(onUnsubscribe).toHaveBeenCalled(); + expect(onArchive).not.toHaveBeenCalled(); + expect(onClose).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/frontend/src/metabase/account/notifications/components/UnsubscribeModal/index.js b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/index.js new file mode 100644 index 000000000000..78ad8837f723 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/index.js @@ -0,0 +1 @@ +export { default } from "./UnsubscribeModal"; diff --git a/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/ArchiveAlertModal.jsx b/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/ArchiveAlertModal.jsx new file mode 100644 index 000000000000..4ed89bafaa2f --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/ArchiveAlertModal.jsx @@ -0,0 +1,27 @@ +import { connect } from "react-redux"; +import _ from 
"underscore"; +import Alerts from "metabase/entities/alerts"; +import { getUser } from "metabase/selectors/user"; +import { getAlertId } from "../../selectors"; +import ArchiveModal from "../../components/ArchiveModal"; + +const mapStateToProps = (state, { alert, location }) => ({ + item: alert, + type: "alert", + user: getUser(state), + hasUnsubscribed: location.query.unsubscribed, +}); + +const mapDispatchToProps = { + onArchive: Alerts.actions.setArchived, +}; + +export default _.compose( + Alerts.load({ + id: (state, props) => getAlertId(props), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(ArchiveModal); diff --git a/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/index.js b/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/index.js new file mode 100644 index 000000000000..950bf8843c0a --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/index.js @@ -0,0 +1 @@ +export { default } from "./ArchiveAlertModal"; diff --git a/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/ArchivePulseModal.jsx b/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/ArchivePulseModal.jsx new file mode 100644 index 000000000000..e616155221e8 --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/ArchivePulseModal.jsx @@ -0,0 +1,27 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Pulses from "metabase/entities/pulses"; +import { getUser } from "metabase/selectors/user"; +import { getPulseId } from "../../selectors"; +import ArchiveModal from "../../components/ArchiveModal"; + +const mapStateToProps = (state, { pulse, location }) => ({ + item: pulse, + type: "pulse", + user: getUser(state), + hasUnsubscribed: location.query.unsubscribed, +}); + +const mapDispatchToProps = { + onArchive: Pulses.actions.setArchived, +}; + +export default _.compose( + Pulses.load({ + id: 
(state, props) => getPulseId(props), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(ArchiveModal); diff --git a/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/index.js b/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/index.js new file mode 100644 index 000000000000..7121b9f24cf0 --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/index.js @@ -0,0 +1 @@ +export { default } from "./ArchivePulseModal"; diff --git a/frontend/src/metabase/account/notifications/containers/NotificationsApp/NotificationsApp.jsx b/frontend/src/metabase/account/notifications/containers/NotificationsApp/NotificationsApp.jsx new file mode 100644 index 000000000000..fb99353193fd --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/NotificationsApp/NotificationsApp.jsx @@ -0,0 +1,38 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Alerts from "metabase/entities/alerts"; +import Pulses from "metabase/entities/pulses"; +import { getUser, getUserId } from "metabase/selectors/user"; +import { + navigateToArchive, + navigateToHelp, + navigateToUnsubscribe, +} from "../../actions"; +import { getNotifications } from "../../selectors"; +import NotificationList from "../../components/NotificationList"; + +const mapStateToProps = (state, props) => ({ + user: getUser(state), + items: getNotifications(props), +}); + +const mapDispatchToProps = { + onHelp: navigateToHelp, + onUnsubscribe: navigateToUnsubscribe, + onArchive: navigateToArchive, +}; + +export default _.compose( + Alerts.loadList({ + query: state => ({ user_id: getUserId(state) }), + reload: true, + }), + Pulses.loadList({ + query: state => ({ user_id: getUserId(state) }), + reload: true, + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(NotificationList); diff --git a/frontend/src/metabase/account/notifications/containers/NotificationsApp/index.js 
b/frontend/src/metabase/account/notifications/containers/NotificationsApp/index.js new file mode 100644 index 000000000000..219d08c4598d --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/NotificationsApp/index.js @@ -0,0 +1 @@ +export { default } from "./NotificationsApp"; diff --git a/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/UnsubscribeAlertModal.jsx b/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/UnsubscribeAlertModal.jsx new file mode 100644 index 000000000000..22858ef8230d --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/UnsubscribeAlertModal.jsx @@ -0,0 +1,28 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Alerts from "metabase/entities/alerts"; +import { getUser } from "metabase/selectors/user"; +import { navigateToArchive } from "../../actions"; +import { getAlertId } from "../../selectors"; +import UnsubscribeModal from "../../components/UnsubscribeModal"; + +const mapStateToProps = (state, { alert }) => ({ + item: alert, + type: "alert", + user: getUser(state), +}); + +const mapDispatchToProps = { + onUnsubscribe: Alerts.actions.unsubscribe, + onArchive: navigateToArchive, +}; + +export default _.compose( + Alerts.load({ + id: (state, props) => getAlertId(props), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(UnsubscribeModal); diff --git a/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/index.js b/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/index.js new file mode 100644 index 000000000000..473d470d0765 --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/index.js @@ -0,0 +1 @@ +export { default } from "./UnsubscribeAlertModal"; diff --git a/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/UnsubscribePulseModal.jsx 
b/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/UnsubscribePulseModal.jsx new file mode 100644 index 000000000000..97650c451697 --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/UnsubscribePulseModal.jsx @@ -0,0 +1,28 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Pulses from "metabase/entities/pulses"; +import { getUser } from "metabase/selectors/user"; +import { navigateToArchive } from "../../actions"; +import { getPulseId } from "../../selectors"; +import UnsubscribeModal from "../../components/UnsubscribeModal"; + +const mapStateToProps = (state, { pulse }) => ({ + item: pulse, + type: "pulse", + user: getUser(state), +}); + +const mapDispatchToProps = { + onUnsubscribe: Pulses.actions.unsubscribe, + onArchive: navigateToArchive, +}; + +export default _.compose( + Pulses.load({ + id: (state, props) => getPulseId(props), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(UnsubscribeModal); diff --git a/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/index.js b/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/index.js new file mode 100644 index 000000000000..08537ea00a8c --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/index.js @@ -0,0 +1 @@ +export { default } from "./UnsubscribePulseModal"; diff --git a/frontend/src/metabase/account/notifications/routes.jsx b/frontend/src/metabase/account/notifications/routes.jsx new file mode 100644 index 000000000000..639a742d1b71 --- /dev/null +++ b/frontend/src/metabase/account/notifications/routes.jsx @@ -0,0 +1,27 @@ +import React from "react"; +import { Route } from "metabase/hoc/Title"; +import { ModalRoute } from "metabase/hoc/ModalRoute"; +import NotificationsApp from "./containers/NotificationsApp"; +import HelpModal from "./components/HelpModal"; +import ArchiveAlertModal from 
"./containers/ArchiveAlertModal"; +import ArchivePulseModal from "./containers/ArchivePulseModal"; +import UnsubscribeAlertModal from "./containers/UnsubscribeAlertModal"; +import UnsubscribePulseModal from "./containers/UnsubscribePulseModal"; + +const getRoutes = () => ( + + + + + + + +); + +export default getRoutes; diff --git a/frontend/src/metabase/account/notifications/selectors.js b/frontend/src/metabase/account/notifications/selectors.js new file mode 100644 index 000000000000..5558b68e79e6 --- /dev/null +++ b/frontend/src/metabase/account/notifications/selectors.js @@ -0,0 +1,32 @@ +import { createSelector } from "reselect"; +import { parseTimestamp } from "metabase/lib/time"; + +export const getAlertId = ({ params: { alertId } }) => { + return parseInt(alertId); +}; + +export const getPulseId = ({ params: { pulseId } }) => { + return parseInt(pulseId); +}; + +export const getNotifications = createSelector( + [({ alerts }) => alerts, ({ pulses }) => pulses], + (alerts, pulses) => { + const items = [ + ...alerts.map(alert => ({ + item: alert, + type: "alert", + })), + ...pulses.map(pulse => ({ + item: pulse, + type: "pulse", + })), + ]; + + return items.sort( + (a, b) => + parseTimestamp(b.item.created_at).unix() - + parseTimestamp(a.item.created_at).unix(), + ); + }, +); diff --git a/frontend/src/metabase/account/password/actions.js b/frontend/src/metabase/account/password/actions.js new file mode 100644 index 000000000000..7ed6425b9ec8 --- /dev/null +++ b/frontend/src/metabase/account/password/actions.js @@ -0,0 +1,41 @@ +import { t } from "ttag"; +import { UserApi, UtilApi } from "metabase/services"; +import { createThunkAction } from "metabase/lib/redux"; + +export const UPDATE_PASSWORD = "UPDATE_PASSWORD"; +export const VALIDATE_PASSWORD = "VALIDATE_PASSWORD"; + +export const validatePassword = createThunkAction(VALIDATE_PASSWORD, function( + password, +) { + return async function() { + return await UtilApi.password_check({ + password: password, + }); 
+ }; +}); + +export const updatePassword = createThunkAction(UPDATE_PASSWORD, function( + user_id, + password, + old_password, +) { + return async function() { + try { + await UserApi.update_password({ + id: user_id, + password, + old_password, + }); + + return { + success: true, + data: { + message: t`Password updated successfully!`, + }, + }; + } catch (error) { + return error; + } + }; +}); diff --git a/frontend/src/metabase/account/password/components/UserPasswordForm/UserPasswordForm.jsx b/frontend/src/metabase/account/password/components/UserPasswordForm/UserPasswordForm.jsx new file mode 100644 index 000000000000..8a7d9633e07c --- /dev/null +++ b/frontend/src/metabase/account/password/components/UserPasswordForm/UserPasswordForm.jsx @@ -0,0 +1,45 @@ +import React, { useCallback } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import User from "metabase/entities/users"; + +const propTypes = { + user: PropTypes.object, + validatePassword: PropTypes.func, + updatePassword: PropTypes.func, +}; + +const UserPasswordForm = ({ user, validatePassword, updatePassword }) => { + const handleAsyncValidate = useCallback( + async ({ password }) => { + try { + validatePassword(password); + return {}; + } catch (error) { + return error.data.errors; + } + }, + [validatePassword], + ); + + const handleSubmit = useCallback( + ({ password, old_password }) => { + updatePassword(user.id, password, old_password); + }, + [user, updatePassword], + ); + + return ( + + ); +}; + +UserPasswordForm.propTypes = propTypes; + +export default UserPasswordForm; diff --git a/frontend/src/metabase/account/password/components/UserPasswordForm/index.js b/frontend/src/metabase/account/password/components/UserPasswordForm/index.js new file mode 100644 index 000000000000..107c471a5505 --- /dev/null +++ b/frontend/src/metabase/account/password/components/UserPasswordForm/index.js @@ -0,0 +1 @@ +export { default } from "./UserPasswordForm"; diff --git 
a/frontend/src/metabase/account/password/containers/UserPasswordApp/UserPasswordApp.jsx b/frontend/src/metabase/account/password/containers/UserPasswordApp/UserPasswordApp.jsx new file mode 100644 index 000000000000..b0c222bc2cd1 --- /dev/null +++ b/frontend/src/metabase/account/password/containers/UserPasswordApp/UserPasswordApp.jsx @@ -0,0 +1,18 @@ +import { connect } from "react-redux"; +import { getUser } from "metabase/selectors/user"; +import { updatePassword, validatePassword } from "../../actions"; +import UserPasswordForm from "../../components/UserPasswordForm"; + +const mapStateToProps = state => ({ + user: getUser(state), +}); + +const mapDispatchToProps = { + validatePassword, + updatePassword, +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(UserPasswordForm); diff --git a/frontend/src/metabase/account/password/containers/UserPasswordApp/index.js b/frontend/src/metabase/account/password/containers/UserPasswordApp/index.js new file mode 100644 index 000000000000..e3bb2bbe3842 --- /dev/null +++ b/frontend/src/metabase/account/password/containers/UserPasswordApp/index.js @@ -0,0 +1 @@ +export { default } from "./UserPasswordApp"; diff --git a/frontend/src/metabase/account/profile/components/UserProfileForm/UserProfileForm.jsx b/frontend/src/metabase/account/profile/components/UserProfileForm/UserProfileForm.jsx new file mode 100644 index 000000000000..45a412908a44 --- /dev/null +++ b/frontend/src/metabase/account/profile/components/UserProfileForm/UserProfileForm.jsx @@ -0,0 +1,24 @@ +import React, { useCallback } from "react"; +import PropTypes from "prop-types"; +import User from "metabase/entities/users"; + +const propTypes = { + user: PropTypes.object, +}; + +const UserProfileForm = ({ user }) => { + const handleSaved = useCallback( + ({ locale }) => { + if (locale !== user.locale) { + window.location.reload(); + } + }, + [user], + ); + + return ; +}; + +UserProfileForm.propTypes = propTypes; + +export default 
UserProfileForm; diff --git a/frontend/src/metabase/account/profile/components/UserProfileForm/index.js b/frontend/src/metabase/account/profile/components/UserProfileForm/index.js new file mode 100644 index 000000000000..3ca451b87862 --- /dev/null +++ b/frontend/src/metabase/account/profile/components/UserProfileForm/index.js @@ -0,0 +1 @@ +export { default } from "./UserProfileForm"; diff --git a/frontend/src/metabase/account/profile/containers/UserProfileApp/UserProfileApp.jsx b/frontend/src/metabase/account/profile/containers/UserProfileApp/UserProfileApp.jsx new file mode 100644 index 000000000000..5b26c02d115d --- /dev/null +++ b/frontend/src/metabase/account/profile/containers/UserProfileApp/UserProfileApp.jsx @@ -0,0 +1,9 @@ +import { connect } from "react-redux"; +import { getUser } from "metabase/selectors/user"; +import UserProfileForm from "../../components/UserProfileForm"; + +const mapStateToProps = state => ({ + user: getUser(state), +}); + +export default connect(mapStateToProps)(UserProfileForm); diff --git a/frontend/src/metabase/account/profile/containers/UserProfileApp/index.js b/frontend/src/metabase/account/profile/containers/UserProfileApp/index.js new file mode 100644 index 000000000000..0f02a783b089 --- /dev/null +++ b/frontend/src/metabase/account/profile/containers/UserProfileApp/index.js @@ -0,0 +1 @@ +export { default } from "./UserProfileApp"; diff --git a/frontend/src/metabase/account/routes.jsx b/frontend/src/metabase/account/routes.jsx new file mode 100644 index 000000000000..48747092e1f2 --- /dev/null +++ b/frontend/src/metabase/account/routes.jsx @@ -0,0 +1,25 @@ +import React from "react"; +import { t } from "ttag"; +import { IndexRedirect } from "react-router"; +import { Route } from "metabase/hoc/Title"; +import AccountSettingsApp from "./settings/containers/AccountSettingsApp"; +import UserProfileApp from "./profile/containers/UserProfileApp"; +import UserPasswordApp from "./password/containers/UserPasswordApp"; +import 
LoginHistoryApp from "./login-history/containers/LoginHistoryApp"; +import getNotificationRoutes from "./notifications/routes"; + +const getRoutes = (store, IsAuthenticated) => { + return ( + + + + + + + {getNotificationRoutes()} + + + ); +}; + +export default getRoutes; diff --git a/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.jsx b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.jsx new file mode 100644 index 000000000000..9a0d6d9e6855 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.jsx @@ -0,0 +1,55 @@ +import React, { useMemo } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Radio from "metabase/components/Radio"; +import { PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS } from "metabase/plugins"; +import { + AccountHeaderRoot, + HeaderAvatar, + HeaderSection, + HeaderTitle, +} from "./AccountHeader.styled"; + +const propTypes = { + user: PropTypes.object.isRequired, + path: PropTypes.string, + onChangeLocation: PropTypes.func, +}; + +const AccountHeader = ({ user, path, onChangeLocation }) => { + const hasPasswordChange = useMemo( + () => PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.every(f => f(user)), + [user], + ); + + const tabs = useMemo( + () => [ + { name: t`Profile`, value: "/account/profile" }, + ...(hasPasswordChange + ? 
[{ name: t`Password`, value: "/account/password" }] + : []), + { name: t`Login History`, value: "/account/login-history" }, + { name: t`Notifications`, value: "/account/notifications" }, + ], + [hasPasswordChange], + ); + + return ( + + + + {t`Account settings`} + + + + ); +}; + +AccountHeader.propTypes = propTypes; + +export default AccountHeader; diff --git a/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.styled.jsx b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.styled.jsx new file mode 100644 index 000000000000..f24c3b1cd068 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.styled.jsx @@ -0,0 +1,55 @@ +import styled from "styled-components"; +import colors from "metabase/lib/colors"; + +import { + breakpointMinMedium, + breakpointMinSmall, + space, +} from "metabase/styled-components/theme"; +import UserAvatar from "metabase/components/UserAvatar"; + +export const AccountHeaderRoot = styled.div` + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + padding-top: ${space(1)}; + border-bottom: 1px solid ${colors["border"]}; + background-color: ${colors["white"]}; + + ${breakpointMinSmall} { + padding-top: ${space(2)}; + } +`; + +export const HeaderSection = styled.div` + display: flex; + flex-direction: column; + align-items: center; + padding: ${space(2)}; + + ${breakpointMinMedium} { + padding: ${space(4)}; + } +`; + +export const HeaderTitle = styled.h2` + text-align: center; +`; + +export const HeaderAvatar = styled(UserAvatar)` + width: 3em; + height: 3em; + margin-bottom: ${space(1)}; + + ${breakpointMinSmall} { + width: 4em; + height: 4em; + margin-bottom: ${space(2)}; + } + + ${breakpointMinMedium} { + width: 5em; + height: 5em; + } +`; diff --git a/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.unit.spec.js 
b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.unit.spec.js new file mode 100644 index 000000000000..8ac3be18980b --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.unit.spec.js @@ -0,0 +1,69 @@ +import React from "react"; +import { fireEvent, render, screen } from "@testing-library/react"; +import AccountHeader from "./AccountHeader"; +import { PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS } from "metabase/plugins"; + +const getUser = () => ({ + id: 1, + first_name: "John", + last_name: "Doe", + email: "john@metabase.test", + google_auth: true, +}); + +describe("AccountHeader", () => { + const ORIGINAL_SHOW_CHANGE_PASSWORD_CONDITIONS = [ + ...PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS, + ]; + + beforeEach(() => { + PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.splice(0); + }); + + afterEach(() => { + PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.splice( + 0, + PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.length, + ...ORIGINAL_SHOW_CHANGE_PASSWORD_CONDITIONS, + ); + }); + + it("should show all tabs for a regular user", () => { + const user = getUser(); + + render(); + + screen.getByText("Profile"); + screen.getByText("Password"); + screen.getByText("Login History"); + screen.getByText("Notifications"); + }); + + it("should show the password tab if it is enabled by a plugin", () => { + const user = getUser(); + PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.push(user => user.google_auth); + + render(); + + screen.getByText("Password"); + }); + + it("should hide the password tab if it is disabled by a plugin", () => { + const user = getUser(); + PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.push(user => !user.google_auth); + + render(); + + expect(screen.queryByText("Password")).not.toBeInTheDocument(); + }); + + it("should change location when a tab is selected", () => { + const user = getUser(); + const onChangeLocation = jest.fn(); + + render(); + + fireEvent.click(screen.getByText("Profile")); + 
expect(onChangeLocation).toHaveBeenCalledWith("/account/profile"); + }); +}); diff --git a/frontend/src/metabase/account/settings/components/AccountHeader/index.js b/frontend/src/metabase/account/settings/components/AccountHeader/index.js new file mode 100644 index 000000000000..f763e2c973a4 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountHeader/index.js @@ -0,0 +1 @@ +export { default } from "./AccountHeader"; diff --git a/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.jsx b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.jsx new file mode 100644 index 000000000000..e52ed223f819 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.jsx @@ -0,0 +1,22 @@ +import React from "react"; +import PropTypes from "prop-types"; +import AccountHeader from "../AccountHeader"; +import { AccountContent } from "./AccountLayout.styled"; + +const propTypes = { + ...AccountHeader.propTypes, + children: PropTypes.node, +}; + +const AccountLayout = ({ children, ...props }) => { + return ( +
+ + {children} +
+ ); +}; + +AccountLayout.propTypes = propTypes; + +export default AccountLayout; diff --git a/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.styled.jsx b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.styled.jsx new file mode 100644 index 000000000000..1ff1dd6b6413 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.styled.jsx @@ -0,0 +1,12 @@ +import styled from "styled-components"; +import { breakpointMinSmall, space } from "metabase/styled-components/theme"; + +export const AccountContent = styled.div` + margin: 0 auto; + padding: ${space(1)}; + + ${breakpointMinSmall} { + width: 540px; + padding: ${space(3)} ${space(2)}; + } +`; diff --git a/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.unit.spec.js b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.unit.spec.js new file mode 100644 index 000000000000..772a833782f5 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.unit.spec.js @@ -0,0 +1,21 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import AccountLayout from "./AccountLayout"; + +const getUser = () => ({ + id: 1, + first_name: "John", + last_name: "Doe", + email: "john@metabase.test", +}); + +describe("AccountLayout", () => { + it("should render header and content", () => { + const user = getUser(); + + render(Content); + + screen.getByText("Profile"); + screen.getByText("Content"); + }); +}); diff --git a/frontend/src/metabase/account/settings/components/AccountLayout/index.js b/frontend/src/metabase/account/settings/components/AccountLayout/index.js new file mode 100644 index 000000000000..e8b4574ee5a0 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountLayout/index.js @@ -0,0 +1 @@ +export { default } from "./AccountLayout"; diff --git 
a/frontend/src/metabase/account/settings/containers/AccountSettingsApp/AccountSettingsApp.jsx b/frontend/src/metabase/account/settings/containers/AccountSettingsApp/AccountSettingsApp.jsx new file mode 100644 index 000000000000..a65c7d59200c --- /dev/null +++ b/frontend/src/metabase/account/settings/containers/AccountSettingsApp/AccountSettingsApp.jsx @@ -0,0 +1,18 @@ +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import { getUser } from "metabase/selectors/user"; +import AccountLayout from "../../components/AccountLayout"; + +const mapStateToProps = (state, props) => ({ + user: getUser(state), + path: props.location.pathname, +}); + +const mapDispatchToProps = { + onChangeLocation: push, +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(AccountLayout); diff --git a/frontend/src/metabase/account/settings/containers/AccountSettingsApp/index.js b/frontend/src/metabase/account/settings/containers/AccountSettingsApp/index.js new file mode 100644 index 000000000000..96cffd19015b --- /dev/null +++ b/frontend/src/metabase/account/settings/containers/AccountSettingsApp/index.js @@ -0,0 +1 @@ +export { default } from "./AccountSettingsApp"; diff --git a/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.jsx b/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.jsx new file mode 100644 index 000000000000..b242b5e240b0 --- /dev/null +++ b/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.jsx @@ -0,0 +1,97 @@ +import React, { useRef } from "react"; +import PropTypes from "prop-types"; +import { Box } from "grid-styled"; +import { t } from "ttag"; + +import DeleteDatabaseModal from "metabase/admin/databases/components/DeleteDatabaseModal.jsx"; +import ActionButton from "metabase/components/ActionButton"; +import ModalWithTrigger from "metabase/components/ModalWithTrigger"; +import ConfirmContent from 
"metabase/components/ConfirmContent"; + +const propTypes = { + database: PropTypes.object.isRequired, + deleteDatabase: PropTypes.func.isRequired, + syncDatabaseSchema: PropTypes.func.isRequired, + rescanDatabaseFields: PropTypes.func.isRequired, + discardSavedFieldValues: PropTypes.func.isRequired, +}; + +const DatabaseEditAppSidebar = ({ + database, + deleteDatabase, + syncDatabaseSchema, + rescanDatabaseFields, + discardSavedFieldValues, +}) => { + const discardSavedFieldValuesModal = useRef(); + const deleteDatabaseModal = useRef(); + + return ( + +
+
+ +
    +
  1. + syncDatabaseSchema(database.id)} + className="Button Button--syncDbSchema" + normalText={t`Sync database schema now`} + activeText={t`Starting…`} + failedText={t`Failed to sync`} + successText={t`Sync triggered!`} + /> +
  2. +
  3. + rescanDatabaseFields(database.id)} + className="Button Button--rescanFieldValues" + normalText={t`Re-scan field values now`} + activeText={t`Starting…`} + failedText={t`Failed to start scan`} + successText={t`Scan triggered!`} + /> +
  4. +
+
+ +
+ +
    +
  1. + + discardSavedFieldValuesModal.current.toggle()} + onAction={() => discardSavedFieldValues(database.id)} + /> + +
  2. + +
  3. + + deleteDatabaseModal.current.toggle()} + onDelete={() => deleteDatabase(database.id, true)} + /> + +
  4. +
+
+
+
+ ); +}; + +DatabaseEditAppSidebar.propTypes = propTypes; + +export default DatabaseEditAppSidebar; diff --git a/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.unit.spec.js b/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.unit.spec.js new file mode 100644 index 000000000000..3b1c82c3841e --- /dev/null +++ b/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.unit.spec.js @@ -0,0 +1,99 @@ +import React from "react"; +import { fireEvent, render, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; + +import Sidebar from "./Sidebar"; + +it("syncs database schema", () => { + const databaseId = 1; + const database = { id: databaseId }; + const syncDatabaseSchema = jest.fn(); + + render( + , + ); + + const syncButton = screen.getByText("Sync database schema now"); + + fireEvent.click(syncButton); + + expect(syncDatabaseSchema).toHaveBeenCalledWith(databaseId); +}); + +it("rescans database field values", () => { + const databaseId = 1; + const database = { id: databaseId }; + const rescanDatabaseFields = jest.fn(); + + render( + , + ); + + const rescanButton = screen.getByText("Re-scan field values now"); + + fireEvent.click(rescanButton); + + expect(rescanDatabaseFields).toHaveBeenCalledWith(databaseId); +}); + +it("discards saved field values", () => { + const databaseId = 1; + const database = { id: databaseId }; + const discardSavedFieldValues = jest.fn(); + + render( + , + ); + + const discardButton = screen.getByText("Discard saved field values"); + + fireEvent.click(discardButton); + + expect(screen.getAllByText("Discard saved field values").length).toBe(2); + + const cancelButton = screen.getByText("Cancel"); + + fireEvent.click(cancelButton); + + fireEvent.click(discardButton); + + const yesButton = screen.getByText("Yes"); + + fireEvent.click(yesButton); + + expect(discardSavedFieldValues).toHaveBeenCalledWith(databaseId); 
+}); + +it("removes database", () => { + const databaseId = 1; + const name = "DB Name"; + const database = { id: databaseId, name }; + const deleteDatabase = jest.fn(); + + render(); + + const removeDBButton = screen.getByText("Remove this database"); + + fireEvent.click(removeDBButton); + + screen.getByText(`Delete the ${name} database?`); + + const cancelButton = screen.getByText("Cancel"); + + fireEvent.click(cancelButton); + + fireEvent.click(removeDBButton); + + const input = screen.getByRole("textbox"); + + userEvent.type(input, name); + + const deleteButton = screen.getByText("Delete"); + + fireEvent.click(deleteButton); + + expect(deleteDatabase).toHaveBeenCalled(); +}); diff --git a/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx b/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx index 94a568d1e317..25584c48008b 100644 --- a/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx +++ b/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx @@ -10,13 +10,12 @@ import { Box, Flex } from "grid-styled"; import title from "metabase/hoc/Title"; -import DeleteDatabaseModal from "../components/DeleteDatabaseModal"; -import ActionButton from "metabase/components/ActionButton"; import AddDatabaseHelpCard from "metabase/components/AddDatabaseHelpCard"; import Button from "metabase/components/Button"; import Breadcrumbs from "metabase/components/Breadcrumbs"; +import DriverWarning from "metabase/components/DriverWarning"; import Radio from "metabase/components/Radio"; -import ModalWithTrigger from "metabase/components/ModalWithTrigger"; +import Sidebar from "metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar"; import Databases from "metabase/entities/databases"; @@ -37,7 +36,6 @@ import { deleteDatabase, selectEngine, } from "../database"; -import ConfirmContent from "metabase/components/ConfirmContent"; import LoadingAndErrorWrapper from "metabase/components/LoadingAndErrorWrapper"; 
import { getIn } from "icepick"; @@ -71,10 +69,7 @@ const mapDispatchToProps = { selectEngine, }; -type TabName = "connection" | "scheduling"; -type TabOption = { name: string, value: TabName }; - -const TABS: TabOption[] = [ +const TABS = [ { name: t`Connection`, value: "connection", @@ -91,19 +86,12 @@ const TABS: TabOption[] = [ ) @title(({ database }) => database && database.name) export default class DatabaseEditApp extends Component { - state: { - currentTab: TabName, - }; - constructor(props, context) { super(props, context); this.state = { currentTab: TABS[0].value, }; - - this.discardSavedFieldValuesModal = React.createRef(); - this.deleteDatabaseModal = React.createRef(); } static propTypes = { @@ -138,26 +126,30 @@ export default class DatabaseEditApp extends Component { render() { const { database, + deleteDatabase, + discardSavedFieldValues, selectedEngine, letUserControlSchedulingSaved, letUserControlSchedulingForm, initializeError, + rescanDatabaseFields, + syncDatabaseSchema, } = this.props; const { currentTab } = this.state; - const editingExistingDatabase = database && database.id != null; + const editingExistingDatabase = database?.id != null; const addingNewDatabase = !editingExistingDatabase; const showTabs = editingExistingDatabase && letUserControlSchedulingSaved; + const crumbs = [ + [t`Databases`, "/admin/databases"], + [addingNewDatabase ? t`Add Database` : database.name], + ]; + return ( - + +
@@ -171,134 +163,100 @@ export default class DatabaseEditApp extends Component { />
)} - - - + {() => ( + ( + + )) + } + submitButtonComponent={Button} > - {() => ( - ( - - )) - } - submitButtonComponent={Button} - /> - )} - - - {addingNewDatabase && ( - - - + {({ + Form, + FormField, + FormMessage, + FormSubmit, + formFields, + onChangeField, + submitTitle, + }) => { + return ( + + +
+ {formFields.map(formField => ( + + ))} + +
+ + {submitTitle} + +
+ +
+ + {addingNewDatabase && ( + + )} + { + onChangeField("engine", engine); + }} + data-testid="database-setup-driver-warning" + /> + +
+ ); + }} + )} -
+
- {/* Sidebar Actions */} {editingExistingDatabase && ( - -
-
- -
    -
  1. - - this.props.syncDatabaseSchema(database.id) - } - className="Button Button--syncDbSchema" - normalText={t`Sync database schema now`} - activeText={t`Starting…`} - failedText={t`Failed to sync`} - successText={t`Sync triggered!`} - /> -
  2. -
  3. - - this.props.rescanDatabaseFields(database.id) - } - className="Button Button--rescanFieldValues" - normalText={t`Re-scan field values now`} - activeText={t`Starting…`} - failedText={t`Failed to start scan`} - successText={t`Scan triggered!`} - /> -
  4. -
-
- -
- -
    -
  1. - - - this.discardSavedFieldValuesModal.current.toggle() - } - onAction={() => - this.props.discardSavedFieldValues(database.id) - } - /> - -
  2. - -
  3. - - - this.deleteDatabaseModal.current.toggle() - } - onDelete={() => - this.props.deleteDatabase(database.id, true) - } - /> - -
  4. -
-
-
-
+ )} diff --git a/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.unit.spec.js b/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.unit.spec.js new file mode 100644 index 000000000000..1e61ab53958f --- /dev/null +++ b/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.unit.spec.js @@ -0,0 +1,105 @@ +import React from "react"; +import { Provider } from "react-redux"; +import { reducer as form } from "redux-form"; +import { Router, Route } from "react-router"; +import { createMemoryHistory } from "history"; +import { + render, + screen, + waitForElementToBeRemoved, +} from "@testing-library/react"; +import admin from "metabase/admin/admin"; +import MetabaseSettings from "metabase/lib/settings"; +import { PLUGIN_CACHING } from "metabase/plugins"; +import { getStore } from "__support__/entities-store"; +import DatabaseEditApp from "./DatabaseEditApp"; + +const ENGINES_MOCK = { + h2: { + "details-fields": [ + { "display-name": "Connection String", name: "db", required: true }, + ], + "driver-name": "H2", + "superseded-by": null, + }, + sqlite: { + "details-fields": [ + { "display-name": "Filename", name: "db", required: true }, + ], + "driver-name": "SQLite", + "superseded-by": null, + }, +}; + +function mockSettings({ cachingEnabled = false }) { + const spy = jest.spyOn(MetabaseSettings, "get"); + spy.mockImplementation(key => { + if (key === "engines") { + return ENGINES_MOCK; + } + if (key === "enable-query-caching") { + return cachingEnabled; + } + if (key === "site-url") { + return "http://localhost:3333"; + } + }); +} + +async function setup({ cachingEnabled = false } = {}) { + mockSettings({ cachingEnabled }); + + render( + + + + + , + ); + + await waitForElementToBeRemoved(() => screen.queryByText("Loading...")); +} + +describe("DatabaseEditApp", () => { + describe("Cache TTL field", () => { + describe("OSS", () => { + it("is invisible", async () => { + await setup({ cachingEnabled: true }); + + expect( + 
screen.queryByText("Default result cache duration"), + ).not.toBeInTheDocument(); + }); + }); + + describe("EE", () => { + beforeEach(() => { + PLUGIN_CACHING.databaseCacheTTLFormField = { + name: "cache_ttl", + type: "integer", + title: "Default result cache duration", + }; + }); + + afterEach(() => { + PLUGIN_CACHING.databaseCacheTTLFormField = null; + }); + + it("is visible", async () => { + await setup({ cachingEnabled: true }); + + expect( + screen.queryByText("Default result cache duration"), + ).toBeInTheDocument(); + }); + + it("is invisible when caching disabled", async () => { + await setup({ cachingEnabled: false }); + + expect( + screen.queryByText("Default result cache duration"), + ).not.toBeInTheDocument(); + }); + }); + }); +}); diff --git a/frontend/src/metabase/admin/databases/editParamsForUserControlledScheduling.js b/frontend/src/metabase/admin/databases/editParamsForUserControlledScheduling.js index 2ff833e60de2..9b29a9e62549 100644 --- a/frontend/src/metabase/admin/databases/editParamsForUserControlledScheduling.js +++ b/frontend/src/metabase/admin/databases/editParamsForUserControlledScheduling.js @@ -10,9 +10,9 @@ function editSyncParamsForUserControlledScheduling(database) { } function editScheduleParamsForUserControlledScheduling(database) { - const { details, schedules = {} } = database; + const { details, schedules } = database; - if (details["let-user-control-scheduling"] && !schedules.metadata_sync) { + if (details["let-user-control-scheduling"] && !schedules?.metadata_sync) { database.schedules.metadata_sync = { schedule_type: "daily", }; diff --git a/frontend/src/metabase/admin/datamodel/components/MetricItem.jsx b/frontend/src/metabase/admin/datamodel/components/MetricItem.jsx index c9ae0e1899a5..ca4da250112c 100644 --- a/frontend/src/metabase/admin/datamodel/components/MetricItem.jsx +++ b/frontend/src/metabase/admin/datamodel/components/MetricItem.jsx @@ -24,11 +24,7 @@ export default class MetricItem extends Component { - + 
{metric.name} diff --git a/frontend/src/metabase/admin/datamodel/components/SegmentItem.jsx b/frontend/src/metabase/admin/datamodel/components/SegmentItem.jsx index d1925cdf7f00..7d06f6095d0b 100644 --- a/frontend/src/metabase/admin/datamodel/components/SegmentItem.jsx +++ b/frontend/src/metabase/admin/datamodel/components/SegmentItem.jsx @@ -25,8 +25,8 @@ export default class SegmentItem extends Component { {segment.name} diff --git a/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx b/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx index cba89771856a..8180734d084c 100644 --- a/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx +++ b/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx @@ -10,9 +10,10 @@ import Button from "metabase/components/Button"; import * as MetabaseCore from "metabase/lib/core"; import { isCurrency } from "metabase/lib/schema_metadata"; import { isFK } from "metabase/lib/types"; -import currency from "metabase/lib/currency"; import { getGlobalSettingsForColumn } from "metabase/visualizations/lib/settings/column"; +import { currency } from "cljs/metabase.shared.util.currency"; + import _ from "underscore"; import cx from "classnames"; @@ -238,7 +239,7 @@ export class SemanticTypeAndTargetPicker extends Component { searchProp="name" searchCaseSensitive={false} > - {Object.values(currency).map(c => ( + {currency.map(([_, c]) => (
+ + + + ))} + + )} + + {!hasRecents && ( + + + + )} + + + + ); +} + +RecentsList.propTypes = propTypes; + +export default _.compose( + Recents.loadList({ + wrapped: true, + reload: true, + loadingAndErrorWrapper: false, + }), +)(RecentsList); diff --git a/frontend/src/metabase/nav/components/RecentsList.styled.jsx b/frontend/src/metabase/nav/components/RecentsList.styled.jsx new file mode 100644 index 000000000000..f7503e42f7db --- /dev/null +++ b/frontend/src/metabase/nav/components/RecentsList.styled.jsx @@ -0,0 +1,14 @@ +import styled from "styled-components"; + +export const EmptyStateContainer = styled.div` + margin: 3rem 0; +`; + +export const Header = styled.h4` + padding: 0.5rem 1rem; +`; + +export const RecentListItemContent = styled.div` + display: flex; + align-items: flex-start; +`; diff --git a/frontend/src/metabase/nav/components/RecentsList.unit.spec.js b/frontend/src/metabase/nav/components/RecentsList.unit.spec.js new file mode 100644 index 000000000000..f517c8fd3732 --- /dev/null +++ b/frontend/src/metabase/nav/components/RecentsList.unit.spec.js @@ -0,0 +1,100 @@ +import React from "react"; +import { Provider } from "react-redux"; +import { render, screen, waitFor } from "@testing-library/react"; +import xhrMock from "xhr-mock"; +import { getStore } from "__support__/entities-store"; +import RecentsList from "./RecentsList"; + +const recentsData = [ + { + user_id: 1, + model: "card", + model_id: 83, + cnt: 9, + max_ts: "2021-08-24T23:50:21.077", + model_object: { + id: 83, + name: "Question I visited", + display: "table", + }, + }, + { + user_id: 1, + model: "dashboard", + model_id: 1, + cnt: 164, + max_ts: "2021-08-24T23:49:34.577", + model_object: { + id: 1, + name: "Dashboard I visited", + }, + }, + { + user_id: 1, + model: "table", + model_id: 4, + cnt: 164, + max_ts: "2021-08-24T23:49:34.577", + model_object: { + id: 1, + name: "table_i_visited", + display_name: "Table I visited", + }, + }, +]; + +function mockRecentsEndpoint(recents) { + 
xhrMock.get("/api/activity/recent_views", { + body: JSON.stringify(recents), + }); +} + +async function setup(recents = recentsData) { + mockRecentsEndpoint(recents); + + const store = getStore(); + + render( + + + , + ); + + await waitFor(() => screen.queryByText("Recently viewed")); +} + +describe("RecentsList", () => { + beforeEach(() => { + xhrMock.setup(); + }); + + afterEach(() => { + xhrMock.teardown(); + }); + + it("shows list of recents", async () => { + await setup(); + await waitFor(() => screen.queryByText("Question I visited")); + expect(screen.queryByText("Recently viewed")).toBeInTheDocument(); + + const [questionType, dashboardType, tableType] = screen.queryAllByTestId( + "recently-viewed-item-type", + ); + + expect(screen.queryByText("Question I visited")).toBeInTheDocument(); + expect(questionType).toHaveTextContent("Question"); + + expect(screen.queryByText("Dashboard I visited")).toBeInTheDocument(); + expect(dashboardType).toHaveTextContent("Dashboard"); + + expect(screen.queryByText("Table I visited")).toBeInTheDocument(); + expect(tableType).toHaveTextContent("Table"); + }); + + it("shows an empty state when there are no recents", async () => { + await setup([]); + + expect(screen.queryByText("Recently viewed")).toBeInTheDocument(); + expect(screen.queryByText("Nothing here")).toBeInTheDocument(); + }); +}); diff --git a/frontend/src/metabase/nav/components/SearchBar.jsx b/frontend/src/metabase/nav/components/SearchBar.jsx index a402979dd3d1..4927c6ba94e6 100644 --- a/frontend/src/metabase/nav/components/SearchBar.jsx +++ b/frontend/src/metabase/nav/components/SearchBar.jsx @@ -1,55 +1,17 @@ /* eslint-disable react/prop-types */ import React from "react"; import ReactDOM from "react-dom"; -import { Flex } from "grid-styled"; -import styled from "styled-components"; -import { space } from "styled-system"; import { t } from "ttag"; -import { color, lighten } from "metabase/lib/colors"; - import Card from "metabase/components/Card"; import Icon 
from "metabase/components/Icon"; import OnClickOutsideWrapper from "metabase/components/OnClickOutsideWrapper"; -import SearchResult from "metabase/search/components/SearchResult"; -import { DefaultSearchColor } from "metabase/nav/constants"; import MetabaseSettings from "metabase/lib/settings"; -const ActiveSearchColor = lighten(color("nav"), 0.1); - -import Search from "metabase/entities/search"; - -const SearchWrapper = Flex.extend` - position: relative; - background-color: ${props => - props.active ? ActiveSearchColor : DefaultSearchColor}; - border-radius: 6px; - flex: 1 1 auto; - max-width: 50em; - align-items: center; - color: white; - transition: background 300ms ease-in; - &:hover { - background-color: ${ActiveSearchColor}; - } -`; - -const SearchInput = styled.input` - ${space}; - background-color: transparent; - width: 100%; - border: none; - color: white; - font-size: 1em; - font-weight: 700; - &:focus { - outline: none; - } - &::placeholder { - color: ${color("text-white")}; - } -`; +import { SearchInput, SearchWrapper } from "./SearchBar.styled"; +import { SearchResults } from "./SearchResults"; +import RecentsList from "./RecentsList"; const ALLOWED_SEARCH_FOCUS_ELEMENTS = new Set(["BODY", "A"]); const SEARCH_LIMIT = 50; @@ -84,7 +46,7 @@ export default class SearchBar extends React.Component { this.setState({ searchText: "" }); } } - handleKeyUp = (e: KeyboardEvent) => { + handleKeyUp = e => { const FORWARD_SLASH_KEY = 191; if ( e.keyCode === FORWARD_SLASH_KEY && @@ -95,25 +57,6 @@ export default class SearchBar extends React.Component { } }; - renderResults(results) { - if (results.length === 0) { - return ( -
  • -
    - -

    {t`Didn't find anything`}

    -
    -
  • - ); - } else { - return results.map(l => ( -
  • - -
  • - )); - } - } - render() { const { active, searchText } = this.state; return ( @@ -152,18 +95,11 @@ export default class SearchBar extends React.Component { style={{ maxHeight: 400 }} py={1} > - - {({ list }) => { - return
      {this.renderResults(list)}
    ; - }} -
    + - ) : null} + ) : ( + + )}
    )} diff --git a/frontend/src/metabase/nav/components/SearchBar.styled.jsx b/frontend/src/metabase/nav/components/SearchBar.styled.jsx new file mode 100644 index 000000000000..273b688aa444 --- /dev/null +++ b/frontend/src/metabase/nav/components/SearchBar.styled.jsx @@ -0,0 +1,39 @@ +import styled from "styled-components"; +import { space } from "styled-system"; + +import { DefaultSearchColor } from "metabase/nav/constants"; +import { color, lighten } from "metabase/lib/colors"; + +const ActiveSearchColor = lighten(color("nav"), 0.1); + +export const SearchWrapper = styled.div` + display: flex; + position: relative; + background-color: ${props => + props.active ? ActiveSearchColor : DefaultSearchColor}; + border-radius: 6px; + flex: 1 1 auto; + max-width: 50em; + align-items: center; + color: white; + transition: background 300ms ease-in; + &:hover { + background-color: ${ActiveSearchColor}; + } +`; + +export const SearchInput = styled.input` + ${space}; + background-color: transparent; + width: 100%; + border: none; + color: white; + font-size: 1em; + font-weight: 700; + &:focus { + outline: none; + } + &::placeholder { + color: ${color("text-white")}; + } +`; diff --git a/frontend/src/metabase/nav/components/SearchResults.jsx b/frontend/src/metabase/nav/components/SearchResults.jsx new file mode 100644 index 000000000000..e23d6458fe8b --- /dev/null +++ b/frontend/src/metabase/nav/components/SearchResults.jsx @@ -0,0 +1,46 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { Box } from "grid-styled"; +import { t } from "ttag"; + +import { DEFAULT_SEARCH_LIMIT } from "metabase/lib/constants"; +import Search from "metabase/entities/search"; +import SearchResult from "metabase/search/components/SearchResult"; +import EmptyState from "metabase/components/EmptyState"; + +const propTypes = { + searchText: PropTypes.string, +}; + +export const SearchResults = ({ searchText }) => { + return ( + + {({ list }) => { + const hasResults = 
list.length > 0; + + return ( +
      + {hasResults ? ( + list.map(item => ( +
    • + +
    • + )) + ) : ( + + + + )} +
    + ); + }} +
    + ); +}; + +SearchResults.propTypes = propTypes; diff --git a/frontend/src/metabase/nav/components/StoreLink/StoreLink.jsx b/frontend/src/metabase/nav/components/StoreLink/StoreLink.jsx new file mode 100644 index 000000000000..2ee83e41a98c --- /dev/null +++ b/frontend/src/metabase/nav/components/StoreLink/StoreLink.jsx @@ -0,0 +1,18 @@ +import React from "react"; +import { t } from "ttag"; +import Tooltip from "metabase/components/Tooltip"; +import { StoreIcon, StoreIconRoot, StoreIconWrapper } from "./StoreLink.styled"; + +const StoreLink = () => { + return ( + + + + + + + + ); +}; + +export default StoreLink; diff --git a/frontend/src/metabase/nav/components/StoreLink/StoreLink.styled.jsx b/frontend/src/metabase/nav/components/StoreLink/StoreLink.styled.jsx new file mode 100644 index 000000000000..8df56a95afca --- /dev/null +++ b/frontend/src/metabase/nav/components/StoreLink/StoreLink.styled.jsx @@ -0,0 +1,25 @@ +import styled from "styled-components"; +import { color, darken } from "metabase/lib/colors"; +import { space } from "metabase/styled-components/theme"; +import Icon, { IconWrapper } from "metabase/components/Icon"; +import ExternalLink from "metabase/components/ExternalLink"; + +export const StoreIconRoot = styled(ExternalLink)` + margin-right: ${space(1)}; +`; + +export const StoreIconWrapper = styled(IconWrapper)` + color: ${color("white")}; + + &:hover { + color: ${color("white")}; + background-color: ${darken(color("accent7"))}; + } +`; + +export const StoreIcon = styled(Icon).attrs({ + name: "store", + size: 18, +})` + margin: ${space(1)}; +`; diff --git a/frontend/src/metabase/nav/components/StoreLink/index.js b/frontend/src/metabase/nav/components/StoreLink/index.js new file mode 100644 index 000000000000..59ef9cb93dbf --- /dev/null +++ b/frontend/src/metabase/nav/components/StoreLink/index.js @@ -0,0 +1 @@ +export { default } from "./StoreLink"; diff --git a/frontend/src/metabase/nav/components/utils.js 
b/frontend/src/metabase/nav/components/utils.js new file mode 100644 index 000000000000..36f3829f4b33 --- /dev/null +++ b/frontend/src/metabase/nav/components/utils.js @@ -0,0 +1,15 @@ +import { t } from "ttag"; + +const TRANSLATED_NAME_BY_MODEL_TYPE = { + card: t`Question`, + dashboard: t`Dashboard`, + table: t`Table`, + database: t`Database`, + collection: t`Collection`, + segment: t`Segment`, + metric: t`Metric`, + pulse: t`Pulse`, +}; + +export const getTranslatedEntityName = type => + TRANSLATED_NAME_BY_MODEL_TYPE[type] || null; diff --git a/frontend/src/metabase/nav/containers/Navbar.jsx b/frontend/src/metabase/nav/containers/Navbar.jsx index cc3bd4d8dc01..a42856c544cc 100644 --- a/frontend/src/metabase/nav/containers/Navbar.jsx +++ b/frontend/src/metabase/nav/containers/Navbar.jsx @@ -13,6 +13,7 @@ import { Flex, Box } from "grid-styled"; import * as Urls from "metabase/lib/urls"; import { color, darken } from "metabase/lib/colors"; +import MetabaseSettings from "metabase/lib/settings"; import Icon, { IconWrapper } from "metabase/components/Icon"; import EntityMenu from "metabase/components/EntityMenu"; @@ -43,6 +44,7 @@ const mapStateToProps = (state, props) => ({ }); import { DefaultSearchColor } from "metabase/nav/constants"; +import StoreLink from "metabase/nav/components/StoreLink"; const mapDispatchToProps = { onChangeLocation: push, @@ -156,6 +158,7 @@ export default class Navbar extends Component { /> + {!MetabaseSettings.isPaidPlan() && } {this.renderModal()} @@ -216,7 +219,7 @@ export default class Navbar extends Component { - + ({ }); const mapDispatchToProps = { - prefetchTables: () => Database.actions.fetchList({ include: "tables" }), - prefetchDatabases: () => Database.actions.fetchList({ saved: true }), push, }; @@ -55,8 +49,6 @@ export default class NewQueryOptions extends Component { props: Props; UNSAFE_componentWillMount(props) { - this.props.prefetchTables(); - this.props.prefetchDatabases(); const { location, push } = this.props; if 
(Object.keys(location.query).length > 0) { const { database, table, ...options } = location.query; @@ -91,7 +83,7 @@ export default class NewQueryOptions extends Component { {hasDataAccess && ( - + )} {hasDataAccess && ( - + )} {hasNativeWrite && ( - + - {showTypeIcon && } - - - +
    + {showTypeIcon && } + + +
    + ); } else { const placeholderText = isEditing @@ -194,37 +208,46 @@ export default class ParameterValueWidget extends Component { : placeholder || t`Select…`; return ( - - {showTypeIcon && } -
    - {hasValue ? WidgetDefinition.format(value) : placeholderText} -
    - - - } - target={this.getTargetRef} - // make sure the full date picker will expand to fit the dual calendars - autoWidth={parameter.type === "date/all-options"} + - -
    + + {showTypeIcon && } +
    + {hasValue ? WidgetDefinition.format(value) : placeholderText} +
    + + + } + target={this.getTargetRef} + // make sure the full date picker will expand to fit the dual calendars + autoWidth={parameter.type === "date/all-options"} + > + +
    + ); } } @@ -234,9 +257,12 @@ function getFields(metadata, parameter) { if (!metadata) { return []; } - return getFieldIds(parameter) - .map(id => metadata.field(id)) - .filter(f => f != null); + return ( + parameter.fields ?? + getFieldIds(parameter) + .map(id => metadata.field(id)) + .filter(f => f != null) + ); } function getFieldIds(parameter) { @@ -258,9 +284,21 @@ function Widget({ onFocusChanged, parameters, dashboard, + disabled, }) { const DateWidget = DATE_WIDGETS[parameter.type]; const fields = getFields(metadata, parameter); + + if (disabled) { + return ( + + ); + } + if (DateWidget) { return ( @@ -294,6 +332,7 @@ function Widget({ ); } } + Widget.propTypes = { ...ParameterValueWidget.propTypes, onPopoverClose: PropTypes.func.isRequired, diff --git a/frontend/src/metabase/parameters/components/ParameterWidget.jsx b/frontend/src/metabase/parameters/components/ParameterWidget.jsx index c2bf020f7ddc..080c65497c06 100644 --- a/frontend/src/metabase/parameters/components/ParameterWidget.jsx +++ b/frontend/src/metabase/parameters/components/ParameterWidget.jsx @@ -28,15 +28,21 @@ export default class ParameterWidget extends Component { }; renderPopover(value, setValue, placeholder, isFullscreen) { - const { parameter, editingParameter, commitImmediately } = this.props; - const isEditingParameter = !!( - editingParameter && editingParameter.id === parameter.id - ); + const { + dashboard, + parameter, + editingParameter, + commitImmediately, + parameters, + } = this.props; + + const isEditingParameter = editingParameter?.id === parameter.id; + return ( ({ metadata: getMetadata(state) })) export default class Parameters extends Component { - defaultProps = { - syncQueryString: false, - }; - - constructor(props) { - super(props); - - syncQueryParamsWithURL(props); - } - componentDidUpdate() { - const { parameters, parameterValues } = this.props; + const { parameters, parameterValues, dashboard } = this.props; if (this.props.syncQueryString) { // sync 
parameters to URL query string - const queryParams = {}; - for (const parameter of collateParametersWithValues( + const parameterValuesBySlug = getParameterValuesBySlug( parameters, parameterValues, - )) { - if (parameter.value) { - queryParams[parameter.slug] = parameter.value; - } - } + dashboard && { preserveDefaultedParameters: true }, + ); - let search = querystring.stringify(queryParams); + let search = querystring.stringify(parameterValuesBySlug); search = search ? "?" + search : ""; if (search !== window.location.search) { diff --git a/frontend/src/metabase/parameters/components/Parameters/syncQueryParamsWithURL.js b/frontend/src/metabase/parameters/components/Parameters/syncQueryParamsWithURL.js deleted file mode 100644 index ad5a3dd33dba..000000000000 --- a/frontend/src/metabase/parameters/components/Parameters/syncQueryParamsWithURL.js +++ /dev/null @@ -1,101 +0,0 @@ -import Dimension from "metabase-lib/lib/Dimension"; - -export const syncQueryParamsWithURL = props => { - props.commitImmediately - ? 
syncForInternalQuestion(props) - : syncForPublicQuestion(props); -}; - -const syncForInternalQuestion = props => { - const { parameters, setParameterValue, query, metadata } = props; - - if (!setParameterValue) { - return; - } - - for (const parameter of parameters) { - const queryParam = query && query[parameter.slug]; - - if (queryParam != null || parameter.default != null) { - const parsedParam = parseQueryParams(queryParam, parameter, metadata); - - setParameterValue(parameter.id, parsedParam); - } - } -}; - -const syncForPublicQuestion = props => { - const { parameters, setMultipleParameterValues, query, metadata } = props; - - if (!setMultipleParameterValues) { - return; - } - - const parameterValues = parameters.reduce((acc, parameter) => { - const queryParam = query && query[parameter.slug]; - - if (queryParam != null || parameter.default != null) { - acc[parameter.id] = parseQueryParams(queryParam, parameter, metadata); - } - - return acc; - }, {}); - - setMultipleParameterValues(parameterValues); -}; - -const parseQueryParams = (queryParam, parameter, metadata) => { - const value = getValue(queryParam, parameter); - const fields = getFields(parameter, metadata); - - return getValueFromFields(value, fields); -}; - -const getValue = (queryParam, parameter) => { - const value = queryParam != null ? queryParam : parameter.default; - return treatValueForFieldValuesWidget(value, parameter); -}; - -const treatValueForFieldValuesWidget = (value, parameter) => { - // ParameterValueWidget uses FieldValuesWidget if there's no available - // date widget and all targets are fields. - const willUseFieldValuesWidget = - parameter.hasOnlyFieldTargets && !/^date\//.test(parameter.type); - - // If we'll use FieldValuesWidget, we should start with an array to match. 
- if (willUseFieldValuesWidget && !Array.isArray(value)) { - value = [value]; - } - - return value; -}; - -// field IDs can be either -// ["field", , ] or -// ["field", , ] -const getFields = (parameter, metadata) => { - const fieldIds = parameter.field_ids || []; - return fieldIds.map( - id => metadata.field(id) || Dimension.parseMBQL(id, metadata).field(), - ); -}; - -export const getValueFromFields = (value, fields) => { - if (Array.isArray(value)) { - return value.map(v => getValueFromFields(v, fields)); - } - - // [].every is always true, so only check if there are some fields - if (fields.length > 0) { - // unix dates fields are numeric but query params shouldn't be parsed as numbers - if (fields.every(f => f.isNumeric() && !f.isDate())) { - return parseFloat(value); - } - - if (fields.every(f => f.isBoolean())) { - return value === "true" ? true : value === "false" ? false : value; - } - } - - return value; -}; diff --git a/frontend/src/metabase/parameters/components/ParametersList.jsx b/frontend/src/metabase/parameters/components/ParametersList.jsx index 6975d04068f1..e316f53a06a6 100644 --- a/frontend/src/metabase/parameters/components/ParametersList.jsx +++ b/frontend/src/metabase/parameters/components/ParametersList.jsx @@ -1,15 +1,18 @@ /* eslint-disable react/prop-types */ import React from "react"; -import { - SortableContainer, - SortableElement, - SortableHandle, -} from "react-sortable-hoc"; import cx from "classnames"; import StaticParameterWidget from "./ParameterWidget"; import Icon from "metabase/components/Icon"; -import { collateParametersWithValues } from "metabase/meta/Parameter"; +import { + SortableContainer, + SortableElement, + SortableHandle, +} from "metabase/components/sortable"; +import { + getValuePopulatedParameters, + getVisibleParameters, +} from "metabase/meta/Parameter"; import type { ParameterId, @@ -105,15 +108,16 @@ function ParametersList({ } }; - const hiddenParameters = - typeof hideParameters === "string" - ? 
new Set(hideParameters.split(",")) - : new Set(); - const collatedParameters = collateParametersWithValues( + const valuePopulatedParameters = getValuePopulatedParameters( parameters, parameterValues, ); + const visibleValuePopulatedParameters = getVisibleParameters( + valuePopulatedParameters, + hideParameters, + ); + let ParameterWidget; let ParameterWidgetList; if (isEditing) { @@ -137,41 +141,42 @@ function ParametersList({ onSortStart={handleSortStart} onSortEnd={handleSortEnd} > - {collatedParameters - .filter(p => !hiddenParameters.has(p.slug)) - .map((parameter, index) => ( - setParameterName(parameter.id, name)) - } - setValue={ - setParameterValue && - (value => setParameterValue(parameter.id, value)) - } - setDefaultValue={ - setParameterDefaultValue && - (value => setParameterDefaultValue(parameter.id, value)) - } - remove={removeParameter && (() => removeParameter(parameter.id))} - commitImmediately={commitImmediately} - dragHandle={ - isEditing && setParameterIndex ? ( - - ) : null - } - /> - ))} + {visibleValuePopulatedParameters.map((valuePopulatedParameter, index) => ( + setParameterName(valuePopulatedParameter.id, name)) + } + setValue={ + setParameterValue && + (value => setParameterValue(valuePopulatedParameter.id, value)) + } + setDefaultValue={ + setParameterDefaultValue && + (value => + setParameterDefaultValue(valuePopulatedParameter.id, value)) + } + remove={ + removeParameter && + (() => removeParameter(valuePopulatedParameter.id)) + } + commitImmediately={commitImmediately} + dragHandle={ + isEditing && setParameterIndex ? 
: null + } + /> + ))} ); } diff --git a/frontend/src/metabase/parameters/components/widgets/DateMonthYearWidget.jsx b/frontend/src/metabase/parameters/components/widgets/DateMonthYearWidget.jsx index c9de5fc23c06..e15d09203f67 100644 --- a/frontend/src/metabase/parameters/components/widgets/DateMonthYearWidget.jsx +++ b/frontend/src/metabase/parameters/components/widgets/DateMonthYearWidget.jsx @@ -57,9 +57,9 @@ export default class DateMonthYearWidget extends React.Component { onChange={year => this.setState({ year: year })} /> - + {_.range(0, 12).map(m => ( - + void, - - isEditing: boolean, - - fields: Field[], - parentFocusChanged: boolean => void, - - operator?: FilterOperator, - dashboard?: DashboardWithCards, - parameter?: Parameter, - parameters?: Parameter[], - placeholder?: string, -}; - -type State = { - value: any[], - isFocused: boolean, - widgetWidth: ?number, +const propTypes = { + dashboard: PropTypes.object, + fields: PropTypes.array.isRequired, + isEditing: PropTypes.bool.isRequired, + operator: PropTypes.object.isRequired, + parameter: PropTypes.object.isRequired, + parameters: PropTypes.array.isRequired, + parentFocusChanged: PropTypes.bool, + placeholder: PropTypes.string.isRequired, + setValue: PropTypes.func.isRequired, + value: PropTypes.string, }; const BORDER_WIDTH = 1; -const normalizeValue = value => - Array.isArray(value) ? value : value != null ? 
[value] : []; - -// TODO: rename this something else since we're using it for more than searching and more than text -export default class ParameterFieldWidget extends Component<*, Props, State> { - props: Props; - state: State; - - _unfocusedElement: React.Component; - - constructor(props: Props) { +export default class ParameterFieldWidget extends Component { + constructor(props) { super(props); this.state = { isFocused: false, @@ -66,25 +46,7 @@ export default class ParameterFieldWidget extends Component<*, Props, State> { static noPopover = true; - static format(value, fields) { - value = normalizeValue(value); - if (value.length > 1) { - const n = value.length; - return ngettext(msgid`${n} selection`, `${n} selections`, n); - } else { - return ( - - ); - } - } - - UNSAFE_componentWillReceiveProps(nextProps: Props) { + UNSAFE_componentWillReceiveProps(nextProps) { if (this.props.value !== nextProps.value) { this.setState({ value: nextProps.value }); } @@ -145,7 +107,10 @@ export default class ParameterFieldWidget extends Component<*, Props, State> { onClick={() => focusChanged(true)} > {savedValue.length > 0 ? 
( - ParameterFieldWidget.format(savedValue, fields) + ) : ( {placeholder} )} @@ -216,3 +181,5 @@ export default class ParameterFieldWidget extends Component<*, Props, State> { } } } + +ParameterFieldWidget.propTypes = propTypes; diff --git a/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.jsx b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.jsx new file mode 100644 index 000000000000..5e566e567d2d --- /dev/null +++ b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.jsx @@ -0,0 +1,35 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { ngettext, msgid } from "ttag"; + +import Value from "metabase/components/Value"; +import { normalizeValue } from "../normalizeValue"; + +function renderNumberOfSelections(numberOfSelections) { + return ngettext( + msgid`${numberOfSelections} selection`, + `${numberOfSelections} selections`, + numberOfSelections, + ); +} + +export default function ParameterFieldWidgetValue({ savedValue, fields }) { + const values = normalizeValue(savedValue); + + const numberOfValues = values.length; + + // If there are multiple fields, turn off remapping since they might + // be remapped to different fields. + const shouldRemap = fields.length === 1; + + return numberOfValues > 1 ? 
( + renderNumberOfSelections(numberOfValues) + ) : ( + + ); +} + +ParameterFieldWidgetValue.propTypes = { + savedValue: PropTypes.array, + fields: PropTypes.array, +}; diff --git a/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.unit.spec.js b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.unit.spec.js new file mode 100644 index 000000000000..d6e802eb5156 --- /dev/null +++ b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.unit.spec.js @@ -0,0 +1,20 @@ +import React from "react"; + +import ParameterFieldWidgetValue from "./ParameterFieldWidgetValue"; +import { render, screen } from "@testing-library/react"; + +const value = "A value"; + +describe("when fields is empty array", () => { + it("renders savedValue if it is a single item", () => { + render(); + screen.getByText(value); + }); + + it("renders number of selections if multiple items", () => { + render( + , + ); + screen.getByText("2 selections"); + }); +}); diff --git a/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.js b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.js new file mode 100644 index 000000000000..f83230f4451e --- /dev/null +++ b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.js @@ -0,0 +1,7 @@ +export function normalizeValue(value) { + if (Array.isArray(value)) { + return value; + } + + return value ? 
[value] : []; +} diff --git a/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.unit.spec.js b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.unit.spec.js new file mode 100644 index 000000000000..72386cb63912 --- /dev/null +++ b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.unit.spec.js @@ -0,0 +1,26 @@ +import { normalizeValue } from "./normalizeValue"; + +it("returns empty array if value is null", () => { + const value = null; + const expected = []; + + const normalized = normalizeValue(value); + + expect(normalized).toEqual(expected); +}); + +it("returns value if value is an array", () => { + const value = [1]; + + const normalized = normalizeValue(value); + + expect(normalized).toBe(value); +}); + +it("returns value as item of array if passed value is not an array", () => { + const value = 1; + + const normalized = normalizeValue(value); + + expect(normalized).toEqual([value]); +}); diff --git a/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx b/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx index fb7aec5f40e8..903e2fe90ea5 100644 --- a/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx +++ b/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx @@ -23,6 +23,7 @@ export default class TextWidget extends Component { commitImmediately: PropTypes.bool, placeholder: PropTypes.string, focusChanged: PropTypes.func, + disabled: PropTypes.bool, }; static defaultProps = { @@ -48,6 +49,7 @@ export default class TextWidget extends Component { className, isEditing, focusChanged: parentFocusChanged, + disabled, } = this.props; const defaultPlaceholder = this.state.isFocused ? "" @@ -89,6 +91,7 @@ export default class TextWidget extends Component { placeholder={ isEditing ? 
t`Enter a default value...` : defaultPlaceholder } + disabled={disabled} /> ); } diff --git a/frontend/src/metabase/plugins/builtin.js b/frontend/src/metabase/plugins/builtin.js index fd795f52b31f..4024ac3b9c6f 100644 --- a/frontend/src/metabase/plugins/builtin.js +++ b/frontend/src/metabase/plugins/builtin.js @@ -1,3 +1,4 @@ import "metabase/plugins/builtin/auth/password"; import "metabase/plugins/builtin/auth/google"; import "metabase/plugins/builtin/auth/ldap"; +import "metabase/plugins/builtin/settings/hosted"; diff --git a/frontend/src/metabase/plugins/builtin/settings/hosted.js b/frontend/src/metabase/plugins/builtin/settings/hosted.js new file mode 100644 index 000000000000..609ab7a6f072 --- /dev/null +++ b/frontend/src/metabase/plugins/builtin/settings/hosted.js @@ -0,0 +1,34 @@ +import _ from "underscore"; +import { updateIn } from "icepick"; +import { t } from "ttag"; +import MetabaseSettings from "metabase/lib/settings"; +import { PLUGIN_ADMIN_SETTINGS_UPDATES } from "metabase/plugins"; +import { SettingsCloudStoreLink } from "../../components/SettingsCloudStoreLink"; + +if (MetabaseSettings.isHosted()) { + PLUGIN_ADMIN_SETTINGS_UPDATES.push(sections => + _.omit(sections, ["email", "updates"]), + ); + + PLUGIN_ADMIN_SETTINGS_UPDATES.push(sections => + updateIn(sections, ["general", "settings"], settings => + _.reject(settings, setting => + ["site-url", "redirect-all-requests-to-https"].includes(setting.key), + ), + ), + ); + + PLUGIN_ADMIN_SETTINGS_UPDATES.push(sections => ({ + ...sections, + cloud: { + name: t`Cloud`, + settings: [ + { + key: "store-link", + display_name: t`Cloud Settings`, + widget: SettingsCloudStoreLink, + }, + ], + }, + })); +} diff --git a/frontend/src/metabase/plugins/components/PluginPlaceholder/PluginPlaceholder.jsx b/frontend/src/metabase/plugins/components/PluginPlaceholder/PluginPlaceholder.jsx new file mode 100644 index 000000000000..9f858ed6ec0b --- /dev/null +++ 
b/frontend/src/metabase/plugins/components/PluginPlaceholder/PluginPlaceholder.jsx @@ -0,0 +1,5 @@ +function PluginPlaceholder() { + return null; +} + +export default PluginPlaceholder; diff --git a/frontend/src/metabase/plugins/components/PluginPlaceholder/index.js b/frontend/src/metabase/plugins/components/PluginPlaceholder/index.js new file mode 100644 index 000000000000..aa21935936cc --- /dev/null +++ b/frontend/src/metabase/plugins/components/PluginPlaceholder/index.js @@ -0,0 +1 @@ +export { default } from "./PluginPlaceholder"; diff --git a/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.jsx b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.jsx new file mode 100644 index 000000000000..5cc2230510c5 --- /dev/null +++ b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.jsx @@ -0,0 +1,17 @@ +import React from "react"; +import { t } from "ttag"; +import MetabaseSettings from "metabase/lib/settings"; +import { Description, Link, LinkIcon } from "./SettingsCloudStoreLink.styled"; + +export function SettingsCloudStoreLink() { + const url = MetabaseSettings.storeUrl(); + return ( +
    + {t`Manage your Cloud account, including billing preferences and technical settings about this instance in your Metabase Store account.`} + + {t`Go to the Metabase Store`} + + +
    + ); +} diff --git a/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.styled.jsx b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.styled.jsx new file mode 100644 index 000000000000..0952a5b9ebd4 --- /dev/null +++ b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.styled.jsx @@ -0,0 +1,30 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import ExternalLink from "metabase/components/ExternalLink"; +import Icon from "metabase/components/Icon"; + +export const Description = styled.p` + color: ${color("text-dark")}; + max-width: 360px; +`; + +export const Link = styled(ExternalLink)` + display: inline-flex; + align-items: center; + color: ${color("text-white")}; + font-weight: bold; + background-color: ${color("brand")}; + padding: 12px 18px; + border-radius: 6px; + + &:hover { + opacity: 0.88; + transition: all 200ms linear; + } +`; + +export const LinkIcon = styled(Icon)` + color: ${color("text-white")}; + opacity: 0.6; + margin-left: 8px; +`; diff --git a/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/index.js b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/index.js new file mode 100644 index 000000000000..6ae42da6388f --- /dev/null +++ b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/index.js @@ -0,0 +1 @@ +export * from "./SettingsCloudStoreLink"; diff --git a/frontend/src/metabase/plugins/index.js b/frontend/src/metabase/plugins/index.js index 33b1699ee26a..37f04eb47b8e 100644 --- a/frontend/src/metabase/plugins/index.js +++ b/frontend/src/metabase/plugins/index.js @@ -1,4 +1,9 @@ +import { t } from "ttag"; +import PluginPlaceholder from "metabase/plugins/components/PluginPlaceholder"; + // Plugin integration points. All exports must be objects or arrays so they can be mutated by plugins. 
+const object = () => ({}); +const array = () => []; // functions called when the application is started export const PLUGIN_APP_INIT_FUCTIONS = []; @@ -16,8 +21,9 @@ export const PLUGIN_ADMIN_ROUTES = []; // functions that update the sections export const PLUGIN_ADMIN_SETTINGS_UPDATES = []; -// admin permissions grid +// admin permissions export const PLUGIN_ADMIN_PERMISSIONS_TABLE_ROUTES = []; +export const PLUGIN_ADMIN_PERMISSIONS_TABLE_GROUP_ROUTES = []; export const PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_OPTIONS = []; export const PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_ACTIONS = { controlled: [], @@ -32,6 +38,10 @@ export const PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_PERMISSION_VALUE = { // user form fields, e.x. login attributes export const PLUGIN_ADMIN_USER_FORM_FIELDS = []; +// menu items in people management tab +export const PLUGIN_ADMIN_USER_MENU_ITEMS = []; +export const PLUGIN_ADMIN_USER_MENU_ROUTES = []; + // authentication providers export const PLUGIN_AUTH_PROVIDERS = []; @@ -44,6 +54,8 @@ export const PLUGIN_SELECTORS = { getLogoBackgroundClass: (state, props) => "bg-white", }; +export const PLUGIN_FORM_WIDGETS = {}; + // snippet sidebar export const PLUGIN_SNIPPET_SIDEBAR_PLUS_MENU_OPTIONS = []; export const PLUGIN_SNIPPET_SIDEBAR_ROW_RENDERERS = {}; @@ -53,3 +65,45 @@ export const PLUGIN_SNIPPET_SIDEBAR_HEADER_BUTTONS = []; export const PLUGIN_DASHBOARD_SUBSCRIPTION_PARAMETERS_SECTION_OVERRIDE = { Component: undefined, }; + +const AUTHORITY_LEVEL_REGULAR = { + type: null, + name: t`Regular`, + icon: "folder", +}; + +export const PLUGIN_COLLECTIONS = { + authorityLevelFormFields: [], + isRegularCollection: () => true, + REGULAR_COLLECTION: AUTHORITY_LEVEL_REGULAR, + AUTHORITY_LEVEL: { + [AUTHORITY_LEVEL_REGULAR.type]: AUTHORITY_LEVEL_REGULAR, + }, +}; + +export const PLUGIN_COLLECTION_COMPONENTS = { + CollectionAuthorityLevelIcon: PluginPlaceholder, +}; + +export const PLUGIN_MODERATION = { + QuestionModerationSection: PluginPlaceholder, + 
ModerationStatusIcon: PluginPlaceholder, + getStatusIconForQuestion: object, + getStatusIcon: object, + getModerationTimelineEvents: array, +}; + +export const PLUGIN_CACHING = { + dashboardCacheTTLFormField: null, + databaseCacheTTLFormField: null, + questionCacheTTLFormField: null, + getQuestionsImplicitCacheTTL: () => null, +}; + +export const PLUGIN_ADVANCED_PERMISSIONS = { + DataPermissionsHelp: null, + addDatabasePermissionOptions: (permissions, _value) => permissions, + addSchemaPermissionOptions: (permissions, _value) => permissions, + addTablePermissionOptions: (permissions, _value) => permissions, + isBlockPermission: _value => false, +}; diff --git a/frontend/src/metabase/public/components/EmbedFrame.jsx b/frontend/src/metabase/public/components/EmbedFrame.jsx index b860cc1fb614..c0cfe7de300c 100644 --- a/frontend/src/metabase/public/components/EmbedFrame.jsx +++ b/frontend/src/metabase/public/components/EmbedFrame.jsx @@ -5,6 +5,7 @@ import { IFRAMED, initializeIframeResizer } from "metabase/lib/dom"; import { parseHashOptions } from "metabase/lib/browser"; import MetabaseSettings from "metabase/lib/settings"; +import { getValuePopulatedParameters } from "metabase/meta/Parameter"; import TitleAndDescription from "metabase/components/TitleAndDescription"; import Parameters from "metabase/parameters/components/Parameters/Parameters"; @@ -96,10 +97,10 @@ export default class EmbedFrame extends Component {
    ({ - ...p, - value: parameterValues && parameterValues[p.id], - }))} + parameters={getValuePopulatedParameters( + parameters, + parameterValues, + )} query={location.query} setParameterValue={setParameterValue} setMultipleParameterValues={setMultipleParameterValues} diff --git a/frontend/src/metabase/public/components/widgets/DisplayOptionsPane.jsx b/frontend/src/metabase/public/components/widgets/DisplayOptionsPane.jsx index ccf14cac8fc4..e21f1ca298e2 100644 --- a/frontend/src/metabase/public/components/widgets/DisplayOptionsPane.jsx +++ b/frontend/src/metabase/public/components/widgets/DisplayOptionsPane.jsx @@ -24,6 +24,7 @@ const DisplayOptionsPane = ({
    onChangeDisplayOptions({ @@ -32,10 +33,10 @@ const DisplayOptionsPane = ({ }) } /> - {t`Border`}
    onChangeDisplayOptions({ @@ -44,7 +45,6 @@ const DisplayOptionsPane = ({ }) } /> - {t`Title`}
    embeddingParams[parameter.slug] === "locked", + ); + + return lockedParameters; + } + + getPreviewParamsBySlug() { const { resourceParameters } = this.props; const { embeddingParams, parameterValues } = this.state; - const params = {}; - for (const parameter of resourceParameters) { - if (embeddingParams[parameter.slug] === "locked") { - params[parameter.slug] = - parameter.id in parameterValues - ? parameterValues[parameter.id] - : null; - } - } - return params; + + const lockedParameters = this.getPreviewParameters( + resourceParameters, + embeddingParams, + ); + + const parameterSlugValuePairs = lockedParameters.map(parameter => { + const value = + parameter.id in parameterValues ? parameterValues[parameter.id] : null; + return [parameter.slug, value]; + }); + + return Object.fromEntries(parameterSlugValuePairs); } render() { @@ -164,10 +174,10 @@ export default class EmbedModalContent extends Component { displayOptions, } = this.state; - const params = this.getPreviewParams(); - - const previewParameters = resourceParameters.filter( - p => embeddingParams[p.slug] === "locked", + const previewParametersBySlug = this.getPreviewParamsBySlug(); + const previewParameters = this.getPreviewParameters( + resourceParameters, + embeddingParams, ); return ( @@ -229,7 +239,7 @@ export default class EmbedModalContent extends Component { token={getSignedToken( resourceType, resource.id, - params, + previewParametersBySlug, secretKey, embeddingParams, )} @@ -237,14 +247,14 @@ export default class EmbedModalContent extends Component { siteUrl, resourceType, resource.id, - params, + previewParametersBySlug, displayOptions, secretKey, embeddingParams, )} siteUrl={siteUrl} secretKey={secretKey} - params={params} + params={previewParametersBySlug} displayOptions={displayOptions} previewParameters={previewParameters} parameterValues={parameterValues} diff --git a/frontend/src/metabase/public/containers/PublicDashboard.jsx 
b/frontend/src/metabase/public/containers/PublicDashboard.jsx index bb70947eb0e1..70f7b26213a3 100644 --- a/frontend/src/metabase/public/containers/PublicDashboard.jsx +++ b/frontend/src/metabase/public/containers/PublicDashboard.jsx @@ -27,7 +27,7 @@ import { getParameterValues, } from "metabase/dashboard/selectors"; -import * as dashboardActions from "metabase/dashboard/dashboard"; +import * as dashboardActions from "metabase/dashboard/actions"; import { setPublicDashboardEndpoints, diff --git a/frontend/src/metabase/public/containers/PublicQuestion.jsx b/frontend/src/metabase/public/containers/PublicQuestion.jsx index c6b882e892ed..afa23aade9f3 100644 --- a/frontend/src/metabase/public/containers/PublicQuestion.jsx +++ b/frontend/src/metabase/public/containers/PublicQuestion.jsx @@ -13,10 +13,13 @@ import type { Card } from "metabase-types/types/Card"; import type { Dataset } from "metabase-types/types/Dataset"; import type { ParameterValues } from "metabase-types/types/Parameter"; -import { getParametersBySlug } from "metabase/meta/Parameter"; import { - getParameters, - getParametersWithExtras, + getParameterValuesBySlug, + getParameterValuesByIdFromQueryParams, +} from "metabase/meta/Parameter"; +import { + getParametersFromCard, + getValueAndFieldIdPopulatedParametersFromCard, applyParameters, } from "metabase/meta/Card"; @@ -112,15 +115,20 @@ export default class PublicQuestion extends Component { this.props.addFields(card.param_fields); } - const parameterValues: ParameterValues = {}; - for (const parameter of getParameters(card)) { - parameterValues[String(parameter.id)] = query[parameter.slug]; - } - - this.setState({ card, parameterValues }, async () => { - await this.run(); - this.setState({ initialized: true }); - }); + const parameters = getValueAndFieldIdPopulatedParametersFromCard(card); + const parameterValuesById = getParameterValuesByIdFromQueryParams( + parameters, + query, + this.props.metadata, + ); + + this.setState( + { card, 
parameterValues: parameterValuesById }, + async () => { + await this.run(); + this.setState({ initialized: true }); + }, + ); } catch (error) { console.error("error", error); setErrorPage(error); @@ -139,18 +147,6 @@ export default class PublicQuestion extends Component { ); }; - setMultipleParameterValues = parameterValues => { - this.setState( - { - parameterValues: { - ...this.state.parameterValues, - ...parameterValues, - }, - }, - this.run, - ); - }; - run = async (): void => { const { setErrorPage, @@ -162,7 +158,7 @@ export default class PublicQuestion extends Component { return; } - const parameters = getParameters(card); + const parameters = getParametersFromCard(card); try { this.setState({ result: null }); @@ -172,7 +168,7 @@ export default class PublicQuestion extends Component { // embeds apply parameter values server-side newResult = await maybeUsePivotEndpoint(EmbedApi.cardQuery, card)({ token, - ...getParametersBySlug(parameters, parameterValues), + ...getParameterValuesBySlug(parameters, parameterValues), }); } else if (uuid) { // public links currently apply parameters client-side @@ -207,7 +203,8 @@ export default class PublicQuestion extends Component { /> ); - const parameters = card && getParametersWithExtras(card); + const parameters = + card && getValueAndFieldIdPopulatedParametersFromCard(card); return ( { +export const initializeQB = (location, params, queryParams) => { return async (dispatch, getState) => { // do this immediately to ensure old state is cleared before the user sees it dispatch(resetQB()); dispatch(cancelQuery()); - // preload metadata that's used in DataSelector - dispatch(Databases.actions.fetchList({ include: "tables" })); - dispatch(Databases.actions.fetchList({ saved: true })); - const { currentUser } = getState(); const cardId = Urls.extractEntityId(params.slug); @@ -507,12 +522,20 @@ export const initializeQB = (location, params) => { } card = question && question.card(); + const metadata = getMetadata(getState()); + 
const parameters = getValueAndFieldIdPopulatedParametersFromCard(card); + const parameterValues = getParameterValuesByIdFromQueryParams( + parameters, + queryParams, + metadata, + ); // Update the question to Redux state together with the initial state of UI controls dispatch.action(INITIALIZE_QB, { card, originalCard, uiControls, + parameterValues, }); // if we have loaded up a card that we can run then lets kick that off as well @@ -721,10 +744,26 @@ export const setParameterValue = createAction( }, ); +// refetches the card without triggering a run of the card's query +export const SOFT_RELOAD_CARD = "metabase/qb/SOFT_RELOAD_CARD"; +export const softReloadCard = createThunkAction(SOFT_RELOAD_CARD, () => { + return async (dispatch, getState) => { + const outdatedCard = getCard(getState()); + const action = await dispatch( + Questions.actions.fetch({ id: outdatedCard.id }, { reload: true }), + ); + + return Questions.HACK_getObjectFromAction(action); + }; +}); + export const RELOAD_CARD = "metabase/qb/RELOAD_CARD"; export const reloadCard = createThunkAction(RELOAD_CARD, () => { return async (dispatch, getState) => { - const outdatedCard = getState().qb.card; + const outdatedCard = getCard(getState()); + + dispatch(resetQB()); + const action = await dispatch( Questions.actions.fetch({ id: outdatedCard.id }, { reload: true }), ); @@ -891,6 +930,15 @@ export const updateQuestion = ( } // + // Native query should never be in notebook mode (metabase#12651) + if (getQueryBuilderMode(getState()) !== "view" && newQuestion.isNative()) { + await dispatch( + setQueryBuilderMode("view", { + shouldUpdateUrl: false, + }), + ); + } + // Replace the current question with a new one await dispatch.action(UPDATE_QUESTION, { card: newQuestion.card() }); @@ -1010,7 +1058,6 @@ export const apiUpdateQuestion = question => { // so we want the databases list to be re-fetched next time we hit "New Question" so it shows up dispatch(setRequestUnloaded(["entities", "databases"])); - 
dispatch(updateUrl(updatedQuestion.card(), { dirty: false })); MetabaseAnalytics.trackEvent( "QueryBuilder", "Update Card", @@ -1104,6 +1151,7 @@ export const QUERY_COMPLETED = "metabase/qb/QUERY_COMPLETED"; export const queryCompleted = (question, queryResults) => { return async (dispatch, getState) => { const [{ data }] = queryResults; + const [{ data: prevData }] = getQueryResults(getState()) || [{}]; const originalQuestion = getOriginalQuestion(getState()); const dirty = !originalQuestion || @@ -1120,7 +1168,10 @@ export const queryCompleted = (question, queryResults) => { // Otherwise, trust that the question was saved with the correct display. question = question // if we are going to trigger autoselection logic, check if the locked display no longer is "sensible". - .maybeUnlockDisplay(getSensibleDisplays(data)) + .maybeUnlockDisplay( + getSensibleDisplays(data), + prevData && getSensibleDisplays(prevData), + ) .setDefaultDisplay() .switchTableScalar(data); } @@ -1340,3 +1391,14 @@ export const showChartSettings = createAction(SHOW_CHART_SETTINGS); // these are just temporary mappings to appease the existing QB code and it's naming prefs export const onUpdateVisualizationSettings = updateCardVisualizationSettings; export const onReplaceAllVisualizationSettings = replaceAllCardVisualizationSettings; + +export const REVERT_TO_REVISION = "metabase/qb/REVERT_TO_REVISION"; +export const revertToRevision = createThunkAction( + REVERT_TO_REVISION, + revision => { + return async dispatch => { + await revision.revert(); + await dispatch(reloadCard()); + }; + }, +); diff --git a/frontend/src/metabase/query_builder/components/AggregationWidget.jsx b/frontend/src/metabase/query_builder/components/AggregationWidget.jsx index 61216094c968..78bfd52583f0 100644 --- a/frontend/src/metabase/query_builder/components/AggregationWidget.jsx +++ b/frontend/src/metabase/query_builder/components/AggregationWidget.jsx @@ -48,7 +48,6 @@ export default class AggregationWidget extends 
React.Component { children, className, } = this.props; - console.log("aggregation", aggregation); const popover = this.state.isOpen && ( diff --git a/frontend/src/metabase/query_builder/components/AlertListPopoverContent.jsx b/frontend/src/metabase/query_builder/components/AlertListPopoverContent.jsx index 0260e1fbb527..26073b73b3a6 100644 --- a/frontend/src/metabase/query_builder/components/AlertListPopoverContent.jsx +++ b/frontend/src/metabase/query_builder/components/AlertListPopoverContent.jsx @@ -286,7 +286,6 @@ export class AlertScheduleText extends Component { return `${verbose ? "daily at " : "Daily, "} ${hour} ${amPm}`; } else if (scheduleType === "weekly") { - console.log(schedule); const hourOfDay = schedule.schedule_hour; const day = _.find( DAY_OF_WEEK_OPTIONS, diff --git a/frontend/src/metabase/query_builder/components/AlertModals.jsx b/frontend/src/metabase/query_builder/components/AlertModals.jsx index f3d2e3463200..03d0db7b68af 100644 --- a/frontend/src/metabase/query_builder/components/AlertModals.jsx +++ b/frontend/src/metabase/query_builder/components/AlertModals.jsx @@ -15,12 +15,14 @@ import Icon from "metabase/components/Icon"; import ChannelSetupModal from "metabase/components/ChannelSetupModal"; import ButtonWithStatus from "metabase/components/ButtonWithStatus"; import PulseEditChannels from "metabase/pulse/components/PulseEditChannels"; +import { getErrorMessage } from "metabase/components/form/FormMessage"; +import { AlertModalFooter, AlertModalError } from "./AlertModals.styled"; import User from "metabase/entities/users"; // actions import { createAlert, deleteAlert, updateAlert } from "metabase/alert/alert"; -import { apiUpdateQuestion } from "metabase/query_builder/actions"; +import { apiUpdateQuestion, updateUrl } from "metabase/query_builder/actions"; import { fetchPulseFormInput } from "metabase/pulse/actions"; // selectors @@ -71,7 +73,7 @@ const textStyle = { hasConfiguredAnyChannel: hasConfiguredAnyChannelSelector(state), 
hasConfiguredEmailChannel: hasConfiguredEmailChannelSelector(state), }), - { createAlert, fetchPulseFormInput, apiUpdateQuestion }, + { createAlert, fetchPulseFormInput, apiUpdateQuestion, updateUrl }, ) export class CreateAlertModalContent extends Component { props: { @@ -87,6 +89,7 @@ export class CreateAlertModalContent extends Component { this.state = { hasSeenEducationalScreen: MetabaseCookies.getHasSeenAlertSplash(), alert: getDefaultAlert(question, user, visualizationSettings), + formError: null, }; } @@ -113,21 +116,28 @@ export class CreateAlertModalContent extends Component { onAlertChange = alert => this.setState({ alert }); onCreateAlert = async () => { - const { createAlert, apiUpdateQuestion, onAlertCreated } = this.props; + const { + question, + createAlert, + apiUpdateQuestion, + updateUrl, + onAlertCreated, + } = this.props; const { alert } = this.state; - // Resave the question here (for persisting the x/y axes; see #6749) - await apiUpdateQuestion(); + try { + this.setState({ formError: null }); - await createAlert(alert); + await apiUpdateQuestion(question); + await createAlert(alert); + await updateUrl(question.card(), { dirty: false }); - // should close be triggered manually like this - // but the creation notification would appear automatically ...? - // OR should the modal visibility be part of QB redux state - // (maybe check how other modals are implemented) - onAlertCreated(); - - MetabaseAnalytics.trackEvent("Alert", "Create", alert.alert_condition); + onAlertCreated(); + MetabaseAnalytics.trackEvent("Alert", "Create", alert.alert_condition); + } catch (e) { + this.setState({ formError: e }); + throw e; + } }; proceedFromEducationalScreen = () => { @@ -146,7 +156,7 @@ export class CreateAlertModalContent extends Component { user, hasLoadedChannelInfo, } = this.props; - const { alert, hasSeenEducationalScreen } = this.state; + const { alert, hasSeenEducationalScreen, formError } = this.state; const channelRequirementsMet = isAdmin ? 
hasConfiguredAnyChannel @@ -186,14 +196,16 @@ export class CreateAlertModalContent extends Component { alert={alert} onAlertChange={this.onAlertChange} /> -
    -
    + + {formError && ( + {getErrorMessage(formError)} + )} -
    +
    ); @@ -290,7 +302,7 @@ export class AlertEducationalScreen extends Component { question: getQuestion(state), visualizationSettings: getVisualizationSettings(state), }), - { apiUpdateQuestion, updateAlert, deleteAlert }, + { apiUpdateQuestion, updateAlert, deleteAlert, updateUrl }, ) export class UpdateAlertModalContent extends Component { props: { @@ -306,26 +318,40 @@ export class UpdateAlertModalContent extends Component { super(); this.state = { modifiedAlert: props.alert, + formError: null, }; } onAlertChange = modifiedAlert => this.setState({ modifiedAlert }); onUpdateAlert = async () => { - const { apiUpdateQuestion, updateAlert, onAlertUpdated } = this.props; + const { + question, + apiUpdateQuestion, + updateAlert, + updateUrl, + onAlertUpdated, + } = this.props; const { modifiedAlert } = this.state; - // Resave the question here (for persisting the x/y axes; see #6749) - await apiUpdateQuestion(); + try { + this.setState({ formError: null }); - await updateAlert(modifiedAlert); - onAlertUpdated(); + await apiUpdateQuestion(); + await updateAlert(modifiedAlert); + await updateUrl(question.card(), { dirty: false }); - MetabaseAnalytics.trackEvent( - "Alert", - "Update", - modifiedAlert.alert_condition, - ); + onAlertUpdated(); + + MetabaseAnalytics.trackEvent( + "Alert", + "Update", + modifiedAlert.alert_condition, + ); + } catch (e) { + this.setState({ formError: e }); + throw e; + } }; onDeleteAlert = async () => { @@ -343,7 +369,7 @@ export class UpdateAlertModalContent extends Component { user, isAdmin, } = this.props; - const { modifiedAlert } = this.state; + const { modifiedAlert, formError } = this.state; const isCurrentUser = alert.creator.id === user.id; const title = isCurrentUser ? t`Edit your alert` : t`Edit alert`; @@ -367,14 +393,16 @@ export class UpdateAlertModalContent extends Component { /> )} -
    -
    + + {formError && ( + {getErrorMessage(formError)} + )} -
    +
    ); diff --git a/frontend/src/metabase/query_builder/components/AlertModals.styled.jsx b/frontend/src/metabase/query_builder/components/AlertModals.styled.jsx new file mode 100644 index 000000000000..ed338861bfe6 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/AlertModals.styled.jsx @@ -0,0 +1,14 @@ +import styled from "styled-components"; +import { space } from "metabase/styled-components/theme"; +import { color } from "metabase/lib/colors"; + +export const AlertModalFooter = styled.div` + display: flex; + justify-content: right; + align-items: center; + margin-top: ${space(3)}; +`; + +export const AlertModalError = styled.div` + color: ${color("error")}; +`; diff --git a/frontend/src/metabase/query_builder/components/ClampedDescription.jsx b/frontend/src/metabase/query_builder/components/ClampedDescription.jsx new file mode 100644 index 000000000000..14f2b4ee440a --- /dev/null +++ b/frontend/src/metabase/query_builder/components/ClampedDescription.jsx @@ -0,0 +1,28 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; + +import ClampedText from "metabase/components/ClampedText"; +import { TextButton } from "metabase/components/Button.styled"; + +ClampedDescription.propTypes = { + className: PropTypes.string, + description: PropTypes.string, + onEdit: PropTypes.func, +}; + +export function ClampedDescription({ className, description, onEdit }) { + if (!description && !onEdit) { + return null; + } + + return ( +
    + {description ? ( + + ) : ( + {t`Add a description`} + )} +
    + ); +} diff --git a/frontend/src/metabase/query_builder/components/DataSelector.jsx b/frontend/src/metabase/query_builder/components/DataSelector.jsx index ecca1494386e..fea74d56a65b 100644 --- a/frontend/src/metabase/query_builder/components/DataSelector.jsx +++ b/frontend/src/metabase/query_builder/components/DataSelector.jsx @@ -6,6 +6,8 @@ import { t } from "ttag"; import cx from "classnames"; import _ from "underscore"; +import { SAVED_QUESTIONS_VIRTUAL_DB_ID } from "metabase/lib/constants"; + import ListSearchField from "metabase/components/ListSearchField"; import ExternalLink from "metabase/components/ExternalLink"; import Icon from "metabase/components/Icon"; @@ -188,6 +190,7 @@ export class UnconnectedDataSelector extends Component { tableFilter: PropTypes.func, hasTableSearch: PropTypes.bool, canChangeDatabase: PropTypes.bool, + containerClassName: PropTypes.string, }; static defaultProps = { @@ -358,7 +361,8 @@ export class UnconnectedDataSelector extends Component { selectedDatabase && selectedSchema && selectedSchema.database.id !== selectedDatabase.id && - !selectedSchema.database.is_saved_questions; + selectedSchema.database.id !== SAVED_QUESTIONS_VIRTUAL_DB_ID; + const invalidTable = selectedSchema && selectedTable && @@ -463,7 +467,7 @@ export class UnconnectedDataSelector extends Component { const nextStep = this.getNextStep(); if (!nextStep) { await this.setStateWithComputedState(stateChange); - this.popover.current.toggle(); + this.popover.current && this.popover.current.toggle(); } else { await this.switchToStep(nextStep, stateChange, skipSteps); } @@ -667,7 +671,7 @@ export class UnconnectedDataSelector extends Component { }); renderActiveStep() { - const { combineDatabaseSchemaSteps, hasTableSearch } = this.props; + const { combineDatabaseSchemaSteps } = this.props; const props = { ...this.state, @@ -680,7 +684,7 @@ export class UnconnectedDataSelector extends Component { isLoading: this.state.isLoading, hasNextStep: 
!!this.getNextStep(), onBack: this.getPreviousStep() ? this.previousStep : null, - hasFiltering: !hasTableSearch, + hasFiltering: true, }; switch (this.state.activeStep) { @@ -770,6 +774,7 @@ export class UnconnectedDataSelector extends Component { autoWidth ref={this.popover} isInitiallyOpen={this.props.isInitiallyOpen} + containerClassName={this.props.containerClassName} triggerElement={this.getTriggerElement()} triggerClasses={this.getTriggerClasses()} horizontalAttachments={["center", "left", "right"]} @@ -967,6 +972,7 @@ const TablePicker = ({ onBack, isLoading, hasFiltering, + minTablesToShowSearch = 10, }) => { // In case DataSelector props get reseted if (!selectedDatabase) { @@ -1009,7 +1015,10 @@ const TablePicker = ({ }, ]; return ( -
    +
    = minTablesToShowSearch} onChange={item => onChangeTable(item.table)} itemIsSelected={item => item.table && selectedTable diff --git a/frontend/src/metabase/query_builder/components/DimensionList.jsx b/frontend/src/metabase/query_builder/components/DimensionList.jsx index 4031e084b6e2..e1559097fbe1 100644 --- a/frontend/src/metabase/query_builder/components/DimensionList.jsx +++ b/frontend/src/metabase/query_builder/components/DimensionList.jsx @@ -108,7 +108,11 @@ export default class DimensionList extends Component { onRemoveDimension, } = this.props; const subDimensions = - enableSubDimensions && item.dimension && item.dimension.dimensions(); + enableSubDimensions && + item.dimension && + // Do not display sub dimension if this is an FK (metabase#16787) + !item.dimension.field().isFK() && + item.dimension.dimensions(); const multiSelect = !!(onAddDimension || onRemoveDimension); @@ -142,7 +146,9 @@ export default class DimensionList extends Component { dimension={sectionDimension} dimensions={subDimensions} onChangeDimension={dimension => { - this.props.onChangeDimension(dimension); + this.props.onChangeDimension(dimension, { + isSubDimension: true, + }); onClose(); }} /> diff --git a/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx b/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx index e0cd914dde91..cd76c8eb21a3 100644 --- a/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx +++ b/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx @@ -648,43 +648,45 @@ export default class NativeQueryEditor extends Component { closeModal={this.props.closeSnippetModal} /> )} -
    - - - {showSnippetSidebarButton && ( - + - )} - cancelQuery()} - compact - className="mx2 mb2 mt-auto" - style={{ width: 40, height: 40 }} - getTooltip={() => - (this.props.nativeEditorSelectedText - ? t`Run selected text` - : t`Run query`) + - " " + - (isMac() ? t`(⌘ + enter)` : t`(Ctrl + enter)`) - } - /> -
    + + {showSnippetSidebarButton && ( + + )} + cancelQuery()} + compact + className="mx2 mb2 mt-auto" + style={{ width: 40, height: 40 }} + getTooltip={() => + (this.props.nativeEditorSelectedText + ? t`Run selected text` + : t`Run query`) + + " " + + (isMac() ? t`(⌘ + enter)` : t`(Ctrl + enter)`) + } + /> +
    + )}
    ); diff --git a/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx b/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx index 8b258648be6a..476decc46ee7 100644 --- a/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx +++ b/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx @@ -29,6 +29,7 @@ const QueryDownloadWidget = ({ dashcardId, icon, params, + visualizationSettings, }) => (

    {t`Download full results`}

    @@ -54,7 +55,7 @@ const QueryDownloadWidget = ({ )} {EXPORT_FORMATS.map(type => ( - + {dashcardId && token ? ( ) : null} @@ -104,10 +104,17 @@ const QueryDownloadWidget = ({
    ); -const UnsavedQueryButton = ({ type, result: { json_query = {} }, card }) => ( +const UnsavedQueryButton = ({ + type, + result: { json_query = {} }, + visualizationSettings, +}) => ( {type} diff --git a/frontend/src/metabase/query_builder/components/QueryModals.jsx b/frontend/src/metabase/query_builder/components/QueryModals.jsx index 3ac806057100..c78583863e05 100644 --- a/frontend/src/metabase/query_builder/components/QueryModals.jsx +++ b/frontend/src/metabase/query_builder/components/QueryModals.jsx @@ -4,6 +4,8 @@ import React from "react"; import { t } from "ttag"; import _ from "underscore"; +import { MODAL_TYPES } from "metabase/query_builder/constants"; + import Modal from "metabase/components/Modal"; import SaveQuestionModal from "metabase/containers/SaveQuestionModal"; @@ -34,13 +36,13 @@ export default class QueryModals extends React.Component { onCloseModal(); } else { // HACK: in a timeout because save modal closes itself - setTimeout(() => onOpenModal("create-alert")); + setTimeout(() => onOpenModal(MODAL_TYPES.CREATE_ALERT)); } }; render() { const { modal, question, onCloseModal, onOpenModal } = this.props; - return modal === "save" ? ( + return modal === MODAL_TYPES.SAVE ? ( { await this.props.onCreate(card); - onOpenModal("saved"); + onOpenModal(MODAL_TYPES.SAVED); }} onClose={onCloseModal} /> - ) : modal === "saved" ? ( + ) : modal === MODAL_TYPES.SAVED ? ( { - onOpenModal("add-to-dashboard"); + onOpenModal(MODAL_TYPES.ADD_TO_DASHBOARD); }} /> - ) : modal === "add-to-dashboard-save" ? ( + ) : modal === MODAL_TYPES.ADD_TO_DASHBOARD_SAVE ? ( { await this.props.onSave(card); - onOpenModal("add-to-dashboard"); + onOpenModal(MODAL_TYPES.ADD_TO_DASHBOARD); }} onCreate={async card => { await this.props.onCreate(card); - onOpenModal("add-to-dashboard"); + onOpenModal(MODAL_TYPES.ADD_TO_DASHBOARD); }} onClose={onCloseModal} multiStep /> - ) : modal === "add-to-dashboard" ? ( + ) : modal === MODAL_TYPES.ADD_TO_DASHBOARD ? 
( - ) : modal === "create-alert" ? ( + ) : modal === MODAL_TYPES.CREATE_ALERT ? ( - ) : modal === "save-question-before-alert" ? ( + ) : modal === MODAL_TYPES.SAVE_QUESTION_BEFORE_ALERT ? ( - ) : modal === "save-question-before-embed" ? ( + ) : modal === MODAL_TYPES.SAVE_QUESTION_BEFORE_EMBED ? ( { await this.props.onSave(card, false); - onOpenModal("embed"); + onOpenModal(MODAL_TYPES.EMBED); }} onCreate={async card => { await this.props.onCreate(card, false); - onOpenModal("embed"); + onOpenModal(MODAL_TYPES.EMBED); }} onClose={onCloseModal} multiStep initialCollectionId={this.props.initialCollectionId} /> - ) : modal === "history" ? ( + ) : modal === MODAL_TYPES.HISTORY ? ( - ) : modal === "move" ? ( + ) : modal === MODAL_TYPES.MOVE ? ( - ) : modal === "archive" ? ( + ) : modal === MODAL_TYPES.ARCHIVE ? ( - ) : modal === "edit" ? ( + ) : modal === MODAL_TYPES.EDIT ? ( this.props.onSave(card, false)} /> - ) : modal === "embed" ? ( + ) : modal === MODAL_TYPES.EMBED ? ( - ) : modal === "clone" ? ( + ) : modal === MODAL_TYPES.CLONE ? 
( onOpenModal("saved")} + onSaved={() => onOpenModal(MODAL_TYPES.SAVED)} /> ) : null; diff --git a/frontend/src/metabase/query_builder/components/QueryVisualization.jsx b/frontend/src/metabase/query_builder/components/QueryVisualization.jsx index 86d25c73b28e..bc1df7c8bec4 100644 --- a/frontend/src/metabase/query_builder/components/QueryVisualization.jsx +++ b/frontend/src/metabase/query_builder/components/QueryVisualization.jsx @@ -126,6 +126,7 @@ export default class QueryVisualization extends Component { diff --git a/frontend/src/metabase/query_builder/components/QuestionActionButtons.jsx b/frontend/src/metabase/query_builder/components/QuestionActionButtons.jsx new file mode 100644 index 000000000000..33d7651419b1 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/QuestionActionButtons.jsx @@ -0,0 +1,84 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; + +import { MODAL_TYPES } from "metabase/query_builder/constants"; + +import Button from "metabase/components/Button"; +import Tooltip from "metabase/components/Tooltip"; +import { Container } from "./QuestionActionButtons.styled"; + +export const EDIT_TESTID = "edit-details-button"; +export const ADD_TO_DASH_TESTID = "add-to-dashboard-button"; +export const MOVE_TESTID = "move-button"; +export const CLONE_TESTID = "clone-button"; +export const ARCHIVE_TESTID = "archive-button"; + +const ICON_SIZE = 18; + +QuestionActionButtons.propTypes = { + canWrite: PropTypes.bool.isRequired, + onOpenModal: PropTypes.func.isRequired, +}; + +export default QuestionActionButtons; + +function QuestionActionButtons({ canWrite, onOpenModal }) { + return ( + + {canWrite && ( + +
    diff --git a/frontend/src/metabase/query_builder/components/view/ViewFooter.jsx b/frontend/src/metabase/query_builder/components/view/ViewFooter.jsx index 23162b042217..412775dc220d 100644 --- a/frontend/src/metabase/query_builder/components/view/ViewFooter.jsx +++ b/frontend/src/metabase/query_builder/components/view/ViewFooter.jsx @@ -150,6 +150,7 @@ const ViewFooter = ({ className="mx1 hide sm-show" card={question.card()} result={result} + visualizationSettings={visualizationSettings} /> ), QuestionAlertWidget.shouldRender({ diff --git a/frontend/src/metabase/query_builder/components/view/ViewHeader.jsx b/frontend/src/metabase/query_builder/components/view/ViewHeader.jsx index 2a24796e7954..eadaa1c0f826 100644 --- a/frontend/src/metabase/query_builder/components/view/ViewHeader.jsx +++ b/frontend/src/metabase/query_builder/components/view/ViewHeader.jsx @@ -4,18 +4,16 @@ import { t } from "ttag"; import cx from "classnames"; import { Box } from "grid-styled"; -import Icon from "metabase/components/Icon"; import Link from "metabase/components/Link"; import ButtonBar from "metabase/components/ButtonBar"; import CollectionBadge from "metabase/questions/components/CollectionBadge"; import LastEditInfoLabel from "metabase/components/LastEditInfoLabel"; - +import SavedQuestionHeaderButton from "metabase/query_builder/components/SavedQuestionHeaderButton/SavedQuestionHeaderButton"; import ViewSection, { ViewHeading, ViewSubHeading } from "./ViewSection"; import ViewButton from "metabase/query_builder/components/view/ViewButton"; import QuestionDataSource from "./QuestionDataSource"; import QuestionDescription from "./QuestionDescription"; -import QuestionEntityMenu from "./QuestionEntityMenu"; import QuestionLineage from "./QuestionLineage"; import QuestionPreviewToggle from "./QuestionPreviewToggle"; import QuestionNotebookButton from "./QuestionNotebookButton"; @@ -25,6 +23,7 @@ import { QuestionSummarizeWidget } from "./QuestionSummaries"; import 
NativeQueryButton from "./NativeQueryButton"; import RunButtonWithTooltip from "../RunButtonWithTooltip"; +import { SavedQuestionHeaderButtonContainer } from "./ViewHeader.styled"; import StructuredQuery from "metabase-lib/lib/queries/StructuredQuery"; @@ -44,6 +43,8 @@ const viewTitleHeaderPropTypes = { isNativeEditorOpen: PropTypes.bool, isShowingFilterSidebar: PropTypes.bool, isShowingSummarySidebar: PropTypes.bool, + isShowingQuestionDetailsSidebar: PropTypes.bool, + isObjectDetail: PropTypes.bool, runQuestionQuery: PropTypes.func, cancelQuery: PropTypes.func, @@ -53,6 +54,9 @@ const viewTitleHeaderPropTypes = { onCloseSummary: PropTypes.func, onAddFilter: PropTypes.func, onCloseFilter: PropTypes.func, + onOpenQuestionDetails: PropTypes.func, + onCloseQuestionDetails: PropTypes.func, + onOpenQuestionHistory: PropTypes.func, isPreviewable: PropTypes.bool, isPreviewing: PropTypes.bool, @@ -114,10 +118,14 @@ export class ViewTitleHeader extends React.Component { isShowingFilterSidebar, onAddFilter, onCloseFilter, + isShowingQuestionDetailsSidebar, + onOpenQuestionDetails, + onCloseQuestionDetails, + onOpenQuestionHistory, + isObjectDetail, } = this.props; const { isFiltersExpanded } = this.state; const isShowingNotebook = queryBuilderMode === "notebook"; - const description = question.description(); const lastEditInfo = question.lastEditInfo(); const isStructured = question.isStructured(); @@ -142,42 +150,36 @@ export class ViewTitleHeader extends React.Component { {isSaved ? (
    - - {question.displayName()} - - {description && ( - + - )} - + {lastEditInfo && ( )}
    - + - {QuestionDataSource.shouldRender({ question }) && ( - - )} - - {QuestionDataSource.shouldRender({ question }) && ( + {QuestionDataSource.shouldRender(this.props) && ( )} @@ -200,7 +202,10 @@ export class ViewTitleHeader extends React.Component { {isNative ? ( t`New question` ) : ( - + )} {showFiltersInHeading && @@ -226,6 +231,7 @@ export class ViewTitleHeader extends React.Component { diff --git a/frontend/src/metabase/query_builder/components/view/ViewHeader.styled.jsx b/frontend/src/metabase/query_builder/components/view/ViewHeader.styled.jsx new file mode 100644 index 000000000000..80a66d3f67f9 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/view/ViewHeader.styled.jsx @@ -0,0 +1,6 @@ +import styled from "styled-components"; + +export const SavedQuestionHeaderButtonContainer = styled.div` + position: relative; + right: 0.38rem; +`; diff --git a/frontend/src/metabase/query_builder/components/view/ViewSidebar.jsx b/frontend/src/metabase/query_builder/components/view/ViewSidebar.jsx deleted file mode 100644 index 3969a19c1360..000000000000 --- a/frontend/src/metabase/query_builder/components/view/ViewSidebar.jsx +++ /dev/null @@ -1,43 +0,0 @@ -/* eslint-disable react/prop-types */ -import React from "react"; - -import cx from "classnames"; - -import { Motion, spring } from "react-motion"; - -const SPRING_CONFIG = { stiffness: 200, damping: 26 }; - -const ViewSideBar = ({ left, right, width = 355, isOpen, children }) => ( - - {motionStyle => ( - - )} - -); - -export default ViewSideBar; diff --git a/frontend/src/metabase/query_builder/components/view/ViewSidebar/ViewSidebar.jsx b/frontend/src/metabase/query_builder/components/view/ViewSidebar/ViewSidebar.jsx new file mode 100644 index 000000000000..1d7b70b87eef --- /dev/null +++ b/frontend/src/metabase/query_builder/components/view/ViewSidebar/ViewSidebar.jsx @@ -0,0 +1,28 @@ +import React from "react"; +import PropTypes from "prop-types"; + +import { ViewSidebarAside, ViewSidebarContent 
} from "./ViewSidebar.styled"; + +const ViewSidebar = ({ side = "right", width = 355, isOpen, children }) => ( + // If we passed `width` as prop, it would end up in the final HTML elements. + // This would ruin the animation, so we pass it as `widthProp`. + + {children} + +); + +ViewSidebar.propTypes = { + left: PropTypes.bool, + right: PropTypes.bool, + width: PropTypes.number, + isOpen: PropTypes.bool, + side: PropTypes.oneOf(["left", "right"]), + children: PropTypes.node, +}; + +export default ViewSidebar; diff --git a/frontend/src/metabase/query_builder/components/view/ViewSidebar/ViewSidebar.styled.jsx b/frontend/src/metabase/query_builder/components/view/ViewSidebar/ViewSidebar.styled.jsx new file mode 100644 index 000000000000..f7d787449d44 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/view/ViewSidebar/ViewSidebar.styled.jsx @@ -0,0 +1,41 @@ +import styled, { css } from "styled-components"; + +import { color } from "metabase/lib/colors"; + +export const ViewSidebarAside = styled.aside` + overflow-x: hidden; + overflow-y: auto; + opacity: 0; + position: relative; + transition: width 0.3s, opacity 0.3s; + width: 0; + + ${({ side }) =>
    side === "left"
      ? 
css` + border-right: 1px solid ${color("border")}; + left: 0; + ` + : css` + border-left: 1px solid ${color("border")}; + right: 0; + `} + + ${({ isOpen, widthProp: width }) => + isOpen && + width && + css` + opacity: 1; + width: ${width}px; + `} + + @media (prefers-reduced-motion) { + transition: none; + } +`; + +export const ViewSidebarContent = styled.div` + position: absolute; + height: 100%; + width: 100%; +`; diff --git a/frontend/src/metabase/query_builder/components/view/ViewSidebar/index.js b/frontend/src/metabase/query_builder/components/view/ViewSidebar/index.js new file mode 100644 index 000000000000..80130b0a1752 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/view/ViewSidebar/index.js @@ -0,0 +1 @@ +export { default } from "./ViewSidebar"; diff --git a/frontend/src/metabase/query_builder/components/view/sidebars/ChartTypeSidebar.jsx b/frontend/src/metabase/query_builder/components/view/sidebars/ChartTypeSidebar.jsx index ad25c8691de3..8629f558d5e2 100644 --- a/frontend/src/metabase/query_builder/components/view/sidebars/ChartTypeSidebar.jsx +++ b/frontend/src/metabase/query_builder/components/view/sidebars/ChartTypeSidebar.jsx @@ -87,7 +87,7 @@ const ChartTypeOption = ({ }) => ( @@ -105,6 +105,8 @@ const ChartTypeOption = ({ borderRadius: 10, padding: 12, }} + data-testid={`${visualization.uiName}-button`} + data-is-sensible={isSensible} > diff --git a/frontend/src/metabase/query_builder/components/view/sidebars/QuestionDetailsSidebar.jsx b/frontend/src/metabase/query_builder/components/view/sidebars/QuestionDetailsSidebar.jsx new file mode 100644 index 000000000000..31e1121649af --- /dev/null +++ b/frontend/src/metabase/query_builder/components/view/sidebars/QuestionDetailsSidebar.jsx @@ -0,0 +1,28 @@ +import React, { useState } from "react"; +import PropTypes from "prop-types"; + +import QuestionDetailsSidebarPanel from "metabase/query_builder/components/view/sidebars/QuestionDetailsSidebarPanel"; +import { SIDEBAR_VIEWS } from 
"./constants"; + +QuestionDetailsSidebar.propTypes = { + question: PropTypes.object.isRequired, + onOpenModal: PropTypes.func.isRequired, +}; + +function QuestionDetailsSidebar({ question, onOpenModal }) { + const [view, setView] = useState(SIDEBAR_VIEWS.DETAILS); + + switch (view) { + case SIDEBAR_VIEWS.DETAILS: + default: + return ( + + ); + } +} + +export default QuestionDetailsSidebar; diff --git a/frontend/src/metabase/query_builder/components/view/sidebars/QuestionDetailsSidebarPanel.jsx b/frontend/src/metabase/query_builder/components/view/sidebars/QuestionDetailsSidebarPanel.jsx new file mode 100644 index 000000000000..a6c6022a67aa --- /dev/null +++ b/frontend/src/metabase/query_builder/components/view/sidebars/QuestionDetailsSidebarPanel.jsx @@ -0,0 +1,51 @@ +import React from "react"; +import PropTypes from "prop-types"; + +import QuestionActionButtons from "metabase/query_builder/components/QuestionActionButtons"; +import { ClampedDescription } from "metabase/query_builder/components/ClampedDescription"; +import { + Container, + SidebarPaddedContent, +} from "./QuestionDetailsSidebarPanel.styled"; +import QuestionActivityTimeline from "metabase/query_builder/components/QuestionActivityTimeline"; + +import { PLUGIN_MODERATION } from "metabase/plugins"; + +export default QuestionDetailsSidebarPanel; + +QuestionDetailsSidebarPanel.propTypes = { + question: PropTypes.object.isRequired, + onOpenModal: PropTypes.func.isRequired, + removeModerationReview: PropTypes.func.isRequired, +}; + +function QuestionDetailsSidebarPanel({ + question, + onOpenModal, + removeModerationReview, +}) { + const canWrite = question.canWrite(); + const description = question.description(); + + const onDescriptionEdit = canWrite + ? 
() => { + onOpenModal("edit"); + } + : undefined; + + return ( + + + + + + + + + ); +} diff --git a/frontend/src/metabase/query_builder/components/view/sidebars/QuestionDetailsSidebarPanel.styled.jsx b/frontend/src/metabase/query_builder/components/view/sidebars/QuestionDetailsSidebarPanel.styled.jsx new file mode 100644 index 000000000000..b5810f44d0b2 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/view/sidebars/QuestionDetailsSidebarPanel.styled.jsx @@ -0,0 +1,15 @@ +import styled from "styled-components"; + +export const Container = styled.div` + height: 100%; + overflow-y: auto; + display: flex; + flex-direction: column; +`; + +export const SidebarPaddedContent = styled.div` + display: flex; + flex-direction: column; + row-gap: 1rem; + padding: 0.5rem 1.5rem 1rem 1.5rem; +`; diff --git a/frontend/src/metabase/query_builder/components/view/sidebars/constants.js b/frontend/src/metabase/query_builder/components/view/sidebars/constants.js new file mode 100644 index 000000000000..eb402548ff39 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/view/sidebars/constants.js @@ -0,0 +1,3 @@ +export const SIDEBAR_VIEWS = { + DETAILS: "DETAILS", +}; diff --git a/frontend/src/metabase/query_builder/constants.js b/frontend/src/metabase/query_builder/constants.js new file mode 100644 index 000000000000..a96420d5a2bc --- /dev/null +++ b/frontend/src/metabase/query_builder/constants.js @@ -0,0 +1,15 @@ +export const MODAL_TYPES = { + SAVE: "save", + EDIT: "edit", + ADD_TO_DASHBOARD: "add-to-dashboard", + MOVE: "move", + CLONE: "clone", + ARCHIVE: "archive", + SAVED: "saved", + ADD_TO_DASHBOARD_SAVE: "add-to-dashboard-save", + CREATE_ALERT: "create-alert", + SAVE_QUESTION_BEFORE_ALERT: "save-question-before-alert", + SAVE_QUESTION_BEFORE_EMBED: "save-question-before-embed", + HISTORY: "history", + EMBED: "embed", +}; diff --git a/frontend/src/metabase/query_builder/containers/QueryBuilder.jsx 
b/frontend/src/metabase/query_builder/containers/QueryBuilder.jsx index 22a1116f8f20..df547be42609 100644 --- a/frontend/src/metabase/query_builder/containers/QueryBuilder.jsx +++ b/frontend/src/metabase/query_builder/containers/QueryBuilder.jsx @@ -174,7 +174,11 @@ export default class QueryBuilder extends Component { } UNSAFE_componentWillMount() { - this.props.initializeQB(this.props.location, this.props.params); + this.props.initializeQB( + this.props.location, + this.props.params, + this.props.location.query, + ); } componentDidMount() { @@ -253,9 +257,10 @@ export default class QueryBuilder extends Component { }; handleSave = async card => { - const { question, apiUpdateQuestion } = this.props; + const { question, apiUpdateQuestion, updateUrl } = this.props; const questionWithUpdatedCard = question.setCard(card); await apiUpdateQuestion(questionWithUpdatedCard); + await updateUrl(questionWithUpdatedCard.card(), { dirty: false }); if (this.props.fromUrl) { this.props.onChangeLocation(this.props.fromUrl); @@ -278,11 +283,8 @@ export default class QueryBuilder extends Component { uiControls: { modal, recentlySaved }, } = this.props; - // const Panel = queryBuilderMode === "notebook" ? 
Notebook : View; - const Panel = View; - return ( - createPublicLink(card)} onDisablePublicLink={() => deletePublicLink(card)} onUpdateEnableEmbedding={enableEmbedding => diff --git a/frontend/src/metabase/query_builder/reducers.js b/frontend/src/metabase/query_builder/reducers.js index 2ca5a34fcb8d..3399b3d3ded0 100644 --- a/frontend/src/metabase/query_builder/reducers.js +++ b/frontend/src/metabase/query_builder/reducers.js @@ -13,6 +13,7 @@ import { SET_MODAL_SNIPPET, SET_SNIPPET_COLLECTION_ID, CLOSE_QB_NEWB_MODAL, + SOFT_RELOAD_CARD, RELOAD_CARD, API_CREATE_QUESTION, API_UPDATE_QUESTION, @@ -44,6 +45,10 @@ import { onOpenChartType, onCloseChartType, onCloseSidebars, + onOpenQuestionDetails, + onCloseQuestionDetails, + onOpenQuestionHistory, + onCloseQuestionHistory, } from "./actions"; const DEFAULT_UI_CONTROLS = { @@ -56,6 +61,7 @@ const DEFAULT_UI_CONTROLS = { isShowingFilterSidebar: false, isShowingChartTypeSidebar: false, isShowingChartSettingsSidebar: false, + isShowingQuestionDetailsSidebar: false, initialChartSetting: null, isPreviewing: true, // sql preview mode isShowingRawTable: false, // table/viz toggle @@ -68,13 +74,15 @@ const UI_CONTROLS_SIDEBAR_DEFAULTS = { isShowingFilterSidebar: false, isShowingChartSettingsSidebar: false, isShowingChartTypeSidebar: false, + isShowingQuestionDetailsSidebar: false, }; -// this is used to close toher sidebar when one is updated +// this is used to close other sidebar when one is updated const CLOSED_NATIVE_EDITOR_SIDEBARS = { isShowingTemplateTagsEditor: false, isShowingSnippetSidebar: false, isShowingDataReference: false, + isShowingQuestionDetailsSidebar: false, }; // various ui state options @@ -163,6 +171,7 @@ export const uiControls = handleActions( [SHOW_CHART_SETTINGS]: { next: (state, { payload }) => ({ ...state, + ...UI_CONTROLS_SIDEBAR_DEFAULTS, isShowingChartSettingsSidebar: true, initialChartSetting: payload, }), @@ -205,6 +214,29 @@ export const uiControls = handleActions( ...state, 
...UI_CONTROLS_SIDEBAR_DEFAULTS, }), + [onOpenQuestionDetails]: state => ({ + ...state, + ...UI_CONTROLS_SIDEBAR_DEFAULTS, + isShowingQuestionDetailsSidebar: true, + questionDetailsTimelineDrawerState: undefined, + }), + [onCloseQuestionDetails]: state => ({ + ...state, + ...UI_CONTROLS_SIDEBAR_DEFAULTS, + questionDetailsTimelineDrawerState: undefined, + }), + [onOpenQuestionHistory]: state => ({ + ...state, + ...UI_CONTROLS_SIDEBAR_DEFAULTS, + isShowingQuestionDetailsSidebar: true, + questionDetailsTimelineDrawerState: "open", + }), + [onCloseQuestionHistory]: state => ({ + ...state, + ...UI_CONTROLS_SIDEBAR_DEFAULTS, + isShowingQuestionDetailsSidebar: true, + questionDetailsTimelineDrawerState: "closed", + }), [onCloseSidebars]: state => ({ ...state, ...UI_CONTROLS_SIDEBAR_DEFAULTS, @@ -220,6 +252,7 @@ export const card = handleActions( [INITIALIZE_QB]: { next: (state, { payload }) => (payload ? payload.card : null), }, + [SOFT_RELOAD_CARD]: { next: (state, { payload }) => payload }, [RELOAD_CARD]: { next: (state, { payload }) => payload }, [SET_CARD_AND_RUN]: { next: (state, { payload }) => payload.card }, [API_CREATE_QUESTION]: { next: (state, { payload }) => payload }, @@ -347,6 +380,9 @@ export const queryStartTime = handleActions( export const parameterValues = handleActions( { + [INITIALIZE_QB]: { + next: (state, { payload: { parameterValues } }) => parameterValues, + }, [SET_PARAMETER_VALUE]: { next: (state, { payload: { id, value } }) => value == null ? 
dissoc(state, id) : assoc(state, id, value), diff --git a/frontend/src/metabase/query_builder/selectors.js b/frontend/src/metabase/query_builder/selectors.js index 92410987afa1..2a98569a5d55 100644 --- a/frontend/src/metabase/query_builder/selectors.js +++ b/frontend/src/metabase/query_builder/selectors.js @@ -13,7 +13,7 @@ import { getVisualizationTransformed, } from "metabase/visualizations"; import { getComputedSettingsForSeries } from "metabase/visualizations/lib/settings/visualization"; -import { getParametersWithExtras } from "metabase/meta/Card"; +import { getValueAndFieldIdPopulatedParametersFromCard } from "metabase/meta/Card"; import { normalizeParameterValue } from "metabase/meta/Parameter"; import Utils from "metabase/lib/utils"; @@ -118,7 +118,8 @@ export const getDatabaseFields = createSelector( export const getParameters = createSelector( [getCard, getParameterValues], - (card, parameterValues) => getParametersWithExtras(card, parameterValues), + (card, parameterValues) => + getValueAndFieldIdPopulatedParametersFromCard(card, parameterValues), ); const getLastRunDatasetQuery = createSelector( @@ -161,23 +162,38 @@ const getNextRunParameterValues = createSelector( .filter(p => p !== undefined), ); +function normalizeClause(clause) { + return typeof clause.raw === "function" ? clause.raw() : clause; +} + // Certain differences in a query should be ignored. `normalizeQuery` -// standardizes the query before comparision in `getIsResultDirty`. -function normalizeQuery(query, tableMetadata) { +// standardizes the query before comparison in `getIsResultDirty`. +export function normalizeQuery(query, tableMetadata) { if (!query) { return query; } - if (query.query && tableMetadata) { - query = updateIn(query, ["query", "fields"], fields => { - fields = fields - ? 
// if the query has fields, copy them before sorting - [...fields] - : // if the fields aren't set, we get them from the table metadata - tableMetadata.fields.map(({ id }) => ["field", id, null]); - return fields.sort((a, b) => - JSON.stringify(b).localeCompare(JSON.stringify(a)), - ); - }); + if (query.query) { + if (tableMetadata) { + query = updateIn(query, ["query", "fields"], fields => { + fields = fields + ? // if the query has fields, copy them before sorting + [...fields] + : // if the fields aren't set, we get them from the table metadata + tableMetadata.fields.map(({ id }) => ["field", id, null]); + return fields.sort((a, b) => + JSON.stringify(b).localeCompare(JSON.stringify(a)), + ); + }); + } + ["aggregation", "breakout", "filter", "joins", "order-by"].forEach( + clauseList => { + if (query.query[clauseList]) { + query = updateIn(query, ["query", clauseList], clauses => + clauses.map(normalizeClause), + ); + } + }, + ); } if (query.native && query.native["template-tags"] == null) { query = assocIn(query, ["native", "template-tags"], {}); @@ -234,8 +250,12 @@ export const getMode = createSelector( ); export const getIsObjectDetail = createSelector( - [getMode], - mode => mode && mode.name() === "object", + [getMode, getQueryResults], + (mode, results) => { + // It handles filtering by a manually set PK column that is not unique + const hasMultipleRows = results?.some(({ data }) => data?.rows.length > 1); + return mode?.name() === "object" && !hasMultipleRows; + }, ); export const getIsDirty = createSelector( @@ -286,6 +306,7 @@ export const getRawSeries = createSelector( ) => { let display = question && question.display(); let settings = question && question.settings(); + if (isObjectDetail) { display = "object"; } else if (isShowingRawTable) { @@ -462,3 +483,8 @@ export const getIsLiveResizable = createSelector( } }, ); + +export const getQuestionDetailsTimelineDrawerState = createSelector( + [getUiControls], + uiControls => uiControls && 
uiControls.questionDetailsTimelineDrawerState, +); diff --git a/frontend/src/metabase/query_builder/utils.unit.spec.js b/frontend/src/metabase/query_builder/utils.unit.spec.js new file mode 100644 index 000000000000..209616ca71f4 --- /dev/null +++ b/frontend/src/metabase/query_builder/utils.unit.spec.js @@ -0,0 +1,205 @@ +import Question from "metabase-lib/lib/Question"; +import Aggregation from "metabase-lib/lib/queries/structured/Aggregation"; +import Breakout from "metabase-lib/lib/queries/structured/Breakout"; +import Filter from "metabase-lib/lib/queries/structured/Filter"; +import Join from "metabase-lib/lib/queries/structured/Join"; +import OrderBy from "metabase-lib/lib/queries/structured/OrderBy"; +import { + SAMPLE_DATASET, + ORDERS, + PRODUCTS, + metadata, +} from "__support__/sample_dataset_fixture"; +import { normalizeQuery } from "./selectors"; + +function toFieldRef(field) { + return ["field", field.id, null]; +} + +function sortFields(f1, f2) { + return JSON.stringify(f2).localeCompare(JSON.stringify(f1)); +} + +function getTableFields(tableId) { + const table = SAMPLE_DATASET.tables.find(table => table.id === tableId); + return table.fields.map(toFieldRef).sort(sortFields); +} + +function getQuestion({ type = "query", query = {} } = {}) { + const queryObjectKey = type === "query" ? "query" : "native"; + let queryObject = {}; + + if (type === "query") { + queryObject = { + ...query, + "source-table": ORDERS.id, + }; + } else { + queryObject = query; + } + + return new Question({ + display: "table", + visualization_settings: {}, + dataset_query: { + type, + database: SAMPLE_DATASET.id, + [queryObjectKey]: queryObject, + }, + }); +} + +function setup(questionOpts) { + const question = getQuestion(questionOpts); + const query = question.query(); + const tableMetadata = question.isStructured() + ? 
metadata.table(query.sourceTableId()) + : null; + return { question, query, datasetQuery: query.datasetQuery(), tableMetadata }; +} + +const FEW_ORDERS_TABLE_FIELDS = [ + ORDERS.ID, + ORDERS.TOTAL, + ORDERS.CREATED_AT, +].map(toFieldRef); + +const TEST_CLAUSE = { + AGGREGATION: ["count"], + BREAKOUT: toFieldRef(ORDERS.CREATED_AT), + FILTER: [">=", toFieldRef(ORDERS.TOTAL), 20], + ORDER_BY: ["asc", ["aggregation", 0]], + JOIN: { + alias: "Products", + condition: ["=", toFieldRef(ORDERS.PRODUCT_ID), toFieldRef(PRODUCTS.ID)], + }, +}; + +describe("normalizeQuery", () => { + it("does nothing if query is nullish", () => { + expect(normalizeQuery(null)).toBe(null); + expect(normalizeQuery(undefined)).toBe(undefined); + }); + + describe("structured query", () => { + it("adds explicit list of fields if missing", () => { + const { datasetQuery, query, tableMetadata } = setup(); + const expectedFields = getTableFields(query.sourceTableId()); + + const normalizedQuery = normalizeQuery(datasetQuery, tableMetadata); + + expect(normalizedQuery.query).toEqual( + expect.objectContaining({ + fields: expectedFields, + }), + ); + }); + + it("sorts query fields if they're set explicitly", () => { + const { datasetQuery, tableMetadata } = setup({ + query: { fields: FEW_ORDERS_TABLE_FIELDS }, + }); + + const normalizedQuery = normalizeQuery(datasetQuery, tableMetadata); + + expect(normalizedQuery.query.fields).toEqual( + FEW_ORDERS_TABLE_FIELDS.sort(sortFields), + ); + }); + + it("does nothing to query fields if table metadata is not provided", () => { + const { datasetQuery } = setup({ + query: { fields: FEW_ORDERS_TABLE_FIELDS }, + }); + + const normalizedQuery = normalizeQuery(datasetQuery); + + expect(normalizedQuery).toEqual(datasetQuery); + }); + + it("converts clauses into plain MBQL objects", () => { + const { datasetQuery } = setup({ + query: { + aggregation: [new Aggregation(TEST_CLAUSE.AGGREGATION)], + breakout: [new Breakout(TEST_CLAUSE.BREAKOUT)], + filter: [new 
Filter(TEST_CLAUSE.FILTER)], + joins: [new Join(TEST_CLAUSE.JOIN)], + "order-by": [new OrderBy(TEST_CLAUSE.ORDER_BY)], + }, + }); + + const { query: normalizedQuery } = normalizeQuery(datasetQuery); + + expect(normalizedQuery).toEqual({ + ...datasetQuery.query, + aggregation: [TEST_CLAUSE.AGGREGATION], + breakout: [TEST_CLAUSE.BREAKOUT], + filter: [TEST_CLAUSE.FILTER], + joins: [TEST_CLAUSE.JOIN], + "order-by": [TEST_CLAUSE.ORDER_BY], + }); + expect(normalizedQuery.aggregation[0]).not.toBeInstanceOf(Aggregation); + expect(normalizedQuery.breakout[0]).not.toBeInstanceOf(Breakout); + expect(normalizedQuery.filter[0]).not.toBeInstanceOf(Filter); + expect(normalizedQuery.joins[0]).not.toBeInstanceOf(Join); + expect(normalizedQuery["order-by"][0]).not.toBeInstanceOf(OrderBy); + }); + + it("does nothing to clauses if they're plain MBQL already", () => { + const { datasetQuery } = setup({ + query: { + aggregation: [TEST_CLAUSE.AGGREGATION], + breakout: [TEST_CLAUSE.BREAKOUT], + filter: [TEST_CLAUSE.FILTER], + joins: [TEST_CLAUSE.JOIN], + "order-by": [TEST_CLAUSE.ORDER_BY], + }, + }); + + const { query: normalizedQuery } = normalizeQuery(datasetQuery); + + expect(normalizedQuery).toEqual(datasetQuery.query); + expect(normalizedQuery.aggregation[0]).not.toBeInstanceOf(Aggregation); + expect(normalizedQuery.breakout[0]).not.toBeInstanceOf(Breakout); + expect(normalizedQuery.filter[0]).not.toBeInstanceOf(Filter); + expect(normalizedQuery.joins[0]).not.toBeInstanceOf(Join); + expect(normalizedQuery["order-by"][0]).not.toBeInstanceOf(OrderBy); + }); + }); + + describe("native query", () => { + it("assigns empty object to template tags if missing", () => { + const { datasetQuery } = setup({ + type: "native", + }); + + const normalizedQuery = normalizeQuery(datasetQuery); + + expect(normalizedQuery).toEqual({ + ...datasetQuery, + native: { + ...datasetQuery.native, + "template-tags": {}, + }, + }); + }); + + it("does nothing to template tags if they're set explicitly", () => { + 
const { datasetQuery } = setup({ + type: "native", + query: { + "template-tags": { + total: { + name: "total", + type: "dimension", + }, + }, + }, + }); + + const normalizedQuery = normalizeQuery(datasetQuery); + + expect(normalizedQuery).toEqual(datasetQuery); + }); + }); +}); diff --git a/frontend/src/metabase/questions/components/CollectionBadge.jsx b/frontend/src/metabase/questions/components/CollectionBadge.jsx index 83b2e5803faa..17e4b9180aea 100644 --- a/frontend/src/metabase/questions/components/CollectionBadge.jsx +++ b/frontend/src/metabase/questions/components/CollectionBadge.jsx @@ -1,33 +1,53 @@ -/* eslint-disable react/prop-types */ import React from "react"; +import PropTypes from "prop-types"; import Badge from "metabase/components/Badge"; import Collection from "metabase/entities/collections"; +import { PLUGIN_COLLECTIONS } from "metabase/plugins"; -@Collection.load({ - id: (state, props) => props.collectionId || "root", - wrapped: true, - loadingAndErrorWrapper: false, - properties: ["name"], -}) -class CollectionBadge extends React.Component { - render() { - const { collection, analyticsContext, ...props } = this.props; - if (!collection) { - return null; - } - return ( - - {collection.getName()} - - ); +const propTypes = { + collection: PropTypes.object, + analyticsContext: PropTypes.string.isRequired, + className: PropTypes.string, +}; + +const IRREGULAR_ICON_WIDTH = 14; +const IRREGULAR_ICON_PROPS = { + width: IRREGULAR_ICON_WIDTH, + height: 16, + + // Workaround: if a CollectionBadge icon has a tooltip, the default offset x is incorrect + targetOffsetX: IRREGULAR_ICON_WIDTH, +}; + +function CollectionBadge({ collection, analyticsContext, className }) { + if (!collection) { + return null; } + const isRegular = PLUGIN_COLLECTIONS.isRegularCollection(collection); + const icon = { + ...collection.getIcon(), + ...(isRegular ? 
{ size: 12 } : IRREGULAR_ICON_PROPS), + }; + return ( + + {collection.getName()} + + ); } -export default CollectionBadge; +CollectionBadge.propTypes = propTypes; + +export default Collection.load({ + id: (state, props) => props.collectionId || "root", + wrapped: true, + loadingAndErrorWrapper: false, + properties: ["name", "authority_level"], +})(CollectionBadge); diff --git a/frontend/src/metabase/reducers-main.js b/frontend/src/metabase/reducers-main.js index e0e6cc5d9fe0..44cfe750d355 100644 --- a/frontend/src/metabase/reducers-main.js +++ b/frontend/src/metabase/reducers-main.js @@ -10,11 +10,8 @@ import admin from "metabase/admin/admin"; /* setup */ import * as setup from "metabase/setup/reducers"; -/* user settings */ -import * as user from "metabase/user/reducers"; - /* dashboards */ -import dashboard from "metabase/dashboard/dashboard"; +import dashboard from "metabase/dashboard/reducers"; import * as home from "metabase/home/reducers"; /* query builder */ @@ -44,6 +41,5 @@ export default { reference, revisions, setup: combineReducers(setup), - user: combineReducers(user), admin, }; diff --git a/frontend/src/metabase/reducers-public.js b/frontend/src/metabase/reducers-public.js index 0d800e5a0321..c19bf3915f1f 100644 --- a/frontend/src/metabase/reducers-public.js +++ b/frontend/src/metabase/reducers-public.js @@ -2,7 +2,7 @@ import commonReducers from "./reducers-common"; -import dashboard from "metabase/dashboard/dashboard"; +import dashboard from "metabase/dashboard/reducers"; export default { ...commonReducers, diff --git a/frontend/src/metabase/redux/metadata.js b/frontend/src/metabase/redux/metadata.js index ba7e86f9a6b2..3e9b352d56d9 100644 --- a/frontend/src/metabase/redux/metadata.js +++ b/frontend/src/metabase/redux/metadata.js @@ -81,10 +81,25 @@ export const fetchTableMetadata = (id, reload = false) => { return Tables.actions.fetchMetadataAndForeignTables({ id }, { reload }); }; -export const fetchField = (id, reload = false) => { - 
deprecated("metabase/redux/metadata fetchField"); - return Fields.actions.fetch({ id }, { reload }); -}; +export const METADATA_FETCH_FIELD = "metabase/metadata/FETCH_FIELD"; +export const fetchField = createThunkAction( + METADATA_FETCH_FIELD, + (id, reload = false) => { + deprecated("metabase/redux/metadata fetchField"); + return async dispatch => { + const action = await dispatch(Fields.actions.fetch({ id }, { reload })); + const field = Fields.HACK_getObjectFromAction(action); + if (field?.dimensions?.human_readable_field_id != null) { + await dispatch( + Fields.actions.fetch( + { id: field.dimensions.human_readable_field_id }, + { reload }, + ), + ); + } + }; + }, +); export const FETCH_FIELD_VALUES = Fields.actions.fetchFieldValues.toString(); export const fetchFieldValues = (id, reload = false) => { @@ -303,6 +318,8 @@ export const loadMetadataForQueries = queries => dispatch => return (foreignTables ? Tables.actions.fetchMetadataAndForeignTables : Tables.actions.fetchMetadata)({ id }); + } else if (type === "field") { + return Fields.actions.fetch({ id }); } else { console.warn(`loadMetadataForQueries: type ${type} not implemented`); } diff --git a/frontend/src/metabase/redux/metadata.unit.spec.js b/frontend/src/metabase/redux/metadata.unit.spec.js new file mode 100644 index 000000000000..931577379436 --- /dev/null +++ b/frontend/src/metabase/redux/metadata.unit.spec.js @@ -0,0 +1,140 @@ +import Fields from "metabase/entities/fields"; +import Tables from "metabase/entities/tables"; +import { fetchField, loadMetadataForQuery } from "./metadata"; + +describe("deprecated metadata actions", () => { + let dispatch; + beforeEach(() => { + jest.clearAllMocks(); + + dispatch = jest.fn(a => a); + }); + + describe("fetchField", () => { + it("should fetch a field", async () => { + Fields.actions.fetch = jest.fn(() => + Promise.resolve({ + type: Fields.actionTypes.FETCH_ACTION, + payload: { + result: 0, + entities: { + fields: { + 0: { id: 0 }, + }, + }, + }, + }), + 
); + + await fetchField(0, false)(dispatch); + expect(Fields.actions.fetch).toHaveBeenCalledWith( + { id: 0 }, + { reload: false }, + ); + expect(Fields.actions.fetch.mock.calls.length).toBe(1); + }); + + it("should fetch the field associated with a field's dimensions.human_readable_field_id property", async () => { + Fields.actions.fetch = jest.fn(); + + Fields.actions.fetch.mockReturnValueOnce( + Promise.resolve({ + type: Fields.actionTypes.FETCH_ACTION, + payload: { + result: 0, + entities: { + fields: { + 0: { id: 0, dimensions: { human_readable_field_id: 1 } }, + }, + }, + }, + }), + ); + + Fields.actions.fetch.mockReturnValueOnce( + Promise.resolve({ + type: Fields.actionTypes.FETCH_ACTION, + payload: { + result: 1, + entities: { + fields: { + 0: { id: 0, dimensions: { human_readable_field_id: 1 } }, + 1: { id: 1 }, + }, + }, + }, + }), + ); + + await fetchField(0, true)(dispatch); + expect(Fields.actions.fetch).toHaveBeenCalledWith( + { id: 0 }, + { reload: true }, + ); + expect(Fields.actions.fetch).toHaveBeenCalledWith( + { id: 1 }, + { reload: true }, + ); + expect(Fields.actions.fetch.mock.calls.length).toBe(2); + }); + }); + + describe("loadMetadataForQuery", () => { + beforeEach(() => { + Fields.actions.fetch = jest.fn(() => + Promise.resolve({ + type: Fields.actionTypes.FETCH_ACTION, + payload: {}, + }), + ); + + Tables.actions.fetchMetadata = jest.fn(() => + Promise.resolve({ + type: Tables.actionTypes.FETCH_METADATA, + payload: {}, + }), + ); + + Tables.actions.fetchMetadataAndForeignTables = jest.fn(() => + Promise.resolve({ + type: Tables.actionTypes.FETCH_TABLE_METADATA, + payload: {}, + }), + ); + }); + + it("should send requests for any tables/fields needed by the query", () => { + const query = { + dependentMetadata: () => [ + { + type: "table", + id: 1, + }, + { + type: "table", + id: 1, + }, + { + foreignTables: true, + type: "table", + id: 2, + }, + { + type: "field", + id: 3, + }, + { type: "card", id: 4 }, + ], + }; + + 
loadMetadataForQuery(query)(dispatch); + expect(Tables.actions.fetchMetadata).toHaveBeenCalledWith({ id: 1 }); + expect(Tables.actions.fetchMetadataAndForeignTables).toHaveBeenCalledWith( + { id: 2 }, + ); + expect(Tables.actions.fetchMetadata.mock.calls.length).toBe(1); + + expect(Fields.actions.fetch).toHaveBeenCalledWith({ id: 3 }); + }); + }); +}); diff --git a/frontend/src/metabase/redux/requests.js b/frontend/src/metabase/redux/requests.js index 9755309e5cb8..681d2d7cb6f8 100644 --- a/frontend/src/metabase/redux/requests.js +++ b/frontend/src/metabase/redux/requests.js @@ -1,4 +1,4 @@ -import { handleActions, createAction } from "metabase/lib/redux"; +import { handleActions, createAction } from "redux-actions"; import { updateIn, assoc } from "icepick"; export const setRequestLoading = createAction( diff --git a/frontend/src/metabase/reference/components/Detail.jsx b/frontend/src/metabase/reference/components/Detail.jsx index 8735684875f1..bda51a8ccaa3 100644 --- a/frontend/src/metabase/reference/components/Detail.jsx +++ b/frontend/src/metabase/reference/components/Detail.jsx @@ -5,7 +5,6 @@ import { Link } from "react-router"; import S from "./Detail.css"; import { t } from "ttag"; import cx from "classnames"; -import pure from "recompose/pure"; const Detail = ({ name, @@ -64,4 +63,4 @@ Detail.propTypes = { field: PropTypes.object, }; -export default pure(Detail); +export default React.memo(Detail); diff --git a/frontend/src/metabase/reference/components/EditHeader.jsx b/frontend/src/metabase/reference/components/EditHeader.jsx index 04c2c6239dbd..4aea2c1f6c0d 100644 --- a/frontend/src/metabase/reference/components/EditHeader.jsx +++ b/frontend/src/metabase/reference/components/EditHeader.jsx @@ -1,7 +1,6 @@ import React from "react"; import PropTypes from "prop-types"; import cx from "classnames"; -import pure from "recompose/pure"; import { t } from "ttag"; import S from "./EditHeader.css"; @@ -81,4 +80,4 @@ EditHeader.propTypes = { 
revisionMessageFormField: PropTypes.object, }; -export default pure(EditHeader); +export default React.memo(EditHeader); diff --git a/frontend/src/metabase/reference/components/EditableReferenceHeader.jsx b/frontend/src/metabase/reference/components/EditableReferenceHeader.jsx index 2872191056af..a3377baf38af 100644 --- a/frontend/src/metabase/reference/components/EditableReferenceHeader.jsx +++ b/frontend/src/metabase/reference/components/EditableReferenceHeader.jsx @@ -2,7 +2,6 @@ import React from "react"; import PropTypes from "prop-types"; import { Link } from "react-router"; import cx from "classnames"; -import pure from "recompose/pure"; import { t } from "ttag"; import S from "./ReferenceHeader.css"; import L from "metabase/components/List.css"; @@ -110,4 +109,4 @@ EditableReferenceHeader.propTypes = { nameFormField: PropTypes.object, }; -export default pure(EditableReferenceHeader); +export default React.memo(EditableReferenceHeader); diff --git a/frontend/src/metabase/reference/components/Field.jsx b/frontend/src/metabase/reference/components/Field.jsx index ce745e6ccc85..1914a17e689c 100644 --- a/frontend/src/metabase/reference/components/Field.jsx +++ b/frontend/src/metabase/reference/components/Field.jsx @@ -15,7 +15,6 @@ import Select from "metabase/components/Select"; import Icon from "metabase/components/Icon"; import cx from "classnames"; -import pure from "recompose/pure"; const Field = ({ field, foreignKeys, url, icon, isEditing, formField }) => (
    @@ -127,4 +126,4 @@ Field.propTypes = { formField: PropTypes.object, }; -export default pure(Field); +export default React.memo(Field); diff --git a/frontend/src/metabase/reference/components/FieldToGroupBy.jsx b/frontend/src/metabase/reference/components/FieldToGroupBy.jsx index 4d67308e6612..8423c7b17429 100644 --- a/frontend/src/metabase/reference/components/FieldToGroupBy.jsx +++ b/frontend/src/metabase/reference/components/FieldToGroupBy.jsx @@ -1,7 +1,6 @@ /* eslint-disable react/prop-types */ import React from "react"; import PropTypes from "prop-types"; -import pure from "recompose/pure"; import { t } from "ttag"; import cx from "classnames"; import S from "./FieldToGroupBy.css"; @@ -42,4 +41,4 @@ FieldToGroupBy.propTypes = { secondaryOnClick: PropTypes.func, }; -export default pure(FieldToGroupBy); +export default React.memo(FieldToGroupBy); diff --git a/frontend/src/metabase/reference/components/FieldTypeDetail.jsx b/frontend/src/metabase/reference/components/FieldTypeDetail.jsx index 6dfca7fe97b4..4b044a26eb59 100644 --- a/frontend/src/metabase/reference/components/FieldTypeDetail.jsx +++ b/frontend/src/metabase/reference/components/FieldTypeDetail.jsx @@ -2,7 +2,6 @@ import React from "react"; import PropTypes from "prop-types"; import cx from "classnames"; import { getIn } from "icepick"; -import pure from "recompose/pure"; import { t } from "ttag"; import * as MetabaseCore from "metabase/lib/core"; import { isNumericBaseType } from "metabase/lib/schema_metadata"; @@ -88,4 +87,4 @@ FieldTypeDetail.propTypes = { isEditing: PropTypes.bool.isRequired, }; -export default pure(FieldTypeDetail); +export default React.memo(FieldTypeDetail); diff --git a/frontend/src/metabase/reference/components/ReferenceHeader.jsx b/frontend/src/metabase/reference/components/ReferenceHeader.jsx index 78a4f4c088ed..4db9ea67af22 100644 --- a/frontend/src/metabase/reference/components/ReferenceHeader.jsx +++ b/frontend/src/metabase/reference/components/ReferenceHeader.jsx 
@@ -2,7 +2,6 @@ import React from "react"; import PropTypes from "prop-types"; import { Link } from "react-router"; import cx from "classnames"; -import pure from "recompose/pure"; import S from "./ReferenceHeader.css"; import L from "metabase/components/List.css"; @@ -61,4 +60,4 @@ ReferenceHeader.propTypes = { headerLink: PropTypes.string, }; -export default pure(ReferenceHeader); +export default React.memo(ReferenceHeader); diff --git a/frontend/src/metabase/reference/components/UsefulQuestions.jsx b/frontend/src/metabase/reference/components/UsefulQuestions.jsx index e2d31bc7fa5d..9fa1144804fd 100644 --- a/frontend/src/metabase/reference/components/UsefulQuestions.jsx +++ b/frontend/src/metabase/reference/components/UsefulQuestions.jsx @@ -1,6 +1,5 @@ import React from "react"; import PropTypes from "prop-types"; -import pure from "recompose/pure"; import { t } from "ttag"; import S from "./UsefulQuestions.css"; import D from "metabase/reference/components/Detail.css"; @@ -25,4 +24,4 @@ UsefulQuestions.propTypes = { questions: PropTypes.array.isRequired, }; -export default pure(UsefulQuestions); +export default React.memo(UsefulQuestions); diff --git a/frontend/src/metabase/reference/databases/DatabaseSidebar.jsx b/frontend/src/metabase/reference/databases/DatabaseSidebar.jsx index 679b1b9f6bd3..7d1a0b8b2324 100644 --- a/frontend/src/metabase/reference/databases/DatabaseSidebar.jsx +++ b/frontend/src/metabase/reference/databases/DatabaseSidebar.jsx @@ -7,7 +7,6 @@ import Breadcrumbs from "metabase/components/Breadcrumbs"; import SidebarItem from "metabase/components/SidebarItem"; import cx from "classnames"; -import pure from "recompose/pure"; const DatabaseSidebar = ({ database, style, className }) => (
    @@ -43,4 +42,4 @@ DatabaseSidebar.propTypes = { style: PropTypes.object, }; -export default pure(DatabaseSidebar); +export default React.memo(DatabaseSidebar); diff --git a/frontend/src/metabase/reference/databases/FieldList.jsx b/frontend/src/metabase/reference/databases/FieldList.jsx index df50df3fa587..fd42cd47674b 100644 --- a/frontend/src/metabase/reference/databases/FieldList.jsx +++ b/frontend/src/metabase/reference/databases/FieldList.jsx @@ -91,6 +91,7 @@ export default class FieldList extends Component { loadingError: PropTypes.object, submitting: PropTypes.bool, resetForm: PropTypes.func, + "data-testid": PropTypes.string, }; render() { @@ -123,6 +124,7 @@ export default class FieldList extends Component { this.props, ), )} + testID={this.props["data-testid"]} > {isEditing && ( (
    @@ -47,4 +46,4 @@ BaseSidebar.propTypes = { style: PropTypes.object, }; -export default pure(BaseSidebar); +export default React.memo(BaseSidebar); diff --git a/frontend/src/metabase/reference/metrics/MetricSidebar.jsx b/frontend/src/metabase/reference/metrics/MetricSidebar.jsx index 8c090fd5b087..dc44ffdc3eb3 100644 --- a/frontend/src/metabase/reference/metrics/MetricSidebar.jsx +++ b/frontend/src/metabase/reference/metrics/MetricSidebar.jsx @@ -3,7 +3,6 @@ import React from "react"; import PropTypes from "prop-types"; import { t } from "ttag"; import cx from "classnames"; -import pure from "recompose/pure"; import MetabaseSettings from "metabase/lib/settings"; @@ -64,4 +63,4 @@ MetricSidebar.propTypes = { style: PropTypes.object, }; -export default pure(MetricSidebar); +export default React.memo(MetricSidebar); diff --git a/frontend/src/metabase/reference/segments/SegmentFieldSidebar.jsx b/frontend/src/metabase/reference/segments/SegmentFieldSidebar.jsx index 208e92b42b27..997d9d85ac23 100644 --- a/frontend/src/metabase/reference/segments/SegmentFieldSidebar.jsx +++ b/frontend/src/metabase/reference/segments/SegmentFieldSidebar.jsx @@ -7,7 +7,6 @@ import Breadcrumbs from "metabase/components/Breadcrumbs"; import SidebarItem from "metabase/components/SidebarItem"; import cx from "classnames"; -import pure from "recompose/pure"; const SegmentFieldSidebar = ({ segment, field, style, className }) => (
    @@ -41,4 +40,4 @@ SegmentFieldSidebar.propTypes = { style: PropTypes.object, }; -export default pure(SegmentFieldSidebar); +export default React.memo(SegmentFieldSidebar); diff --git a/frontend/src/metabase/reference/segments/SegmentSidebar.jsx b/frontend/src/metabase/reference/segments/SegmentSidebar.jsx index 5c6e96717180..4210983f1ec9 100644 --- a/frontend/src/metabase/reference/segments/SegmentSidebar.jsx +++ b/frontend/src/metabase/reference/segments/SegmentSidebar.jsx @@ -3,7 +3,6 @@ import React from "react"; import PropTypes from "prop-types"; import { t } from "ttag"; import cx from "classnames"; -import pure from "recompose/pure"; import MetabaseSettings from "metabase/lib/settings"; @@ -70,4 +69,4 @@ SegmentSidebar.propTypes = { style: PropTypes.object, }; -export default pure(SegmentSidebar); +export default React.memo(SegmentSidebar); diff --git a/frontend/src/metabase/routes.jsx b/frontend/src/metabase/routes.jsx index 78a74dc13552..8a6df1dfca8e 100644 --- a/frontend/src/metabase/routes.jsx +++ b/frontend/src/metabase/routes.jsx @@ -39,13 +39,12 @@ import QueryBuilder from "metabase/query_builder/containers/QueryBuilder"; import CollectionEdit from "metabase/collections/containers/CollectionEdit"; import CollectionCreate from "metabase/collections/containers/CollectionCreate"; import ArchiveCollectionModal from "metabase/components/ArchiveCollectionModal"; -import CollectionPermissionsModal from "metabase/admin/permissions/containers/CollectionPermissionsModal"; +import CollectionPermissionsModal from "metabase/admin/permissions/components/CollectionPermissionsModal/CollectionPermissionsModal"; import UserCollectionList from "metabase/containers/UserCollectionList"; import PulseEditApp from "metabase/pulse/containers/PulseEditApp"; import SetupApp from "metabase/setup/containers/SetupApp"; import PostSetupApp from "metabase/setup/containers/PostSetupApp"; -import UserSettingsApp from "metabase/user/containers/UserSettingsApp"; // new question 
import NewQueryOptions from "metabase/new_query/containers/NewQueryOptions"; @@ -74,6 +73,7 @@ import TableQuestionsContainer from "metabase/reference/databases/TableQuestions import FieldListContainer from "metabase/reference/databases/FieldListContainer"; import FieldDetailContainer from "metabase/reference/databases/FieldDetailContainer"; +import getAccountRoutes from "metabase/account/routes"; import getAdminRoutes from "metabase/admin/routes"; import PublicQuestion from "metabase/public/containers/PublicQuestion"; @@ -322,8 +322,8 @@ export const getRoutes = store => ( - {/* USER */} - + {/* ACCOUNT */} + {getAccountRoutes(store, IsAuthenticated)} {/* ADMIN */} {getAdminRoutes(store, IsAdmin)} diff --git a/frontend/src/metabase/schema.js b/frontend/src/metabase/schema.js index 1182f38e75da..a1a855eee6fc 100644 --- a/frontend/src/metabase/schema.js +++ b/frontend/src/metabase/schema.js @@ -112,6 +112,10 @@ export const parseSchemaId = id => { export const generateSchemaId = (dbId, schemaName) => `${dbId}:${schemaName || ""}`; +export const RecentsSchema = new schema.Entity("recents", undefined, { + idAttribute: ({ model, model_id }) => `${model}:${model_id}`, +}); + export const LoginHistorySchema = new schema.Entity("loginHistory", undefined, { idAttribute: ({ timestamp }) => `${timestamp}`, }); diff --git a/frontend/src/metabase/search/components/CollectionBadge.jsx b/frontend/src/metabase/search/components/CollectionBadge.jsx new file mode 100644 index 000000000000..71bece7d5a24 --- /dev/null +++ b/frontend/src/metabase/search/components/CollectionBadge.jsx @@ -0,0 +1,29 @@ +import React from "react"; +import PropTypes from "prop-types"; + +import * as Urls from "metabase/lib/urls"; + +import { + CollectionBadgeRoot, + CollectionLink, + AuthorityLevelIcon, +} from "./CollectionBadge.styled"; + +const propTypes = { + collection: PropTypes.shape({ + name: PropTypes.string, + }), +}; + +export function CollectionBadge({ collection }) { + return ( + + + + 
{collection.name} + + + ); +} + +CollectionBadge.propTypes = propTypes; diff --git a/frontend/src/metabase/search/components/CollectionBadge.styled.jsx b/frontend/src/metabase/search/components/CollectionBadge.styled.jsx new file mode 100644 index 000000000000..c259dee3242d --- /dev/null +++ b/frontend/src/metabase/search/components/CollectionBadge.styled.jsx @@ -0,0 +1,26 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import Link from "metabase/components/Link"; + +import { PLUGIN_COLLECTION_COMPONENTS } from "metabase/plugins"; + +const { CollectionAuthorityLevelIcon } = PLUGIN_COLLECTION_COMPONENTS; + +export const CollectionBadgeRoot = styled.div` + display: inline-block; +`; + +export const CollectionLink = styled(Link)` + display: flex; + align-items: center; + text-decoration: dashed; + &:hover { + color: ${color("brand")}; + } +`; + +export const AuthorityLevelIcon = styled(CollectionAuthorityLevelIcon).attrs({ + size: 13, +})` + padding-right: 2px; +`; diff --git a/frontend/src/metabase/search/components/InfoText.jsx b/frontend/src/metabase/search/components/InfoText.jsx new file mode 100644 index 000000000000..12903a2f0b9f --- /dev/null +++ b/frontend/src/metabase/search/components/InfoText.jsx @@ -0,0 +1,112 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t, jt } from "ttag"; + +import * as Urls from "metabase/lib/urls"; + +import Icon from "metabase/components/Icon"; +import Link from "metabase/components/Link"; + +import Schema from "metabase/entities/schemas"; +import Database from "metabase/entities/databases"; +import Table from "metabase/entities/tables"; +import { PLUGIN_COLLECTIONS } from "metabase/plugins"; +import { getTranslatedEntityName } from "metabase/nav/components/utils"; +import { CollectionBadge } from "./CollectionBadge"; + +const searchResultPropTypes = { + database_id: PropTypes.number, + table_id: PropTypes.number, + model: PropTypes.string, + getCollection: 
PropTypes.func, + collection: PropTypes.object, + table_schema: PropTypes.string, +}; + +const infoTextPropTypes = { + result: PropTypes.shape(searchResultPropTypes), +}; + +export function InfoText({ result }) { + switch (result.model) { + case "card": + return jt`Saved question in ${formatCollection(result.getCollection())}`; + case "collection": + return getCollectionInfoText(result.collection); + case "database": + return t`Database`; + case "table": + return ; + case "segment": + return jt`Segment of ${}`; + case "metric": + return jt`Metric for ${}`; + default: + return jt`${getTranslatedEntityName(result.model)} in ${formatCollection( + result.getCollection(), + )}`; + } +} + +InfoText.propTypes = infoTextPropTypes; + +function formatCollection(collection) { + return collection.id && ; +} + +function getCollectionInfoText(collection) { + if (PLUGIN_COLLECTIONS.isRegularCollection(collection)) { + return t`Collection`; + } + const level = PLUGIN_COLLECTIONS.AUTHORITY_LEVEL[collection.authority_level]; + return `${level.name} ${t`Collection`}`; +} + +function TablePath({ result }) { + return jt`Table in ${( + + {" "} + {result.table_schema && ( + + {({ list }) => + list?.length > 1 ? ( + + + {/* we have to do some {} manipulation here to make this look like the table object that browseSchema was written for originally */} + + {result.table_schema} + + + ) : null + } + + )} + + )}`; +} + +TablePath.propTypes = { + result: PropTypes.shape(searchResultPropTypes), +}; + +function TableLink({ result }) { + return ( + + + {({ table }) => (table ? 
{table.display_name} : null)} + + + ); +} + +TableLink.propTypes = { + result: PropTypes.shape(searchResultPropTypes), +}; diff --git a/frontend/src/metabase/search/components/InfoText.unit.spec.js b/frontend/src/metabase/search/components/InfoText.unit.spec.js new file mode 100644 index 000000000000..08c4b517139d --- /dev/null +++ b/frontend/src/metabase/search/components/InfoText.unit.spec.js @@ -0,0 +1,126 @@ +import React from "react"; +import { Provider } from "react-redux"; +import { render, screen, waitFor } from "@testing-library/react"; +import xhrMock from "xhr-mock"; +import { getStore } from "__support__/entities-store"; + +import { InfoText } from "./InfoText"; + +const collection = { id: 1, name: "Collection Name" }; +const table = { id: 1, display_name: "Table Name" }; +const database = { id: 1, name: "Database Name" }; + +async function setup(result) { + xhrMock.get("/api/table/1", { + body: JSON.stringify(table), + }); + + xhrMock.get("/api/database/1", { + body: JSON.stringify(database), + }); + + const store = getStore(); + + render( + + + , + ); +} + +describe("InfoText", () => { + beforeEach(() => { + xhrMock.setup(); + }); + + afterEach(() => { + xhrMock.teardown(); + }); + + it("shows collection info for a question", async () => { + await setup({ + model: "card", + getCollection: () => collection, + }); + expect(screen.queryByText("Saved question in")).toHaveTextContent( + "Saved question in Collection Name", + ); + }); + + it("shows collection info for a collection", async () => { + const collection = { id: 1, name: "Collection Name" }; + await setup({ + model: "collection", + collection, + }); + expect(screen.queryByText("Collection")).toBeInTheDocument(); + }); + + it("shows Database for databases", async () => { + await setup({ + model: "database", + }); + expect(screen.queryByText("Database")).toBeInTheDocument(); + }); + + it("shows segment's table name", async () => { + await setup({ + model: "segment", + table_id: 1, + database_id: 1, 
+ }); + + await waitFor(() => screen.queryByText("Table Name")); + expect(screen.queryByText("Segment of")).toHaveTextContent( + "Segment of Table Name", + ); + }); + + it("shows metric's table name", async () => { + await setup({ + model: "metric", + table_id: 1, + database_id: 1, + }); + + await waitFor(() => screen.queryByText("Table Name")); + expect(screen.queryByText("Metric for")).toHaveTextContent( + "Metric for Table Name", + ); + }); + + it("shows table's schema", async () => { + await setup({ + model: "table", + table_id: 1, + database_id: 1, + }); + + await waitFor(() => screen.queryByText("Database Name")); + expect(screen.queryByText("Table in")).toHaveTextContent( + "Table in Database Name", + ); + }); + + it("shows pulse's collection", async () => { + await setup({ + model: "pulse", + getCollection: () => collection, + }); + + expect(screen.queryByText("Pulse in")).toHaveTextContent( + "Pulse in Collection Name", + ); + }); + + it("shows dashboard's collection", async () => { + await setup({ + model: "dashboard", + getCollection: () => collection, + }); + + expect(screen.queryByText("Dashboard in")).toHaveTextContent( + "Dashboard in Collection Name", + ); + }); +}); diff --git a/frontend/src/metabase/search/components/SearchResult.info.js b/frontend/src/metabase/search/components/SearchResult.info.js index f26f351fe3ac..1ecda5888c1c 100644 --- a/frontend/src/metabase/search/components/SearchResult.info.js +++ b/frontend/src/metabase/search/components/SearchResult.info.js @@ -5,11 +5,16 @@ export const category = "search"; export const description = `Displays search results w/ optional context in typeahead and on the search results page`; +const DEMO_URL = "/_internal/components/searchresult"; + const COLLECTION_EXAMPLE = { model: "collection", id: 1, name: "Revenue", - getIcon: () => "folder", + getIcon: () => ({ name: "folder" }), + getUrl: () => DEMO_URL, + getCollection: () => {}, + collection: {}, }; const DASHBOARD_EXAMPLE = { @@ -22,7 +27,9 
@@ const DASHBOARD_EXAMPLE = { id: "root", name: "Our analytics", }, - getIcon: () => "dashboard", + getIcon: () => ({ name: "dashboard" }), + getUrl: () => DEMO_URL, + getCollection: () => COLLECTION_EXAMPLE, }; const QUESTION_EXAMPLE = { @@ -30,7 +37,9 @@ const QUESTION_EXAMPLE = { id: 1, name: "Revenue by region", collection: COLLECTION_EXAMPLE, - getIcon: () => "table", + getIcon: () => ({ name: "table" }), + getUrl: () => DEMO_URL, + getCollection: () => COLLECTION_EXAMPLE, }; const LONG_TITLE_DASHBOARD_EXAMPLE = { @@ -41,10 +50,12 @@ const LONG_TITLE_DASHBOARD_EXAMPLE = { const QUESTION_CONTEXT_EXAMPLE = { ...QUESTION_EXAMPLE, name: "Poorly named item", - context: { - match: "description", - content: "This is actually about Revenue", - }, + context: [ + { + match: "description", + content: "This is actually about Revenue", + }, + ], }; export const examples = { diff --git a/frontend/src/metabase/search/components/SearchResult.jsx b/frontend/src/metabase/search/components/SearchResult.jsx index 27c5c8e0e0f3..3fdd00de2050 100644 --- a/frontend/src/metabase/search/components/SearchResult.jsx +++ b/frontend/src/metabase/search/components/SearchResult.jsx @@ -1,229 +1,92 @@ /* eslint-disable react/prop-types */ import React from "react"; import { Box, Flex } from "grid-styled"; -import styled from "styled-components"; -import { t, jt } from "ttag"; -import * as Urls from "metabase/lib/urls"; -import { color, lighten } from "metabase/lib/colors"; -import { capitalize } from "metabase/lib/formatting"; +import { color } from "metabase/lib/colors"; import Icon from "metabase/components/Icon"; -import Link from "metabase/components/Link"; import Text from "metabase/components/type/Text"; -import Schema from "metabase/entities/schemas"; -import Database from "metabase/entities/databases"; -import Table from "metabase/entities/tables"; +import { PLUGIN_COLLECTIONS, PLUGIN_MODERATION } from "metabase/plugins"; -function getColorForIconWrapper(props) { - if 
(props.item.collection_position) { - return color("warning"); - } - switch (props.type) { - case "collection": - return lighten("brand", 0.35); - default: - return color("brand"); - } -} - -const IconWrapper = styled.div` - display: flex; - align-items: center; - justify-content: center; - width: 32px; - height: 32px; - color: ${getColorForIconWrapper}; - margin-right: 10px; - flex-shrink: 0; -`; +import { + IconWrapper, + ResultLink, + Title, + TitleWrapper, + Description, + ContextText, + ContextContainer, +} from "./SearchResult.styled"; +import { InfoText } from "./InfoText"; -const ResultLink = styled(Link)` - display: block; - background-color: transparent; - min-height: ${props => (props.compact ? "36px" : "54px")}; - padding-top: 8px; - padding-bottom: 8px; - padding-left: 14px; - padding-right: ${props => (props.compact ? "20px" : "32px")}; +const DEFAULT_ICON_SIZE = 20; - &:hover { - background-color: ${lighten("brand", 0.63)}; - - h3 { - color: ${color("brand")}; - } - } - - ${Link} { - text-underline-position: under; - text-decoration: underline ${color("text-light")}; - text-decoration-style: dashed; - &:hover { - color: ${color("brand")}; - text-decoration-color: ${color("brand")}; - } - } +function TableIcon() { + return ; +} - ${Text} { - margin-top: 0; - margin-bottom: 0; - font-size: 13px; - line-height: 19px; +function CollectionIcon({ item }) { + const iconProps = { ...item.getIcon() }; + const isRegular = PLUGIN_COLLECTIONS.isRegularCollection(item.collection); + if (isRegular) { + iconProps.size = DEFAULT_ICON_SIZE; + } else { + iconProps.width = 20; + iconProps.height = 24; } + return ; +} - h3 { - font-size: ${props => (props.compact ? 
"14px" : "16px")}; - line-height: 1.2em; - word-wrap: break-word; - margin-bottom: 0; - } +const ModelIconComponentMap = { + table: TableIcon, + collection: CollectionIcon, +}; - .Icon-info { - color: ${color("text-light")}; - } -`; +function DefaultIcon({ item }) { + return ; +} -function ItemIcon({ item, type }) { +export function ItemIcon({ item, type }) { + const IconComponent = ModelIconComponentMap[type] || DefaultIcon; return ( - {type === "table" ? ( - - ) : ( - - )} + ); } -const CollectionLink = styled(Link)` - text-decoration: dashed; - &:hover { - color: ${color("brand")}; - } -`; - -function CollectionBadge({ collection }) { - return ( - - {collection.name} - - ); -} - -const Title = styled("h3")` - margin-bottom: 4px; -`; - function Score({ scores }) { return (
    {JSON.stringify(scores, null, 2)}
    ); } -const Context = styled("p")` - line-height: 1.4em; - color: ${color("text-medium")}; - margin-top: 0; -`; -function formatContext(context, compact) { +function Context({ context }) { + if (!context) { + return null; + } + return ( - !compact && - context && ( - - {contextText(context)} - - ) + + + {context.map(({ is_match, text }, i) => { + if (!is_match) { + return {text}; + } + + return ( + + {" "} + {text} + + ); + })} + + ); } -function formatCollection(collection) { - return collection.id && ; -} - -const Description = styled(Text)` - padding-left: 8px; - margin-top: 6px !important; - border-left: 2px solid ${lighten("brand", 0.45)}; -`; - -function contextText(context) { - return context.map(function({ is_match, text }, i) { - if (is_match) { - return ( - - {" "} - {text} - - ); - } else { - return {text}; - } - }); -} - -function InfoText({ result }) { - const collection = result.getCollection(); - switch (result.model) { - case "card": - return jt`Saved question in ${formatCollection(collection)}`; - case "collection": - return t`Collection`; - case "database": - return t`Database`; - case "table": - return ( - - {jt`Table in ${( - - {" "} - {result.table_schema && ( - - {({ list }) => - list && list.length > 1 ? ( - - - {/* we have to do some {} manipulation here to make this look like the table object that browseSchema was written for originally */} - - {result.table_schema} - - - ) : null - } - - )} - - )}`} - - ); - case "segment": - case "metric": - return ( - - {result.model === "segment" ? t`Segment of ` : t`Metric for `} - - - {({ table }) => - table ? 
{table.display_name} : null - } - - - - ); - default: - return jt`${capitalize(result.model)} in ${formatCollection(collection)}`; - } -} - export default function SearchResult({ result, compact }) { return ( - {result.name} + + {result.name} + + @@ -244,7 +113,7 @@ export default function SearchResult({ result, compact }) { - {formatContext(result.context, compact)} + {compact || } ); } diff --git a/frontend/src/metabase/search/components/SearchResult.styled.jsx b/frontend/src/metabase/search/components/SearchResult.styled.jsx new file mode 100644 index 000000000000..b2af5acc4d65 --- /dev/null +++ b/frontend/src/metabase/search/components/SearchResult.styled.jsx @@ -0,0 +1,99 @@ +import styled from "styled-components"; + +import { color, lighten } from "metabase/lib/colors"; + +import Link from "metabase/components/Link"; +import Text from "metabase/components/type/Text"; +import { space } from "metabase/styled-components/theme"; + +function getColorForIconWrapper(props) { + if (props.item.collection_position) { + return color("saturated-yellow"); + } + return props.type === "collection" ? lighten("brand", 0.35) : color("brand"); +} + +export const IconWrapper = styled.div` + display: flex; + align-items: center; + justify-content: center; + width: 32px; + height: 32px; + color: ${getColorForIconWrapper}; + margin-right: 10px; + flex-shrink: 0; +`; + +export const ResultLink = styled(Link)` + display: block; + background-color: transparent; + min-height: ${props => (props.compact ? "36px" : "54px")}; + padding-top: ${space(1)}; + padding-bottom: ${space(1)}; + padding-left: 14px; + padding-right: ${props => (props.compact ? 
"20px" : space(3))}; + + &:hover { + background-color: ${lighten("brand", 0.63)}; + + h3 { + color: ${color("brand")}; + } + } + + ${Link} { + text-underline-position: under; + text-decoration: underline ${color("text-light")}; + text-decoration-style: dashed; + &:hover { + color: ${color("brand")}; + text-decoration-color: ${color("brand")}; + } + } + + ${Text} { + margin-top: 0; + margin-bottom: 0; + font-size: 13px; + line-height: 19px; + } + + h3 { + font-size: ${props => (props.compact ? "14px" : "16px")}; + line-height: 1.2em; + word-wrap: break-word; + margin-bottom: 0; + } + + .Icon-info { + color: ${color("text-light")}; + } +`; + +export const TitleWrapper = styled.div` + display: flex; + grid-gap: 0.25rem; + align-items: center; +`; + +export const ContextText = styled("p")` + line-height: 1.4em; + color: ${color("text-medium")}; + margin-top: 0; +`; + +export const Title = styled("h3")` + margin-bottom: 4px; +`; + +export const Description = styled(Text)` + padding-left: ${space(1)}; + margin-top: ${space(1)} !important; + border-left: 2px solid ${lighten("brand", 0.45)}; +`; + +export const ContextContainer = styled.div` + margin-left: 42px; + margin-top: 12px; + max-width: 620px; +`; diff --git a/frontend/src/metabase/search/components/SearchResult.unit.spec.js b/frontend/src/metabase/search/components/SearchResult.unit.spec.js new file mode 100644 index 000000000000..4ee2148b19f8 --- /dev/null +++ b/frontend/src/metabase/search/components/SearchResult.unit.spec.js @@ -0,0 +1,95 @@ +/* eslint-disable react/prop-types */ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import { PLUGIN_COLLECTIONS } from "metabase/plugins"; +import SearchResult from "./SearchResult"; + +function collection({ + id = 1, + name = "Marketing", + authority_level = null, + getIcon = () => ({ name: "folder" }), + getUrl = () => `/collection/${id}`, + getCollection = () => {}, +} = {}) { + const collection = { + id, + name, + 
authority_level, + getIcon, + getUrl, + getCollection, + model: "collection", + }; + collection.collection = collection; + return collection; +} + +describe("SearchResult > Collections", () => { + const regularCollection = collection(); + + describe("OSS", () => { + const officialCollection = collection({ + authority_level: "official", + }); + + it("renders regular collection correctly", () => { + render(); + expect(screen.queryByText(regularCollection.name)).toBeInTheDocument(); + expect(screen.queryByText("Collection")).toBeInTheDocument(); + expect(screen.queryByLabelText("folder icon")).toBeInTheDocument(); + expect(screen.queryByLabelText("badge icon")).toBeNull(); + }); + + it("renders official collections as regular", () => { + render(); + expect(screen.queryByText(regularCollection.name)).toBeInTheDocument(); + expect(screen.queryByText("Collection")).toBeInTheDocument(); + expect(screen.queryByLabelText("folder icon")).toBeInTheDocument(); + expect(screen.queryByLabelText("badge icon")).toBeNull(); + }); + }); + + describe("EE", () => { + const officialCollection = collection({ + authority_level: "official", + getIcon: () => ({ name: "badge" }), + }); + + const ORIGINAL_COLLECTIONS_PLUGIN = { ...PLUGIN_COLLECTIONS }; + + beforeAll(() => { + PLUGIN_COLLECTIONS.isRegularCollection = c => !c.authority_level; + PLUGIN_COLLECTIONS.AUTHORITY_LEVEL = { + ...ORIGINAL_COLLECTIONS_PLUGIN.AUTHORITY_LEVEL, + official: { + name: "Official", + icon: "badge", + }, + }; + }); + + afterAll(() => { + PLUGIN_COLLECTIONS.isRegularCollection = + ORIGINAL_COLLECTIONS_PLUGIN.isRegularCollection; + PLUGIN_COLLECTIONS.AUTHORITY_LEVEL = + ORIGINAL_COLLECTIONS_PLUGIN.AUTHORITY_LEVEL; + }); + + it("renders regular collection correctly", () => { + render(); + expect(screen.queryByText(regularCollection.name)).toBeInTheDocument(); + expect(screen.queryByText("Collection")).toBeInTheDocument(); + expect(screen.queryByLabelText("folder icon")).toBeInTheDocument(); + 
expect(screen.queryByLabelText("badge icon")).toBeNull(); + }); + + it("renders official collections correctly", () => { + render(); + expect(screen.queryByText(regularCollection.name)).toBeInTheDocument(); + expect(screen.queryByText("Official Collection")).toBeInTheDocument(); + expect(screen.queryByLabelText("badge icon")).toBeInTheDocument(); + expect(screen.queryByLabelText("folder icon")).toBeNull(); + }); + }); +}); diff --git a/frontend/src/metabase/selectors/metadata.js b/frontend/src/metabase/selectors/metadata.js index 96158bc80f3f..d52e02231479 100644 --- a/frontend/src/metabase/selectors/metadata.js +++ b/frontend/src/metabase/selectors/metadata.js @@ -13,7 +13,7 @@ import Metric from "metabase-lib/lib/metadata/Metric"; import Segment from "metabase-lib/lib/metadata/Segment"; import _ from "underscore"; -import { shallowEqual } from "recompose"; +import shallowEqual from "./shallowEqual"; import { getFieldValues, getRemappings } from "metabase/lib/query/field"; import { getIn } from "icepick"; @@ -22,10 +22,14 @@ import { getIn } from "icepick"; export const getNormalizedDatabases = state => state.entities.databases; export const getNormalizedSchemas = state => state.entities.schemas; const getNormalizedTablesUnfiltered = state => state.entities.tables; +const getIncludeHiddenTables = (_state, props) => props?.includeHiddenTables; export const getNormalizedTables = createSelector( - [getNormalizedTablesUnfiltered], + [getNormalizedTablesUnfiltered, getIncludeHiddenTables], // remove hidden tables from the metadata graph - tables => filterValues(tables, table => table.visibility_type == null), + (tables, includeHiddenTables) => + includeHiddenTables + ? 
tables + : filterValues(tables, table => table.visibility_type == null), ); const getNormalizedFieldsUnfiltered = state => state.entities.fields; @@ -68,6 +72,13 @@ export const getShallowFields = getNormalizedFields; export const getShallowMetrics = getNormalizedMetrics; export const getShallowSegments = getNormalizedSegments; +export const instantiateDatabase = obj => new Database(obj); +export const instantiateSchema = obj => new Schema(obj); +export const instantiateTable = obj => new Table(obj); +export const instantiateField = obj => new Field(obj); +export const instantiateSegment = obj => new Segment(obj); +export const instantiateMetric = obj => new Metric(obj); + // fully connected graph of all databases, tables, fields, segments, and metrics // TODO: do this lazily using ES6 Proxies export const getMetadata = createSelector( @@ -81,15 +92,13 @@ export const getMetadata = createSelector( ], (databases, schemas, tables, fields, segments, metrics): Metadata => { const meta = new Metadata(); - meta.databases = copyObjects(meta, databases, Database); - meta.schemas = copyObjects(meta, schemas, Schema); - meta.tables = copyObjects(meta, tables, Table); - meta.fields = copyObjects(meta, fields, Field); - meta.segments = copyObjects(meta, segments, Segment); - meta.metrics = copyObjects(meta, metrics, Metric); - - // database - hydrateList(meta.databases, "tables", meta.tables); + meta.databases = copyObjects(meta, databases, instantiateDatabase); + meta.schemas = copyObjects(meta, schemas, instantiateSchema); + meta.tables = copyObjects(meta, tables, instantiateTable); + meta.fields = copyObjects(meta, fields, instantiateField); + meta.segments = copyObjects(meta, segments, instantiateSegment); + meta.metrics = copyObjects(meta, metrics, instantiateMetric); + // schema hydrate(meta.schemas, "database", s => meta.database(s.database)); // table @@ -99,6 +108,16 @@ export const getMetadata = createSelector( hydrate(meta.tables, "db", t => meta.database(t.db_id || 
t.db)); hydrate(meta.tables, "schema", t => meta.schema(t.schema)); + hydrate(meta.databases, "tables", database => { + if (database.tables?.length > 0) { + return database.tables.map(tableId => meta.table(tableId)); + } + + return Object.values(meta.tables).filter( + table => table.db_id === database.id, + ); + }); + // NOTE: special handling for schemas // This is pretty hacky // hydrateList(meta.databases, "schemas", meta.schemas); @@ -260,11 +279,11 @@ export const makeGetMergedParameterFieldValues = () => { // UTILS: // clone each object in the provided mapping of objects -export function copyObjects(metadata, objects, Klass) { +export function copyObjects(metadata, objects, instantiate) { const copies = {}; for (const object of Object.values(objects)) { if (object && object.id != null) { - copies[object.id] = new Klass(object); + copies[object.id] = instantiate(object); copies[object.id].metadata = metadata; } else { console.warn("Missing id:", object); diff --git a/frontend/src/metabase/selectors/shallowEqual.js b/frontend/src/metabase/selectors/shallowEqual.js new file mode 100644 index 000000000000..5e64178a8549 --- /dev/null +++ b/frontend/src/metabase/selectors/shallowEqual.js @@ -0,0 +1,76 @@ +/** + * NOTE: + * Copied directly from `https://github.com/acdlite/recompose/blob/master/src/packages/recompose/shallowEqual.js` + * as a temporary solution until we find an alternative to this utility. It was the only thing blocking the complete removal of `recompose` lib. + * + * Please see: https://github.com/metabase/metabase/pull/16829. + * + * Copyright (c) 2013-present, Facebook, Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @providesModule shallowEqual + * @typechecks + */ + +/* eslint-disable no-self-compare */ + +const hasOwnProperty = Object.prototype.hasOwnProperty; + +/** + * inlined Object.is polyfill to avoid requiring consumers ship their own + * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is + */ +function is(x, y) { + // SameValue algorithm + if (x === y) { + // Steps 1-5, 7-10 + // Steps 6.b-6.e: +0 != -0 + // Added the nonzero y check to make Flow happy, but it is redundant + return x !== 0 || y !== 0 || 1 / x === 1 / y; + } + // Step 6.a: NaN == NaN + return x !== x && y !== y; +} + +/** + * Performs equality by iterating through keys on an object and returning false + * when any key has values which are not strictly equal between the arguments. + * Returns true when the values of all keys are strictly equal. + */ +function shallowEqual(objA, objB) { + if (is(objA, objB)) { + return true; + } + + if ( + typeof objA !== "object" || + objA === null || + typeof objB !== "object" || + objB === null + ) { + return false; + } + + const keysA = Object.keys(objA); + const keysB = Object.keys(objB); + + if (keysA.length !== keysB.length) { + return false; + } + + // Test for A's keys different from B. 
+ for (let i = 0; i < keysA.length; i++) { + if ( + !hasOwnProperty.call(objB, keysA[i]) || + !is(objA[keysA[i]], objB[keysA[i]]) + ) { + return false; + } + } + + return true; +} + +export default shallowEqual; diff --git a/frontend/src/metabase/selectors/user.js b/frontend/src/metabase/selectors/user.js index 07f75ae45e34..50a2dc7317af 100644 --- a/frontend/src/metabase/selectors/user.js +++ b/frontend/src/metabase/selectors/user.js @@ -2,6 +2,11 @@ import { createSelector } from "reselect"; export const getUser = state => state.currentUser; +export const getUserId = createSelector( + [getUser], + user => user?.id, +); + export const getUserIsAdmin = createSelector( [getUser], user => (user && user.is_superuser) || false, diff --git a/frontend/src/metabase/selectors/user.unit.spec.js b/frontend/src/metabase/selectors/user.unit.spec.js new file mode 100644 index 000000000000..9bde3e13bdb5 --- /dev/null +++ b/frontend/src/metabase/selectors/user.unit.spec.js @@ -0,0 +1,23 @@ +import { getUserIsAdmin } from "./user"; + +describe("metabase/selectors/user", () => { + it("should return true if user is an admin", () => { + const state = { + currentUser: { + is_superuser: true, + }, + }; + + expect(getUserIsAdmin(state)).toBe(true); + }); + + it("should return false if user is not an admin", () => { + const state = { + currentUser: { + is_superuser: false, + }, + }; + + expect(getUserIsAdmin(state)).toBe(false); + }); +}); diff --git a/frontend/src/metabase/services.js b/frontend/src/metabase/services.js index b9307d604b68..5cfc8f978773 100644 --- a/frontend/src/metabase/services.js +++ b/frontend/src/metabase/services.js @@ -231,6 +231,7 @@ export const MetabaseApi = { db_sync_schema: POST("/api/database/:dbId/sync_schema"), db_rescan_values: POST("/api/database/:dbId/rescan_values"), db_discard_values: POST("/api/database/:dbId/discard_values"), + db_get_db_ids_with_deprecated_drivers: GET("/db-ids-with-deprecated-drivers"), table_list: GET("/api/table"), // table_get: 
GET("/api/table/:tableId"), table_update: PUT("/api/table/:id"), @@ -307,24 +308,29 @@ export const MetabaseApi = { }), }; +export const ModerationReviewApi = { + create: POST("/api/moderation-review"), + update: PUT("/api/moderation-review/:id"), +}; + export const PulseApi = { list: GET("/api/pulse"), create: POST("/api/pulse"), get: GET("/api/pulse/:pulseId"), update: PUT("/api/pulse/:id"), - delete: DELETE("/api/pulse/:pulseId"), test: POST("/api/pulse/test"), form_input: GET("/api/pulse/form_input"), preview_card: GET("/api/pulse/preview_card_info/:id"), + unsubscribe: DELETE("/api/pulse/:id/subscription"), }; export const AlertApi = { list: GET("/api/alert"), list_for_question: GET("/api/alert/question/:questionId"), + get: GET("/api/alert/:id"), create: POST("/api/alert"), update: PUT("/api/alert/:id"), - delete: DELETE("/api/alert/:id"), - unsubscribe: PUT("/api/alert/:id/unsubscribe"), + unsubscribe: DELETE("/api/alert/:id/subscription"), }; export const SegmentApi = { @@ -470,5 +476,3 @@ function setParamsEndpoints(prefix: string) { prefix + "/dashboard/:dashId/params/:paramId/search/:query", ); } - -global.services = exports; diff --git a/frontend/src/metabase/setup/components/Setup.jsx b/frontend/src/metabase/setup/components/Setup.jsx index 18ec4319a8e9..47d360d0a633 100644 --- a/frontend/src/metabase/setup/components/Setup.jsx +++ b/frontend/src/metabase/setup/components/Setup.jsx @@ -8,6 +8,7 @@ import MetabaseAnalytics from "metabase/lib/analytics"; import MetabaseSettings from "metabase/lib/settings"; import AddDatabaseHelpCard from "metabase/components/AddDatabaseHelpCard"; +import DriverWarning from "metabase/components/DriverWarning"; import ExternalLink from "metabase/components/ExternalLink"; import LogoIcon from "metabase/components/LogoIcon"; import NewsletterForm from "metabase/components/NewsletterForm"; @@ -212,6 +213,11 @@ export default class Setup extends Component { backgroundColor: color("white"), }} /> +
    ); diff --git a/frontend/src/metabase/setup/components/Setup.styled.js b/frontend/src/metabase/setup/components/Setup.styled.jsx similarity index 100% rename from frontend/src/metabase/setup/components/Setup.styled.js rename to frontend/src/metabase/setup/components/Setup.styled.jsx diff --git a/frontend/src/metabase/sharing/components/AddEditSidebar.jsx b/frontend/src/metabase/sharing/components/AddEditSidebar.jsx index e8876bf48d88..89674b075e85 100644 --- a/frontend/src/metabase/sharing/components/AddEditSidebar.jsx +++ b/frontend/src/metabase/sharing/components/AddEditSidebar.jsx @@ -15,7 +15,7 @@ import Text from "metabase/components/type/Text"; import ModalWithTrigger from "metabase/components/ModalWithTrigger"; import RecipientPicker from "metabase/pulse/components/RecipientPicker"; import SchedulePicker from "metabase/components/SchedulePicker"; -import SendTestEmail from "metabase/components/SendTestEmail"; +import SendTestPulse from "metabase/components/SendTestPulse"; import Sidebar from "metabase/dashboard/components/Sidebar"; import Toggle from "metabase/components/Toggle"; import Select, { Option } from "metabase/components/Select"; @@ -56,6 +56,7 @@ export const AddEditSlackSidebar = connect(mapStateToProps)( function _AddEditEmailSidebar({ pulse, formInput, + formError, channel, channelSpec, users, @@ -76,10 +77,10 @@ function _AddEditEmailSidebar({ }) { return (
    @@ -125,10 +126,13 @@ function _AddEditEmailSidebar({ } />
    -
    {PLUGIN_DASHBOARD_SUBSCRIPTION_PARAMETERS_SECTION_OVERRIDE.Component ? ( @@ -184,6 +188,7 @@ function _AddEditEmailSidebar({ _AddEditEmailSidebar.propTypes = { pulse: PropTypes.object.isRequired, formInput: PropTypes.object.isRequired, + formError: PropTypes.object, channel: PropTypes.object.isRequired, channelSpec: PropTypes.object.isRequired, users: PropTypes.array, @@ -266,6 +271,7 @@ function getConfirmItems(pulse) { function _AddEditSlackSidebar({ pulse, formInput, + formError, channel, channelSpec, parameters, @@ -276,16 +282,17 @@ function _AddEditSlackSidebar({ onCancel, onChannelPropertyChange, onChannelScheduleChange, + testPulse, toggleSkipIfEmpty, handleArchive, setPulseParameters, }) { return (
    @@ -317,6 +324,16 @@ function _AddEditSlackSidebar({ onChannelScheduleChange(newSchedule, changedProp) } /> +
    + +
    {PLUGIN_DASHBOARD_SUBSCRIPTION_PARAMETERS_SECTION_OVERRIDE.Component ? ( { + if (!includeAttachment) { + this.disableAllCards(); + } + this.setState({ isEnabled: includeAttachment }); }; @@ -160,6 +164,12 @@ export default class EmailAttachmentPicker extends Component { }); }; + disableAllCards() { + const selectedCardIds = new Set(); + this.updatePulseCards(this.state.selectedAttachmentType, selectedCardIds); + this.setState({ selectedCardIds }); + } + areAllSelected(allCards, selectedCardSet) { return allCards.length === selectedCardSet.size; } @@ -174,49 +184,52 @@ export default class EmailAttachmentPicker extends Component { return (
    - + {isEnabled && (
    -
    -
      -
    • +
        +
      • - {t`Questions to attach`}
      • {cards.map(card => (
      • { - this.onToggleCard(card); - }} > { + this.onToggleCard(card); + }} className="mr1" /> - {card.name}
      • ))}
      diff --git a/frontend/src/metabase/sharing/components/EmailAttachmentPicker.unit.spec.js b/frontend/src/metabase/sharing/components/EmailAttachmentPicker.unit.spec.js new file mode 100644 index 000000000000..c7ff3b12a4db --- /dev/null +++ b/frontend/src/metabase/sharing/components/EmailAttachmentPicker.unit.spec.js @@ -0,0 +1,197 @@ +import React from "react"; +import { render, fireEvent, screen } from "@testing-library/react"; + +import EmailAttachmentPicker from "./EmailAttachmentPicker"; + +describe("EmailAttachmentPicker", () => { + describe("when instantiated without any cards with attachments", () => { + let pulse; + let setPulse; + beforeEach(() => { + pulse = createPulse(); + setPulse = jest.fn(); + render( + , + ); + }); + + it("should have a Toggle that is not toggled", () => { + const toggle = screen.getByLabelText("Attach results"); + expect(toggle).toBeInTheDocument(); + expect(toggle).toHaveAttribute("aria-checked", "false"); + }); + + it("should have a clickable toggle that reveals attachment type and a checkbox per question", () => { + expect(screen.queryByText("File format")).toBeNull(); + expect(screen.queryByText("Questions to attach")).toBeNull(); + expect(screen.queryByText("card1")).toBeNull(); + expect(screen.queryByText("card2")).toBeNull(); + + const toggle = screen.getByLabelText("Attach results"); + fireEvent.click(toggle); + + const csvFormatInput = screen.getByLabelText(".csv"); + expect(csvFormatInput).toBeChecked(); + + const toggleAllCheckbox = screen.getByLabelText("Questions to attach"); + expect(toggleAllCheckbox).not.toBeChecked(); + + const card1Checkbox = screen.getByLabelText("card1"); + expect(card1Checkbox).not.toBeChecked(); + + const card2Checkbox = screen.getByLabelText("card2"); + expect(card2Checkbox).not.toBeChecked(); + }); + }); + + describe("when instantiated with cards with attachments", () => { + let pulse; + let setPulse; + beforeEach(() => { + pulse = createPulse(); + pulse.cards[0]["include_xls"] = true; 
+ setPulse = jest.fn(); + render( + , + ); + }); + + it("should have a toggled Toggle", () => { + const toggle = screen.getByLabelText("Attach results"); + expect(toggle).toBeInTheDocument(); + expect(toggle).toHaveAttribute("aria-checked", "true"); + }); + + it("should have selected the xlsv format", () => { + const csvFormatInput = screen.getByLabelText(".csv"); + expect(csvFormatInput).not.toBeChecked(); + const xlsxFormatInput = screen.getByLabelText(".xlsx"); + expect(xlsxFormatInput).toBeChecked(); + }); + + it("should show a checked checkbox for the card with an attachment", () => { + const toggleAllCheckbox = screen.getByLabelText("Questions to attach"); + expect(toggleAllCheckbox).not.toBeChecked(); + + const card1Checkbox = screen.getByLabelText("card1"); + expect(card1Checkbox).toBeChecked(); + + const card2Checkbox = screen.getByLabelText("card2"); + expect(card2Checkbox).not.toBeChecked(); + }); + + it("should let you check or uncheck card checkboxes", () => { + const card1Checkbox = screen.getByLabelText("card1"); + fireEvent.click(card1Checkbox); + expect(card1Checkbox).not.toBeChecked(); + fireEvent.click(card1Checkbox); + expect(card1Checkbox).toBeChecked(); + }); + + it("should let you check all checkboxes", () => { + const card2Checkbox = screen.getByLabelText("card2"); + fireEvent.click(card2Checkbox); + expect(card2Checkbox).toBeChecked(); + expect(screen.getByLabelText("Questions to attach")).toBeChecked(); + }); + + it("should let you check/uncheck all boxes via the `Questions to attach` toggle", () => { + const toggleAllCheckbox = screen.getByLabelText("Questions to attach"); + const card1Checkbox = screen.getByLabelText("card1"); + const card2Checkbox = screen.getByLabelText("card2"); + + fireEvent.click(toggleAllCheckbox); + + expect(screen.getByLabelText("Questions to attach")).toBeChecked(); + expect(card1Checkbox).toBeChecked(); + expect(card2Checkbox).toBeChecked(); + + fireEvent.click(toggleAllCheckbox); + + 
expect(screen.getByLabelText("Questions to attach")).not.toBeChecked(); + expect(card1Checkbox).not.toBeChecked(); + expect(card2Checkbox).not.toBeChecked(); + }); + + it("should uncheck all boxes if disabling attachments", () => { + const toggle = screen.getByLabelText("Attach results"); + expect(screen.getByLabelText("card1")).toBeChecked(); + + fireEvent.click(toggle); + + expect(screen.queryByText("File format")).toBeNull(); + expect(screen.queryByText("Questions to attach")).toBeNull(); + expect(screen.queryByText("card1")).toBeNull(); + expect(screen.queryByText("card2")).toBeNull(); + + fireEvent.click(toggle); + expect(screen.getByLabelText("card1")).not.toBeChecked(); + }); + }); +}); + +function createPulse() { + return { + name: "Parameters", + cards: [ + { + id: 4, + collection_id: null, + description: null, + display: "map", + name: "card1", + include_csv: false, + include_xls: false, + dashboard_card_id: 3, + dashboard_id: 1, + parameter_mappings: [], + }, + { + id: 6, + collection_id: null, + description: null, + display: "scalar", + name: "card2", + include_csv: false, + include_xls: false, + dashboard_card_id: 4, + dashboard_id: 1, + parameter_mappings: [], + }, + ], + channels: [ + { + channel_type: "email", + enabled: true, + recipients: [], + details: {}, + schedule_type: "hourly", + schedule_day: "mon", + schedule_hour: 8, + schedule_frame: "first", + }, + { + channel_type: "email", + enabled: true, + recipients: [], + details: {}, + schedule_type: "hourly", + schedule_day: "mon", + schedule_hour: 8, + schedule_frame: "first", + }, + ], + skip_if_empty: false, + collection_id: null, + parameters: [], + dashboard_id: 1, + }; +} diff --git a/frontend/src/metabase/sharing/components/PulsesListSidebar.jsx b/frontend/src/metabase/sharing/components/PulsesListSidebar.jsx index a1fddd4e4dad..880b2ee88442 100644 --- a/frontend/src/metabase/sharing/components/PulsesListSidebar.jsx +++ b/frontend/src/metabase/sharing/components/PulsesListSidebar.jsx @@ 
-2,6 +2,7 @@ import React from "react"; import PropTypes from "prop-types"; +import cx from "classnames"; import { connect } from "react-redux"; import _ from "underscore"; import { t, ngettext, msgid } from "ttag"; @@ -29,6 +30,7 @@ export const PulsesListSidebar = connect(mapStateToProps)(_PulsesListSidebar); function _PulsesListSidebar({ pulses, + formInput, createSubscription, onCancel, editPulse, @@ -59,33 +61,46 @@ function _PulsesListSidebar({
    - {pulses.map(pulse => ( - editPulse(pulse, pulse.channels[0].channel_type)} - > -
    -
    - - + {pulses.map(pulse => { + const canEdit = canEditPulse(pulse, formInput); + + return ( + + canEdit && editPulse(pulse, pulse.channels[0].channel_type) + } + > +
    +
    + + +
    +
    - -
    - - ))} + + ); + })}
    ); @@ -93,12 +108,22 @@ function _PulsesListSidebar({ _PulsesListSidebar.propTypes = { pulses: PropTypes.array.isRequired, + formInput: PropTypes.object.isRequired, createSubscription: PropTypes.func.isRequired, onCancel: PropTypes.func.isRequired, editPulse: PropTypes.func.isRequired, parameters: PropTypes.array.isRequired, }; +function canEditPulse(pulse, formInput) { + switch (pulse.channels[0].channel_type) { + case "email": + return formInput.channels.email != null; + case "slack": + return formInput.channels.slack != null; + } +} + function buildRecipientText(pulse) { const { channels: [firstChannel], diff --git a/frontend/src/metabase/sharing/components/SharingSidebar.jsx b/frontend/src/metabase/sharing/components/SharingSidebar.jsx index 8607810a12d4..196d051e2b7b 100644 --- a/frontend/src/metabase/sharing/components/SharingSidebar.jsx +++ b/frontend/src/metabase/sharing/components/SharingSidebar.jsx @@ -11,7 +11,6 @@ import { AddEditEmailSidebar, } from "metabase/sharing/components/AddEditSidebar"; import Sidebar from "metabase/dashboard/components/Sidebar"; -import Collections from "metabase/entities/collections"; import Pulses from "metabase/entities/pulses"; import User from "metabase/entities/users"; import { normalizeParameterValue } from "metabase/meta/Parameter"; @@ -22,18 +21,14 @@ import { cleanPulse, createChannel, getPulseParameters, + NEW_PULSE_TEMPLATE, } from "metabase/lib/pulse"; -import { - getPulseId, - getEditingPulse, - getPulseFormInput, -} from "metabase/pulse/selectors"; +import { getEditingPulse, getPulseFormInput } from "metabase/pulse/selectors"; import { getUser } from "metabase/selectors/user"; import { - setEditingPulse, updateEditingPulse, saveEditingPulse, fetchPulseFormInput, @@ -98,18 +93,12 @@ const getEditingPulseWithDefaults = (state, props) => { }; const mapStateToProps = (state, props) => ({ - pulseId: getPulseId(state, props), pulse: getEditingPulseWithDefaults(state, props), formInput: getPulseFormInput(state, 
props), user: getUser(state), - initialCollectionId: Collections.selectors.getInitialCollectionId( - state, - props, - ), }); const mapDispatchToProps = { - setEditingPulse, updateEditingPulse, saveEditingPulse, fetchPulseFormInput, @@ -130,6 +119,8 @@ class SharingSidebar extends React.Component { editingMode: "list-pulses", // use this to know where to go "back" to returnMode: [], + isSaving: false, + formError: null, }; static propTypes = { @@ -138,9 +129,7 @@ class SharingSidebar extends React.Component { formInput: PropTypes.object.isRequired, initialCollectionId: PropTypes.number, pulse: PropTypes.object.isRequired, - pulseId: PropTypes.number, saveEditingPulse: PropTypes.func.isRequired, - setEditingPulse: PropTypes.func.isRequired, testPulse: PropTypes.func.isRequired, updateEditingPulse: PropTypes.func.isRequired, pulses: PropTypes.array.isRequired, @@ -154,8 +143,8 @@ class SharingSidebar extends React.Component { this.props.updateEditingPulse(pulse); }; - addChannel = type => { - const { dashboard, pulse, formInput } = this.props; + setPulseWithChannel = type => { + const { dashboard, formInput } = this.props; const channelSpec = formInput.channels[type]; if (!channelSpec) { @@ -165,8 +154,8 @@ class SharingSidebar extends React.Component { const channel = createChannel(channelSpec); const newPulse = { - ...pulse, - channels: pulse.channels.concat(channel), + ...NEW_PULSE_TEMPLATE, + channels: [channel], cards: nonTextCardsFromDashboard(dashboard), }; this.setPulse(newPulse); @@ -174,11 +163,6 @@ class SharingSidebar extends React.Component { componentDidMount = async () => { await this.props.fetchPulseFormInput(); - - this.props.setEditingPulse( - this.props.pulseId, - this.props.initialCollectionId, - ); }; onChannelPropertyChange = (index, name, value) => { @@ -216,6 +200,11 @@ class SharingSidebar extends React.Component { handleSave = async () => { const { pulse, dashboard, formInput } = this.props; + const { isSaving } = this.state; + + if 
(isSaving) { + return; + } const cleanedPulse = cleanPulse(pulse, formInput.channels); cleanedPulse.name = dashboard.name; @@ -241,11 +230,16 @@ class SharingSidebar extends React.Component { }, ); - await this.props.updateEditingPulse(cleanedPulse); - - // The order below matters; it hides the "Done" button faster and prevents two pulses from being made if it's double-clicked - this.setState({ editingMode: "list-pulses", returnMode: [] }); - await this.props.saveEditingPulse(); + try { + this.setState({ isSaving: true, formError: null }); + await this.props.updateEditingPulse(cleanedPulse); + await this.props.saveEditingPulse(); + this.setState({ editingMode: "list-pulses", returnMode: [] }); + } catch (e) { + this.setState({ formError: e }); + } finally { + this.setState({ isSaving: false }); + } }; createSubscription = () => { @@ -255,8 +249,6 @@ class SharingSidebar extends React.Component { returnMode: returnMode.concat([editingMode]), }; }); - - this.props.setEditingPulse(null, null); }; editPulse = (pulse, channelType) => { @@ -291,7 +283,7 @@ class SharingSidebar extends React.Component { }; render() { - const { editingMode } = this.state; + const { editingMode, formError } = this.state; const { pulse, pulses, @@ -310,6 +302,7 @@ class SharingSidebar extends React.Component { return ( { @@ -428,7 +424,7 @@ class SharingSidebar extends React.Component { returnMode: returnMode.concat([editingMode]), }; }); - this.addChannel("slack"); + this.setPulseWithChannel("slack"); } }} /> diff --git a/frontend/src/metabase/static-viz/components/CategoricalAreaChart/CategoricalAreaChart.jsx b/frontend/src/metabase/static-viz/components/CategoricalAreaChart/CategoricalAreaChart.jsx new file mode 100644 index 000000000000..9d897853af4d --- /dev/null +++ b/frontend/src/metabase/static-viz/components/CategoricalAreaChart/CategoricalAreaChart.jsx @@ -0,0 +1,150 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { AxisBottom, AxisLeft } from 
"@visx/axis"; +import { GridRows } from "@visx/grid"; +import { scaleBand, scaleLinear } from "@visx/scale"; +import { AreaClosed, LinePath } from "@visx/shape"; +import { Text } from "@visx/text"; +import { + getXTickWidth, + getXTickLabelProps, + getYTickLabelProps, + getYTickWidth, + getXTickHeight, +} from "../../lib/axes"; +import { formatNumber } from "../../lib/numbers"; +import { truncateText } from "../../lib/text"; + +const propTypes = { + data: PropTypes.array.isRequired, + accessors: PropTypes.shape({ + x: PropTypes.func.isRequired, + y: PropTypes.func.isRequired, + }).isRequired, + settings: PropTypes.shape({ + x: PropTypes.object, + y: PropTypes.object, + colors: PropTypes.object, + }), + labels: PropTypes.shape({ + left: PropTypes.string, + bottom: PropTypes.string, + }), +}; + +const layout = { + width: 540, + height: 300, + margin: { + top: 0, + left: 55, + right: 40, + bottom: 40, + }, + font: { + size: 11, + family: "Lato, sans-serif", + }, + colors: { + brand: "#509ee3", + textLight: "#b8bbc3", + textMedium: "#949aab", + }, + barPadding: 0.2, + labelPadding: 12, + maxTickWidth: 100, + areaOpacity: 0.2, + strokeDasharray: "4", +}; + +const CategoricalAreaChart = ({ data, accessors, settings, labels }) => { + const colors = settings?.colors; + const isVertical = data.length > 10; + const xTickWidth = getXTickWidth(data, accessors, layout.maxTickWidth); + const xTickHeight = getXTickHeight(xTickWidth); + const yTickWidth = getYTickWidth(data, accessors, settings); + const xLabelOffset = xTickHeight + layout.labelPadding + layout.font.size; + const yLabelOffset = yTickWidth + layout.labelPadding; + const xMin = yLabelOffset + layout.font.size * 1.5; + const xMax = layout.width - layout.margin.right; + const yMin = isVertical ? 
xLabelOffset : layout.margin.bottom; + const yMax = layout.height - yMin; + const innerWidth = xMax - xMin; + const textBaseline = Math.floor(layout.font.size / 2); + const leftLabel = labels?.left; + const bottomLabel = !isVertical ? labels?.bottom : undefined; + const palette = { ...layout.colors, ...colors }; + + const xScale = scaleBand({ + domain: data.map(accessors.x), + range: [xMin, xMax], + round: true, + padding: layout.barPadding, + }); + + const yScale = scaleLinear({ + domain: [0, Math.max(...data.map(accessors.y))], + range: [yMax, 0], + nice: true, + }); + + const getXTickProps = ({ x, y, formattedValue, ...props }) => { + const textWidth = isVertical ? xTickWidth : xScale.bandwidth(); + const truncatedText = truncateText(formattedValue, textWidth); + const transform = isVertical + ? `rotate(45, ${x} ${y}) translate(-${textBaseline} 0)` + : undefined; + + return { ...props, x, y, transform, children: truncatedText }; + }; + + return ( + + + xScale(accessors.x(d)) + xScale.bandwidth() / 2} + y={d => yScale(accessors.y(d))} + /> + formatNumber(value, settings?.y)} + tickLabelProps={() => getYTickLabelProps(layout)} + /> + xScale(accessors.x(d)) + xScale.bandwidth() / 2} + y={d => yScale(accessors.y(d))} + /> + } + tickLabelProps={() => getXTickLabelProps(layout, isVertical)} + /> + + ); +}; + +CategoricalAreaChart.propTypes = propTypes; + +export default CategoricalAreaChart; diff --git a/frontend/src/metabase/static-viz/components/CategoricalAreaChart/index.js b/frontend/src/metabase/static-viz/components/CategoricalAreaChart/index.js new file mode 100644 index 000000000000..f57673cb6a57 --- /dev/null +++ b/frontend/src/metabase/static-viz/components/CategoricalAreaChart/index.js @@ -0,0 +1 @@ +export { default } from "./CategoricalAreaChart"; diff --git a/frontend/src/metabase/static-viz/components/CategoricalBarChart/CategoricalBarChart.jsx b/frontend/src/metabase/static-viz/components/CategoricalBarChart/CategoricalBarChart.jsx new file mode 100644 
index 000000000000..079e48de4886 --- /dev/null +++ b/frontend/src/metabase/static-viz/components/CategoricalBarChart/CategoricalBarChart.jsx @@ -0,0 +1,147 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { AxisBottom, AxisLeft } from "@visx/axis"; +import { GridRows } from "@visx/grid"; +import { scaleBand, scaleLinear } from "@visx/scale"; +import { Bar } from "@visx/shape"; +import { Text } from "@visx/text"; +import { + getXTickWidth, + getXTickLabelProps, + getYTickLabelProps, + getYTickWidth, + getXTickHeight, +} from "../../lib/axes"; +import { formatNumber } from "../../lib/numbers"; +import { truncateText } from "../../lib/text"; + +const propTypes = { + data: PropTypes.array.isRequired, + accessors: PropTypes.shape({ + x: PropTypes.func.isRequired, + y: PropTypes.func.isRequired, + }).isRequired, + settings: PropTypes.shape({ + x: PropTypes.object, + y: PropTypes.object, + colors: PropTypes.object, + }), + labels: PropTypes.shape({ + left: PropTypes.string, + bottom: PropTypes.string, + }), +}; + +const layout = { + width: 540, + height: 300, + margin: { + top: 0, + left: 55, + right: 40, + bottom: 40, + }, + font: { + size: 11, + family: "Lato, sans-serif", + }, + colors: { + brand: "#509ee3", + textLight: "#b8bbc3", + textMedium: "#949aab", + }, + barPadding: 0.2, + labelPadding: 12, + maxTickWidth: 100, + strokeDasharray: "4", +}; + +const CategoricalBarChart = ({ data, accessors, settings, labels }) => { + const colors = settings?.colors; + const isVertical = data.length > 10; + const xTickWidth = getXTickWidth(data, accessors, layout.maxTickWidth); + const xTickHeight = getXTickHeight(xTickWidth); + const yTickWidth = getYTickWidth(data, accessors, settings); + const xLabelOffset = xTickHeight + layout.labelPadding + layout.font.size; + const yLabelOffset = yTickWidth + layout.labelPadding; + const xMin = yLabelOffset + layout.font.size * 1.5; + const xMax = layout.width - layout.margin.right; + const yMin = isVertical ? 
xLabelOffset : layout.margin.bottom; + const yMax = layout.height - yMin; + const innerWidth = xMax - xMin; + const innerHeight = yMax - layout.margin.top; + const textBaseline = Math.floor(layout.font.size / 2); + const leftLabel = labels?.left; + const bottomLabel = !isVertical ? labels?.bottom : undefined; + const palette = { ...layout.colors, ...colors }; + + const xScale = scaleBand({ + domain: data.map(accessors.x), + range: [xMin, xMax], + round: true, + padding: layout.barPadding, + }); + + const yScale = scaleLinear({ + domain: [0, Math.max(...data.map(accessors.y))], + range: [yMax, 0], + nice: true, + }); + + const getBarProps = d => { + const width = xScale.bandwidth(); + const height = innerHeight - yScale(accessors.y(d)); + const x = xScale(accessors.x(d)); + const y = yMax - height; + + return { x, y, width, height, fill: palette.brand }; + }; + + const getXTickProps = ({ x, y, formattedValue, ...props }) => { + const textWidth = isVertical ? xTickWidth : xScale.bandwidth(); + const truncatedText = truncateText(formattedValue, textWidth); + const transform = isVertical + ? 
`rotate(45, ${x} ${y}) translate(-${textBaseline} 0)` + : undefined; + + return { ...props, x, y, transform, children: truncatedText }; + }; + + return ( + + + {data.map((d, index) => ( + + ))} + formatNumber(value, settings?.y)} + tickLabelProps={() => getYTickLabelProps(layout)} + /> + } + tickLabelProps={() => getXTickLabelProps(layout, isVertical)} + /> + + ); +}; + +CategoricalBarChart.propTypes = propTypes; + +export default CategoricalBarChart; diff --git a/frontend/src/metabase/static-viz/components/CategoricalBarChart/index.js b/frontend/src/metabase/static-viz/components/CategoricalBarChart/index.js new file mode 100644 index 000000000000..74a0f3cb5bcf --- /dev/null +++ b/frontend/src/metabase/static-viz/components/CategoricalBarChart/index.js @@ -0,0 +1 @@ +export { default } from "./CategoricalBarChart"; diff --git a/frontend/src/metabase/static-viz/components/CategoricalDonutChart/CategoricalDonutChart.jsx b/frontend/src/metabase/static-viz/components/CategoricalDonutChart/CategoricalDonutChart.jsx new file mode 100644 index 000000000000..3c08e4d5fc1e --- /dev/null +++ b/frontend/src/metabase/static-viz/components/CategoricalDonutChart/CategoricalDonutChart.jsx @@ -0,0 +1,106 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import { Group } from "@visx/group"; +import { Pie } from "@visx/shape"; +import { Text } from "@visx/text"; +import { formatNumber } from "../../lib/numbers"; + +const propTypes = { + data: PropTypes.array, + colors: PropTypes.object, + accessors: PropTypes.shape({ + dimension: PropTypes.func, + metric: PropTypes.func, + }), + settings: PropTypes.shape({ + metric: PropTypes.object, + }), +}; + +const layout = { + width: 540, + height: 540, + margin: 20, + font: { + family: "Lato, sans-serif", + weight: 700, + }, + colors: { + textLight: "#b8bbc3", + textDark: "#4c5773", + }, + thickness: 100, + cornerRadius: 2, + padAngle: 0.02, + valueFontSize: 22, + labelFontSize: 14, +}; + +const 
CategoricalDonutChart = ({ data, colors, accessors, settings }) => { + const innerWidth = layout.width - layout.margin * 2; + const innerHeight = layout.height - layout.margin * 2; + const outerRadius = Math.min(innerWidth, innerHeight) / 2; + const innerRadius = outerRadius - layout.thickness; + const centerX = layout.margin + innerWidth / 2; + const centerY = layout.margin + innerHeight / 2; + const pieSortValues = (a, b) => b - a; + const textHeight = layout.valueFontSize + layout.labelFontSize; + const textCenter = textHeight / 3; + const totalValue = data.map(accessors.metric).reduce((a, b) => a + b, 0); + const totalLabel = t`Total`.toUpperCase(); + + return ( + + + + {pie => + pie.arcs.map((arc, index) => { + const path = pie.path(arc); + const dimension = arc.data[0]; + const fill = colors[dimension]; + + return ( + + + + ); + }) + } + + + + {formatNumber(totalValue, settings?.metric)} + + + {totalLabel} + + + + + ); +}; + +CategoricalDonutChart.propTypes = propTypes; + +export default CategoricalDonutChart; diff --git a/frontend/src/metabase/static-viz/components/CategoricalDonutChart/index.js b/frontend/src/metabase/static-viz/components/CategoricalDonutChart/index.js new file mode 100644 index 000000000000..885d5e5ac615 --- /dev/null +++ b/frontend/src/metabase/static-viz/components/CategoricalDonutChart/index.js @@ -0,0 +1 @@ +export { default } from "./CategoricalDonutChart"; diff --git a/frontend/src/metabase/static-viz/components/CategoricalLineChart/CategoricalLineChart.jsx b/frontend/src/metabase/static-viz/components/CategoricalLineChart/CategoricalLineChart.jsx new file mode 100644 index 000000000000..0534b7535c2e --- /dev/null +++ b/frontend/src/metabase/static-viz/components/CategoricalLineChart/CategoricalLineChart.jsx @@ -0,0 +1,141 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { AxisBottom, AxisLeft } from "@visx/axis"; +import { GridRows } from "@visx/grid"; +import { scaleBand, scaleLinear } from 
"@visx/scale"; +import { LinePath } from "@visx/shape"; +import { Text } from "@visx/text"; +import { + getXTickWidth, + getXTickLabelProps, + getYTickLabelProps, + getYTickWidth, + getXTickHeight, +} from "../../lib/axes"; +import { formatNumber } from "../../lib/numbers"; +import { truncateText } from "../../lib/text"; + +const propTypes = { + data: PropTypes.array.isRequired, + accessors: PropTypes.shape({ + x: PropTypes.func.isRequired, + y: PropTypes.func.isRequired, + }).isRequired, + settings: PropTypes.shape({ + x: PropTypes.object, + y: PropTypes.object, + colors: PropTypes.object, + }), + labels: PropTypes.shape({ + left: PropTypes.string, + bottom: PropTypes.string, + }), +}; + +const layout = { + width: 540, + height: 300, + margin: { + top: 0, + left: 55, + right: 40, + bottom: 40, + }, + font: { + size: 11, + family: "Lato, sans-serif", + }, + colors: { + brand: "#509ee3", + textLight: "#b8bbc3", + textMedium: "#949aab", + }, + barPadding: 0.2, + labelPadding: 12, + maxTickWidth: 100, + strokeDasharray: "4", +}; + +const CategoricalLineChart = ({ data, accessors, settings, labels }) => { + const colors = settings?.colors; + const isVertical = data.length > 10; + const xTickWidth = getXTickWidth(data, accessors, layout.maxTickWidth); + const xTickHeight = getXTickHeight(xTickWidth); + const yTickWidth = getYTickWidth(data, accessors, settings); + const xLabelOffset = xTickHeight + layout.labelPadding + layout.font.size; + const yLabelOffset = yTickWidth + layout.labelPadding; + const xMin = yLabelOffset + layout.font.size * 1.5; + const xMax = layout.width - layout.margin.right; + const yMin = isVertical ? xLabelOffset : layout.margin.bottom; + const yMax = layout.height - yMin; + const innerWidth = xMax - xMin; + const textBaseline = Math.floor(layout.font.size / 2); + const leftLabel = labels?.left; + const bottomLabel = !isVertical ? 
labels?.bottom : undefined; + const palette = { ...layout.colors, ...colors }; + + const xScale = scaleBand({ + domain: data.map(accessors.x), + range: [xMin, xMax], + round: true, + padding: layout.barPadding, + }); + + const yScale = scaleLinear({ + domain: [0, Math.max(...data.map(accessors.y))], + range: [yMax, 0], + nice: true, + }); + + const getXTickProps = ({ x, y, formattedValue, ...props }) => { + const textWidth = isVertical ? xTickWidth : xScale.bandwidth(); + const truncatedText = truncateText(formattedValue, textWidth); + const transform = isVertical + ? `rotate(45, ${x} ${y}) translate(-${textBaseline} 0)` + : undefined; + + return { ...props, x, y, transform, children: truncatedText }; + }; + + return ( + + + xScale(accessors.x(d)) + xScale.bandwidth() / 2} + y={d => yScale(accessors.y(d))} + /> + formatNumber(value, settings?.y)} + tickLabelProps={() => getYTickLabelProps(layout)} + /> + } + tickLabelProps={() => getXTickLabelProps(layout, isVertical)} + /> + + ); +}; + +CategoricalLineChart.propTypes = propTypes; + +export default CategoricalLineChart; diff --git a/frontend/src/metabase/static-viz/components/CategoricalLineChart/index.js b/frontend/src/metabase/static-viz/components/CategoricalLineChart/index.js new file mode 100644 index 000000000000..c6ab45b493ba --- /dev/null +++ b/frontend/src/metabase/static-viz/components/CategoricalLineChart/index.js @@ -0,0 +1 @@ +export { default } from "./CategoricalLineChart"; diff --git a/frontend/src/metabase/static-viz/components/TimeSeriesAreaChart/TimeSeriesAreaChart.jsx b/frontend/src/metabase/static-viz/components/TimeSeriesAreaChart/TimeSeriesAreaChart.jsx new file mode 100644 index 000000000000..20628f1bcd15 --- /dev/null +++ b/frontend/src/metabase/static-viz/components/TimeSeriesAreaChart/TimeSeriesAreaChart.jsx @@ -0,0 +1,133 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { scaleLinear, scaleTime } from "@visx/scale"; +import { GridRows } from "@visx/grid"; 
+import { AxisBottom, AxisLeft } from "@visx/axis"; +import { AreaClosed, LinePath } from "@visx/shape"; +import { + getXTickLabelProps, + getYTickLabelProps, + getYTickWidth, +} from "../../lib/axes"; +import { formatDate } from "../../lib/dates"; +import { formatNumber } from "../../lib/numbers"; + +const propTypes = { + data: PropTypes.array.isRequired, + accessors: PropTypes.shape({ + x: PropTypes.func, + y: PropTypes.func, + }).isRequired, + settings: PropTypes.shape({ + x: PropTypes.object, + y: PropTypes.object, + colors: PropTypes.object, + }), + labels: PropTypes.shape({ + left: PropTypes.string, + bottom: PropTypes.string, + }), +}; + +const layout = { + width: 540, + height: 300, + margin: { + top: 0, + left: 55, + right: 40, + bottom: 40, + }, + font: { + size: 11, + family: "Lato, sans-serif", + }, + colors: { + brand: "#509ee3", + brandLight: "#DDECFA", + textLight: "#b8bbc3", + textMedium: "#949aab", + }, + numTicks: 5, + strokeWidth: 2, + labelPadding: 12, + areaOpacity: 0.2, + strokeDasharray: "4", +}; + +const TimeSeriesAreaChart = ({ data, accessors, settings, labels }) => { + const colors = settings?.colors; + const yTickWidth = getYTickWidth(data, accessors, settings); + const yLabelOffset = yTickWidth + layout.labelPadding; + const xMin = yLabelOffset + layout.font.size * 1.5; + const xMax = layout.width - layout.margin.right; + const yMax = layout.height - layout.margin.bottom; + const innerWidth = xMax - xMin; + const leftLabel = labels?.left; + const bottomLabel = labels?.bottom; + const palette = { ...layout.colors, ...colors }; + + const xScale = scaleTime({ + domain: [ + Math.min(...data.map(accessors.x)), + Math.max(...data.map(accessors.x)), + ], + range: [xMin, xMax], + }); + + const yScale = scaleLinear({ + domain: [0, Math.max(...data.map(accessors.y))], + range: [yMax, 0], + nice: true, + }); + + return ( + + + xScale(accessors.x(d))} + y={d => yScale(accessors.y(d))} + /> + xScale(accessors.x(d))} + y={d => yScale(accessors.y(d))} 
+ /> + formatNumber(value, settings?.y)} + tickLabelProps={() => getYTickLabelProps(layout)} + /> + formatDate(value, settings?.x)} + tickLabelProps={() => getXTickLabelProps(layout)} + /> + + ); +}; + +TimeSeriesAreaChart.propTypes = propTypes; + +export default TimeSeriesAreaChart; diff --git a/frontend/src/metabase/static-viz/components/TimeSeriesAreaChart/index.js b/frontend/src/metabase/static-viz/components/TimeSeriesAreaChart/index.js new file mode 100644 index 000000000000..97b1d21fca2e --- /dev/null +++ b/frontend/src/metabase/static-viz/components/TimeSeriesAreaChart/index.js @@ -0,0 +1 @@ +export { default } from "./TimeSeriesAreaChart"; diff --git a/frontend/src/metabase/static-viz/components/TimeSeriesBarChart/TimeSeriesBarChart.jsx b/frontend/src/metabase/static-viz/components/TimeSeriesBarChart/TimeSeriesBarChart.jsx new file mode 100644 index 000000000000..204d73a77b19 --- /dev/null +++ b/frontend/src/metabase/static-viz/components/TimeSeriesBarChart/TimeSeriesBarChart.jsx @@ -0,0 +1,128 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { AxisBottom, AxisLeft } from "@visx/axis"; +import { GridRows } from "@visx/grid"; +import { scaleBand, scaleLinear } from "@visx/scale"; +import { Bar } from "@visx/shape"; +import { + getXTickLabelProps, + getYTickLabelProps, + getYTickWidth, +} from "../../lib/axes"; +import { formatDate } from "../../lib/dates"; +import { formatNumber } from "../../lib/numbers"; + +const propTypes = { + data: PropTypes.array.isRequired, + accessors: PropTypes.shape({ + x: PropTypes.func.isRequired, + y: PropTypes.func.isRequired, + }).isRequired, + settings: PropTypes.shape({ + x: PropTypes.object, + y: PropTypes.object, + colors: PropTypes.object, + }), + labels: PropTypes.shape({ + left: PropTypes.string, + bottom: PropTypes.string, + }), +}; + +const layout = { + width: 540, + height: 300, + margin: { + top: 0, + left: 55, + right: 40, + bottom: 40, + }, + font: { + size: 11, + family: "Lato, 
sans-serif", + }, + colors: { + brand: "#509ee3", + textLight: "#b8bbc3", + textMedium: "#949aab", + }, + numTicks: 5, + barPadding: 0.2, + labelPadding: 12, + strokeDasharray: "4", +}; + +const TimeSeriesBarChart = ({ data, accessors, settings, labels }) => { + const colors = settings?.colors; + const yTickWidth = getYTickWidth(data, accessors, settings); + const yLabelOffset = yTickWidth + layout.labelPadding; + const xMin = yLabelOffset + layout.font.size * 1.5; + const xMax = layout.width - layout.margin.right; + const yMax = layout.height - layout.margin.bottom; + const innerWidth = xMax - xMin; + const innerHeight = yMax - layout.margin.top; + const leftLabel = labels?.left; + const bottomLabel = labels?.bottom; + const palette = { ...layout.colors, ...colors }; + + const xScale = scaleBand({ + domain: data.map(accessors.x), + range: [xMin, xMax], + round: true, + padding: layout.barPadding, + }); + + const yScale = scaleLinear({ + domain: [0, Math.max(...data.map(accessors.y))], + range: [yMax, 0], + nice: true, + }); + + const getBarProps = d => { + const width = xScale.bandwidth(); + const height = innerHeight - yScale(accessors.y(d)); + const x = xScale(accessors.x(d)); + const y = yMax - height; + + return { x, y, width, height, fill: palette.brand }; + }; + + return ( + + + {data.map((d, index) => ( + + ))} + formatNumber(value, settings?.y)} + tickLabelProps={() => getYTickLabelProps(layout)} + /> + formatDate(value, settings?.x)} + tickLabelProps={() => getXTickLabelProps(layout)} + /> + + ); +}; + +TimeSeriesBarChart.propTypes = propTypes; + +export default TimeSeriesBarChart; diff --git a/frontend/src/metabase/static-viz/components/TimeSeriesBarChart/index.js b/frontend/src/metabase/static-viz/components/TimeSeriesBarChart/index.js new file mode 100644 index 000000000000..b105a05e6795 --- /dev/null +++ b/frontend/src/metabase/static-viz/components/TimeSeriesBarChart/index.js @@ -0,0 +1 @@ +export { default } from "./TimeSeriesBarChart"; diff --git 
a/frontend/src/metabase/static-viz/components/TimeSeriesLineChart/TimeSeriesLineChart.jsx b/frontend/src/metabase/static-viz/components/TimeSeriesLineChart/TimeSeriesLineChart.jsx new file mode 100644 index 000000000000..a1ff189f9e85 --- /dev/null +++ b/frontend/src/metabase/static-viz/components/TimeSeriesLineChart/TimeSeriesLineChart.jsx @@ -0,0 +1,123 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { scaleLinear, scaleTime } from "@visx/scale"; +import { GridRows } from "@visx/grid"; +import { AxisBottom, AxisLeft } from "@visx/axis"; +import { LinePath } from "@visx/shape"; +import { + getXTickLabelProps, + getYTickWidth, + getYTickLabelProps, +} from "../../lib/axes"; +import { formatDate } from "../../lib/dates"; +import { formatNumber } from "../../lib/numbers"; + +const propTypes = { + data: PropTypes.array.isRequired, + accessors: PropTypes.shape({ + x: PropTypes.func, + y: PropTypes.func, + }).isRequired, + settings: PropTypes.shape({ + x: PropTypes.object, + y: PropTypes.object, + colors: PropTypes.object, + }), + labels: PropTypes.shape({ + left: PropTypes.string, + bottom: PropTypes.string, + }), +}; + +const layout = { + width: 540, + height: 300, + margin: { + top: 0, + left: 55, + right: 40, + bottom: 40, + }, + font: { + size: 11, + family: "Lato, sans-serif", + }, + colors: { + brand: "#509ee3", + textLight: "#b8bbc3", + textMedium: "#949aab", + }, + numTicks: 5, + labelPadding: 12, + strokeWidth: 2, + strokeDasharray: "4", +}; + +const TimeSeriesLineChart = ({ data, accessors, settings, labels }) => { + const colors = settings?.colors; + const yTickWidth = getYTickWidth(data, accessors, settings); + const yLabelOffset = yTickWidth + layout.labelPadding; + const xMin = yLabelOffset + layout.font.size * 1.5; + const xMax = layout.width - layout.margin.right; + const yMax = layout.height - layout.margin.bottom; + const innerWidth = xMax - xMin; + const leftLabel = labels?.left; + const bottomLabel = labels?.bottom; + 
const palette = { ...layout.colors, ...colors }; + + const xScale = scaleTime({ + domain: [ + Math.min(...data.map(accessors.x)), + Math.max(...data.map(accessors.x)), + ], + range: [xMin, xMax], + }); + + const yScale = scaleLinear({ + domain: [0, Math.max(...data.map(accessors.y))], + range: [yMax, 0], + nice: true, + }); + + return ( + + + xScale(accessors.x(d))} + y={d => yScale(accessors.y(d))} + /> + formatNumber(value, settings?.y)} + tickLabelProps={() => getYTickLabelProps(layout)} + /> + formatDate(value, settings?.x)} + tickLabelProps={() => getXTickLabelProps(layout)} + /> + + ); +}; + +TimeSeriesLineChart.propTypes = propTypes; + +export default TimeSeriesLineChart; diff --git a/frontend/src/metabase/static-viz/components/TimeSeriesLineChart/index.js b/frontend/src/metabase/static-viz/components/TimeSeriesLineChart/index.js new file mode 100644 index 000000000000..d23115fed8d4 --- /dev/null +++ b/frontend/src/metabase/static-viz/components/TimeSeriesLineChart/index.js @@ -0,0 +1 @@ +export { default } from "./TimeSeriesLineChart"; diff --git a/frontend/src/metabase/static-viz/containers/StaticChart/StaticChart.jsx b/frontend/src/metabase/static-viz/containers/StaticChart/StaticChart.jsx new file mode 100644 index 000000000000..e5452c5b475f --- /dev/null +++ b/frontend/src/metabase/static-viz/containers/StaticChart/StaticChart.jsx @@ -0,0 +1,45 @@ +import React from "react"; +import PropTypes from "prop-types"; +import CategoricalAreaChart from "../../components/CategoricalAreaChart"; +import CategoricalBarChart from "../../components/CategoricalBarChart"; +import CategoricalDonutChart from "../../components/CategoricalDonutChart"; +import CategoricalLineChart from "../../components/CategoricalLineChart"; +import TimeSeriesAreaChart from "../../components/TimeSeriesAreaChart"; +import TimeSeriesBarChart from "../../components/TimeSeriesBarChart"; +import TimeSeriesLineChart from "../../components/TimeSeriesLineChart"; + +const propTypes = { + type: 
PropTypes.oneOf([ + "categorical/area", + "categorical/bar", + "categorical/donut", + "categorical/line", + "timeseries/area", + "timeseries/bar", + "timeseries/line", + ]).isRequired, + options: PropTypes.object.isRequired, +}; + +const StaticChart = ({ type, options }) => { + switch (type) { + case "categorical/area": + return ; + case "categorical/bar": + return ; + case "categorical/donut": + return ; + case "categorical/line": + return ; + case "timeseries/area": + return ; + case "timeseries/bar": + return ; + case "timeseries/line": + return ; + } +}; + +StaticChart.propTypes = propTypes; + +export default StaticChart; diff --git a/frontend/src/metabase/static-viz/containers/StaticChart/StaticChart.unit.spec.js b/frontend/src/metabase/static-viz/containers/StaticChart/StaticChart.unit.spec.js new file mode 100644 index 000000000000..6c624adb1285 --- /dev/null +++ b/frontend/src/metabase/static-viz/containers/StaticChart/StaticChart.unit.spec.js @@ -0,0 +1,208 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import StaticChart from "./StaticChart"; + +describe("StaticChart", () => { + it("should render categorical/line", () => { + render( + row[0], + y: row => row[1], + }, + settings: { + y: { + number_style: "currency", + currency: "USD", + currency_style: "symbol", + }, + }, + labels: { + left: "Count", + bottom: "Category", + }, + }} + />, + ); + + screen.getByText("Gadget"); + screen.getByText("Widget"); + screen.getAllByText("Count"); + screen.getAllByText("Category"); + }); + + it("should render categorical/area", () => { + render( + row[0], + y: row => row[1], + }, + settings: { + y: { + number_style: "currency", + currency: "USD", + currency_style: "symbol", + }, + }, + labels: { + left: "Count", + bottom: "Category", + }, + }} + />, + ); + + screen.getByText("Gadget"); + screen.getByText("Widget"); + screen.getAllByText("Count"); + screen.getAllByText("Category"); + }); + + it("should render categorical/bar", 
() => { + render( + row[0], + y: row => row[1], + }, + settings: { + y: { + number_style: "currency", + currency: "USD", + currency_style: "symbol", + }, + }, + labels: { + left: "Count", + bottom: "Category", + }, + }} + />, + ); + + screen.getByText("Gadget"); + screen.getByText("Widget"); + screen.getAllByText("Count"); + screen.getAllByText("Category"); + }); + + it("should render categorical/donut", () => { + render( + row[0], + metric: row => row[1], + }, + settings: { + metric: { + number_style: "currency", + currency: "USD", + currency_style: "symbol", + }, + }, + }} + />, + ); + + screen.getByText("$5,100.00"); + screen.getAllByText("TOTAL"); + }); + + it("should render timeseries/line", () => { + render( + new Date(row[0]).valueOf(), + y: row => row[1], + }, + settings: { + x: { + date_style: "dddd", + }, + }, + labels: { + left: "Count", + bottom: "Time", + }, + }} + />, + ); + + screen.getAllByText("Count"); + screen.getAllByText("Time"); + }); + + it("should render timeseries/area", () => { + render( + new Date(row[0]).valueOf(), + y: row => row[1], + }, + settings: { + x: { + date_style: "MMM", + }, + }, + labels: { + left: "Count", + bottom: "Time", + }, + }} + />, + ); + + screen.getAllByText("Count"); + screen.getAllByText("Time"); + }); + + it("should render timeseries/bar", () => { + render( + new Date(row[0]).valueOf(), + y: row => row[1], + }, + settings: { + x: { + date_style: "dddd", + }, + }, + labels: { + left: "Count", + bottom: "Time", + }, + }} + />, + ); + + screen.getAllByText("Count"); + screen.getAllByText("Time"); + }); +}); diff --git a/frontend/src/metabase/static-viz/containers/StaticChart/index.js b/frontend/src/metabase/static-viz/containers/StaticChart/index.js new file mode 100644 index 000000000000..18582b8799cc --- /dev/null +++ b/frontend/src/metabase/static-viz/containers/StaticChart/index.js @@ -0,0 +1 @@ +export { default } from "./StaticChart"; diff --git a/frontend/src/metabase/static-viz/index.js 
b/frontend/src/metabase/static-viz/index.js new file mode 100644 index 000000000000..0034b687d157 --- /dev/null +++ b/frontend/src/metabase/static-viz/index.js @@ -0,0 +1,9 @@ +import React from "react"; +import ReactDOMServer from "react-dom/server"; +import StaticChart from "./containers/StaticChart"; + +export function RenderChart(type, options) { + return ReactDOMServer.renderToStaticMarkup( + , + ); +} diff --git a/frontend/src/metabase/static-viz/lib/axes.js b/frontend/src/metabase/static-viz/lib/axes.js new file mode 100644 index 000000000000..267dbe721c3b --- /dev/null +++ b/frontend/src/metabase/static-viz/lib/axes.js @@ -0,0 +1,38 @@ +import { formatNumber } from "./numbers"; +import { measureText } from "./text"; + +export const getXTickWidth = (data, accessors, maxWidth) => { + const tickWidth = data + .map(accessors.x) + .map(tick => String(tick)) + .map(tick => measureText(tick)) + .reduce((a, b) => Math.max(a, b), 0); + + return Math.min(tickWidth, maxWidth); +}; + +export const getXTickHeight = tickWidth => { + return Math.ceil(Math.sqrt(Math.pow(tickWidth, 2) / 2)); +}; + +export const getYTickWidth = (data, accessors, settings) => { + return data + .map(accessors.y) + .map(tick => formatNumber(tick, settings?.y)) + .map(tick => measureText(tick)) + .reduce((a, b) => Math.max(a, b), 0); +}; + +export const getXTickLabelProps = (layout, isVertical) => ({ + fontSize: layout.font.size, + fontFamily: layout.font.family, + fill: layout.colors.textMedium, + textAnchor: isVertical ? 
"start" : "middle", +}); + +export const getYTickLabelProps = layout => ({ + fontSize: layout.font.size, + fontFamily: layout.font.family, + fill: layout.colors.textMedium, + textAnchor: "end", +}); diff --git a/frontend/src/metabase/static-viz/lib/axes.unit.spec.js b/frontend/src/metabase/static-viz/lib/axes.unit.spec.js new file mode 100644 index 000000000000..7322c32d8fc9 --- /dev/null +++ b/frontend/src/metabase/static-viz/lib/axes.unit.spec.js @@ -0,0 +1,30 @@ +import { getXTickHeight, getXTickWidth, getYTickWidth } from "./axes"; + +describe("getXTickWidth", () => { + it("should get tick width for x axis assuming 6px char width", () => { + const data = [{ x: 1 }, { x: 200 }, { x: 15 }]; + const accessors = { x: d => d.x }; + const maxWidth = 20; + + const xTickHeight = getXTickWidth(data, accessors, maxWidth); + + expect(xTickHeight).toBe(18); + }); +}); + +describe("getXTickHeight", () => { + it("should get tick height by width assuming 45deg rotation", () => { + expect(getXTickHeight(12)).toBe(9); + }); +}); + +describe("getYTickWidth", () => { + it("should get tick width for y axis assuming 6px char width", () => { + const data = [{ y: 1 }, { y: 20 }, { y: 15 }]; + const accessors = { y: d => d.y }; + + const yTickHeight = getYTickWidth(data, accessors); + + expect(yTickHeight).toBe(12); + }); +}); diff --git a/frontend/src/metabase/static-viz/lib/dates.js b/frontend/src/metabase/static-viz/lib/dates.js new file mode 100644 index 000000000000..9df37d74d0f2 --- /dev/null +++ b/frontend/src/metabase/static-viz/lib/dates.js @@ -0,0 +1,71 @@ +const DEFAULT_OPTIONS = { + date_style: "M/D/YYYY", + date_abbreviate: false, + date_separator: "/", + time_style: "h:mm A", + time_enabled: false, +}; + +const DATE_FORMATS = { + YY: new Intl.DateTimeFormat("en", { year: "2-digit" }), + YYYY: new Intl.DateTimeFormat("en", { year: "numeric" }), + M: new Intl.DateTimeFormat("en", { month: "numeric" }), + MM: new Intl.DateTimeFormat("en", { month: "2-digit" }), + MMM: new 
Intl.DateTimeFormat("en", { month: "short" }), + MMMM: new Intl.DateTimeFormat("en", { month: "long" }), + D: new Intl.DateTimeFormat("en", { day: "numeric" }), + DD: new Intl.DateTimeFormat("en", { day: "2-digit" }), + ddd: new Intl.DateTimeFormat("en", { weekday: "short" }), + dddd: new Intl.DateTimeFormat("en", { weekday: "long" }), + H: new Intl.DateTimeFormat("en", { hour: "numeric", hour12: false }), + HH: new Intl.DateTimeFormat("en", { hour: "2-digit", hour12: false }), + h: new Intl.DateTimeFormat("en", { hour: "numeric", hour12: true }), + hh: new Intl.DateTimeFormat("en", { hour: "2-digit", hour12: true }), + m: new Intl.DateTimeFormat("en", { minute: "numeric" }), + mm: new Intl.DateTimeFormat("en", { minute: "2-digit" }), +}; + +export const formatDate = (date, options) => { + const { + date_style, + date_abbreviate, + date_separator, + time_style, + time_enabled, + } = { ...DEFAULT_OPTIONS, ...options }; + + const formattedDate = date_style + .replace(/MMMM/g, date_abbreviate ? "MMM" : "MMMM") + .replace(/dddd/g, date_abbreviate ? "ddd" : "dddd") + .replace(/\//g, date_separator) + .replace(/\w+/g, field => formatDatePart(date, field)); + + const formattedTime = time_style + .replace(/\//g, date_separator) + .replace(/\w+/g, field => formatDatePart(date, field)); + + return time_enabled ? 
`${formattedDate} ${formattedTime}` : formattedDate; +}; + +const formatDatePart = (date, field) => { + switch (field) { + case "h": + return findDatePart(DATE_FORMATS.h.formatToParts(date), "hour"); + case "hh": + return findDatePart(DATE_FORMATS.hh.formatToParts(date), "hour"); + case "A": + return findDatePart(DATE_FORMATS.h.formatToParts(date), "dayPeriod"); + case "Q": + return `Q${findQuarter(DATE_FORMATS.M.format(date))}`; + default: + return DATE_FORMATS[field].format(date); + } +}; + +const findDatePart = (parts, type) => { + return parts.find(part => part.type === type)?.value; +}; + +const findQuarter = month => { + return Math.floor((month - 1) / 3) + 1; +}; diff --git a/frontend/src/metabase/static-viz/lib/dates.unit.spec.js b/frontend/src/metabase/static-viz/lib/dates.unit.spec.js new file mode 100644 index 000000000000..218373c17ef2 --- /dev/null +++ b/frontend/src/metabase/static-viz/lib/dates.unit.spec.js @@ -0,0 +1,84 @@ +import { formatDate } from "./dates"; + +describe("formatDate", () => { + it("should format a date with default settings", () => { + const date = new Date(2018, 0, 10); + + const text = formatDate(date); + + expect(text).toEqual("1/10/2018"); + }); + + it("should format a date with style option", () => { + const date = new Date(2018, 0, 10); + + const text = formatDate(date, { + date_style: "dddd, MMMM D, YYYY", + }); + + expect(text).toEqual("Wednesday, January 10, 2018"); + }); + + it("should format a date with abbreviate option", () => { + const date = new Date(2018, 0, 10); + + const text = formatDate(date, { + date_style: "dddd, MMMM D, YYYY", + date_abbreviate: true, + }); + + expect(text).toEqual("Wed, Jan 10, 2018"); + }); + + it("should format a date with separator option", () => { + const date = new Date(2018, 0, 10); + + const text = formatDate(date, { + date_style: "M/D/YYYY", + date_separator: "-", + }); + + expect(text).toEqual("1-10-2018"); + }); + + it("should format a date with time", () => { + const date = new 
Date(2018, 0, 10, 15, 10, 20); + + const text = formatDate(date, { + time_enabled: true, + }); + + expect(text).toEqual("1/10/2018 3:10 PM"); + }); + + it("should format a date with time and 24-hour clock", () => { + const date = new Date(2018, 0, 10, 15, 10, 20); + + const text = formatDate(date, { + time_enabled: true, + time_style: "HH:mm", + }); + + expect(text).toEqual("1/10/2018 15:10"); + }); + + it("should format a date at the end of the quarter", () => { + const date = new Date(2018, 2, 10); + + const text = formatDate(date, { + date_style: "Q", + }); + + expect(text).toEqual("Q1"); + }); + + it("should format a date at the start of the quarter", () => { + const date = new Date(2018, 3, 10); + + const text = formatDate(date, { + date_style: "Q", + }); + + expect(text).toEqual("Q2"); + }); +}); diff --git a/frontend/src/metabase/static-viz/lib/numbers.js b/frontend/src/metabase/static-viz/lib/numbers.js new file mode 100644 index 000000000000..eec8809c34c6 --- /dev/null +++ b/frontend/src/metabase/static-viz/lib/numbers.js @@ -0,0 +1,40 @@ +const DEFAULT_OPTIONS = { + number_style: "decimal", + currency: undefined, + currency_style: "symbol", + number_separators: ".,", + decimals: undefined, + scale: 1, + prefix: "", + suffix: "", +}; + +export const formatNumber = (number, options) => { + const { + number_style, + currency, + currency_style, + number_separators: [decimal_separator, grouping_separator], + decimals, + scale, + prefix, + suffix, + } = { ...DEFAULT_OPTIONS, ...options }; + + const format = new Intl.NumberFormat("en", { + style: number_style !== "scientific" ? number_style : "decimal", + notation: number_style !== "scientific" ? "standard" : "scientific", + currency: currency, + currencyDisplay: currency_style, + useGrouping: true, + minimumFractionDigits: decimals, + maximumFractionDigits: decimals != null ? 
decimals : 2, + }); + + const formattedNumber = format + .format(number * scale) + .replace(/\./g, decimal_separator) + .replace(/,/g, grouping_separator); + + return `${prefix}${formattedNumber}${suffix}`; +}; diff --git a/frontend/src/metabase/static-viz/lib/numbers.unit.spec.js b/frontend/src/metabase/static-viz/lib/numbers.unit.spec.js new file mode 100644 index 000000000000..bb9978b9e24f --- /dev/null +++ b/frontend/src/metabase/static-viz/lib/numbers.unit.spec.js @@ -0,0 +1,92 @@ +import { formatNumber } from "./numbers"; + +describe("formatNumber", () => { + it("should format a number with default options", () => { + const number = 1500; + + const text = formatNumber(number); + + expect(text).toEqual("1,500"); + }); + + it("should format a number with fractional digits", () => { + const number = 1500.234; + + const text = formatNumber(number); + + expect(text).toEqual("1,500.23"); + }); + + it("should format currency", () => { + const number = 1500; + + const text = formatNumber(number, { + number_style: "currency", + currency: "USD", + currency_style: "symbol", + }); + + expect(text).toEqual("$1,500.00"); + }); + + it("should format percents", () => { + const number = 0.867; + + const text = formatNumber(number, { + number_style: "percent", + }); + + expect(text).toEqual("86.7%"); + }); + + it("should format a number in scientific notation", () => { + const number = 1200; + + const text = formatNumber(number, { + number_style: "scientific", + }); + + expect(text).toEqual("1.2E3"); + }); + + it("should format a number with custom number separators", () => { + const number = 1500.234; + + const text = formatNumber(number, { + number_separators: ".’", + }); + + expect(text).toEqual("1’500.23"); + }); + + it("should format a number with fixed fractional precision", () => { + const number = 1500; + + const text = formatNumber(number, { + decimals: 2, + }); + + expect(text).toEqual("1,500.00"); + }); + + it("should format a number with scale", () => { + const 
number = 15; + + const text = formatNumber(number, { + scale: 100, + }); + + expect(text).toEqual("1,500"); + }); + + it("should format a number with a prefix and a suffix", () => { + const number = 15; + + const text = formatNumber(number, { + prefix: "prefix", + suffix: "suffix", + }); + + expect(text).toEqual("prefix15suffix"); + }); +}); diff --git a/frontend/src/metabase/static-viz/lib/text.js b/frontend/src/metabase/static-viz/lib/text.js new file mode 100644 index 000000000000..ac742a17e818 --- /dev/null +++ b/frontend/src/metabase/static-viz/lib/text.js @@ -0,0 +1,18 @@ +const CHAR_WIDTH = 6; +const CHAR_ELLIPSES = "…"; + +export const measureText = text => { + return text.length * CHAR_WIDTH; +}; + +export const truncateText = (text, width) => { + if (measureText(text) <= width) { + return text; + } + + while (text.length && measureText(text + CHAR_ELLIPSES) > width) { + text = text.substring(0, text.length - 1); + } + + return text + CHAR_ELLIPSES; +}; diff --git a/frontend/src/metabase/static-viz/lib/text.unit.spec.js b/frontend/src/metabase/static-viz/lib/text.unit.spec.js new file mode 100644 index 000000000000..98456e8a6aa3 --- /dev/null +++ b/frontend/src/metabase/static-viz/lib/text.unit.spec.js @@ -0,0 +1,21 @@ +import { measureText, truncateText } from "./text"; + +describe("measureText", () => { + it("should measure text assuming 6px char width", () => { + expect(measureText("abc")).toBe(18); + }); +}); + +describe("truncateText", () => { + it("should not truncate text with ellipses if there is no overflow", () => { + expect(truncateText("John Doe", 48)).toBe("John Doe"); + }); + + it("should truncate text with ellipses if there is overflow", () => { + expect(truncateText("John Doe", 47)).toBe("John D…"); + }); + + it("should use ellipses in case there is no space for text at all", () => { + expect(truncateText("John Doe", 0)).toBe("…"); + }); +}); diff --git a/frontend/src/metabase/styled-components/layout/FullWidthContainer.js 
b/frontend/src/metabase/styled-components/layout/FullWidthContainer.js new file mode 100644 index 000000000000..310f31bcb7c3 --- /dev/null +++ b/frontend/src/metabase/styled-components/layout/FullWidthContainer.js @@ -0,0 +1,21 @@ +import styled from "styled-components"; +import { + breakpointMinSmall, + breakpointMinMedium, +} from "metabase/styled-components/theme"; + +export const FullWidthContainer = styled.div` + margin: 0 auto; + padding: 0 1em; + width: 100%; + + ${breakpointMinSmall} { + padding-left: 2em; + padding-right: 2em; + } + + ${breakpointMinMedium} { + padding-left: 3em; + padding-right: 3em; + } +`; diff --git a/frontend/src/metabase/styled-components/theme/constants.js b/frontend/src/metabase/styled-components/theme/constants.js new file mode 100644 index 000000000000..ee08cbe63e7e --- /dev/null +++ b/frontend/src/metabase/styled-components/theme/constants.js @@ -0,0 +1 @@ +export const SPACE_LEVELS = ["4px", "8px", "16px", "32px", "64px", "128px"]; diff --git a/frontend/src/metabase/styled-components/theme/index.js b/frontend/src/metabase/styled-components/theme/index.js new file mode 100644 index 000000000000..9d0b5ba75805 --- /dev/null +++ b/frontend/src/metabase/styled-components/theme/index.js @@ -0,0 +1,2 @@ +export * from "./media-queries"; +export * from "./space"; diff --git a/frontend/src/metabase/styled-components/theme/space.js b/frontend/src/metabase/styled-components/theme/space.js new file mode 100644 index 000000000000..b7fb5f0df9e1 --- /dev/null +++ b/frontend/src/metabase/styled-components/theme/space.js @@ -0,0 +1,12 @@ +import { SPACE_LEVELS as levels } from "./constants"; + +/** + * Returns a pixel amount: 4px, 8px, 16px, on to 128px + * @param {number} level must be an integer between 0 and 5 + * @returns {string} + */ +export function space(level = 0) { + const spaceInteger = levels[level]; + + return spaceInteger || ""; +} diff --git a/frontend/src/metabase/styled-components/theme/space.unit.spec.js 
b/frontend/src/metabase/styled-components/theme/space.unit.spec.js new file mode 100644 index 000000000000..895af8f42c99 --- /dev/null +++ b/frontend/src/metabase/styled-components/theme/space.unit.spec.js @@ -0,0 +1,15 @@ +import { space } from "./space"; + +it("returns pixel amount for acceptable levels", () => { + expect(space(0)).toBe("4px"); + expect(space(1)).toBe("8px"); + expect(space(2)).toBe("16px"); + expect(space(3)).toBe("32px"); + expect(space(4)).toBe("64px"); + expect(space(5)).toBe("128px"); +}); + +it("returns empty string for unacceptable integer levels", () => { + expect(space(-1)).toBe(""); + expect(space(6)).toBe(""); +}); diff --git a/frontend/src/metabase/user/actions.js b/frontend/src/metabase/user/actions.js deleted file mode 100644 index e82e9c1c579e..000000000000 --- a/frontend/src/metabase/user/actions.js +++ /dev/null @@ -1,71 +0,0 @@ -import { createAction } from "redux-actions"; -import { t } from "ttag"; -import { createThunkAction } from "metabase/lib/redux"; - -import { UserApi, UtilApi } from "metabase/services"; - -import { refreshCurrentUser } from "metabase/redux/user"; - -// action constants -export const CHANGE_TAB = "CHANGE_TAB"; -export const UPDATE_PASSWORD = "UPDATE_PASSWORD"; -export const UPDATE_USER = "UPDATE_USER"; -export const VALIDATE_PASSWORD = "VALIDATE_PASSWORD"; - -// action creators - -export const setTab = createAction(CHANGE_TAB); - -export const updatePassword = createThunkAction(UPDATE_PASSWORD, function( - user_id, - new_password, - current_password, -) { - return async function(dispatch, getState) { - try { - await UserApi.update_password({ - id: user_id, - password: new_password, - old_password: current_password, - }); - - return { - success: true, - data: { - message: t`Password updated successfully!`, - }, - }; - } catch (error) { - return error; - } - }; -}); - -export const validatePassword = createThunkAction(VALIDATE_PASSWORD, function( - password, -) { - return async function(dispatch, getState) 
{ - return await UtilApi.password_check({ - password: password, - }); - }; -}); - -export const updateUser = createThunkAction(UPDATE_USER, function(user) { - return async function(dispatch, getState) { - try { - await UserApi.update(user); - - dispatch(refreshCurrentUser()); - - return { - success: true, - data: { - message: t`Account updated successfully!`, - }, - }; - } catch (error) { - return error; - } - }; -}); diff --git a/frontend/src/metabase/user/components/SetUserPassword.jsx b/frontend/src/metabase/user/components/SetUserPassword.jsx deleted file mode 100644 index 831aecc8fa5e..000000000000 --- a/frontend/src/metabase/user/components/SetUserPassword.jsx +++ /dev/null @@ -1,47 +0,0 @@ -/* eslint "react/prop-types": "warn" */ -import React, { Component } from "react"; -import PropTypes from "prop-types"; -import { t } from "ttag"; - -import User from "metabase/entities/users"; - -export default class SetUserPassword extends Component { - constructor(props, context) { - super(props, context); - this.state = { formError: null, valid: false }; - } - - static propTypes = { - submitFn: PropTypes.func.isRequired, - validatePassword: PropTypes.func.isRequired, - user: PropTypes.object, - }; - - handleAsyncValidate = async values => { - try { - await this.props.validatePassword(values.password); - return {}; - } catch (error) { - return error.data.errors; - } - }; - - handleSubmit = values => { - return this.props.submitFn({ - user_id: this.props.user.id, - ...values, - }); - }; - - render() { - return ( - - ); - } -} diff --git a/frontend/src/metabase/user/components/UserSettings.jsx b/frontend/src/metabase/user/components/UserSettings.jsx deleted file mode 100644 index 34c5727e72d2..000000000000 --- a/frontend/src/metabase/user/components/UserSettings.jsx +++ /dev/null @@ -1,95 +0,0 @@ -/* eslint "react/prop-types": "warn" */ -import React, { Component } from "react"; -import PropTypes from "prop-types"; -import { Box, Flex } from "grid-styled"; - -import { t 
} from "ttag"; - -import User from "metabase/entities/users"; - -import Radio from "metabase/components/Radio"; -import UserAvatar from "metabase/components/UserAvatar"; - -import LoginHistoryList from "./LoginHistoryList"; -import SetUserPassword from "./SetUserPassword"; - -import { PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS } from "metabase/plugins"; - -export default class UserSettings extends Component { - static propTypes = { - tab: PropTypes.string.isRequired, - user: PropTypes.object.isRequired, - setTab: PropTypes.func.isRequired, - updatePassword: PropTypes.func.isRequired, - }; - - onUpdatePassword(details) { - this.props.updatePassword( - details.user_id, - details.password, - details.old_password, - ); - } - - render() { - const { tab, user, setTab } = this.props; - const showChangePassword = PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.every(f => - f(user), - ); - - return ( - - - - -

    {t`Account settings`}

    -
    - setTab(tab)} - /> -
    - - {tab === "details" ? ( - { - if (locale !== this.props.user.locale) { - window.location.reload(); - } - }} - /> - ) : tab === "password" && showChangePassword ? ( - - ) : tab === "loginHistory" ? ( - - ) : null} - -
    - ); - } -} diff --git a/frontend/src/metabase/user/containers/UserSettingsApp.jsx b/frontend/src/metabase/user/containers/UserSettingsApp.jsx deleted file mode 100644 index 6c90c3dfe35c..000000000000 --- a/frontend/src/metabase/user/containers/UserSettingsApp.jsx +++ /dev/null @@ -1,37 +0,0 @@ -/* eslint "react/prop-types": "warn" */ -import React, { Component } from "react"; -import { connect } from "react-redux"; - -import UserSettings from "../components/UserSettings"; -import { selectors } from "../selectors"; - -import { - setTab, - updatePassword, - updateUser, - validatePassword, -} from "../actions"; - -const mapStateToProps = (state, props) => { - return { - ...selectors(state), - user: state.currentUser, - }; -}; - -const mapDispatchToProps = { - setTab, - updatePassword, - updateUser, - validatePassword, -}; - -@connect( - mapStateToProps, - mapDispatchToProps, -) -export default class UserSettingsApp extends Component { - render() { - return ; - } -} diff --git a/frontend/src/metabase/user/reducers.js b/frontend/src/metabase/user/reducers.js deleted file mode 100644 index 778fc2bf55e8..000000000000 --- a/frontend/src/metabase/user/reducers.js +++ /dev/null @@ -1,26 +0,0 @@ -import { handleActions } from "redux-actions"; - -import { CHANGE_TAB, UPDATE_PASSWORD, UPDATE_USER } from "./actions"; - -export const tab = handleActions( - { - [CHANGE_TAB]: { next: (state, { payload }) => payload }, - }, - "details", -); - -export const updatePasswordResult = handleActions( - { - [CHANGE_TAB]: { next: (state, { payload }) => null }, - [UPDATE_PASSWORD]: { next: (state, { payload }) => payload }, - }, - null, -); - -export const updateUserResult = handleActions( - { - [CHANGE_TAB]: { next: (state, { payload }) => null }, - [UPDATE_USER]: { next: (state, { payload }) => payload }, - }, - null, -); diff --git a/frontend/src/metabase/user/selectors.js b/frontend/src/metabase/user/selectors.js deleted file mode 100644 index 0eb96ba56e77..000000000000 --- 
a/frontend/src/metabase/user/selectors.js +++ /dev/null @@ -1,15 +0,0 @@ -import { createSelector } from "reselect"; - -// our master selector which combines all of our partial selectors above -export const selectors = createSelector( - [ - state => state.user.tab, - state => state.user.updatePasswordResult, - state => state.user.updateUserResult, - ], - (tab, updatePasswordResult, updateUserResult) => ({ - tab, - updatePasswordResult, - updateUserResult, - }), -); diff --git a/frontend/src/metabase/visualizations/components/ChartCaption.jsx b/frontend/src/metabase/visualizations/components/ChartCaption.jsx new file mode 100644 index 000000000000..26178df87156 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/ChartCaption.jsx @@ -0,0 +1,52 @@ +import React, { useCallback } from "react"; +import PropTypes from "prop-types"; +import { iconPropTypes } from "metabase/components/Icon"; +import { ChartCaptionRoot } from "./ChartCaption.styled"; + +const propTypes = { + series: PropTypes.array.isRequired, + settings: PropTypes.object.isRequired, + icon: PropTypes.shape(iconPropTypes), + actionButtons: PropTypes.node, + onChangeCardAndRun: PropTypes.func, +}; + +const ChartCaption = ({ + series, + settings, + icon, + actionButtons, + onChangeCardAndRun, +}) => { + const title = settings["card.title"] || series[0].card.name; + const description = settings["card.description"]; + const data = series._raw || series; + const card = data[0].card; + const cardIds = new Set(data.map(s => s.card.id)); + const canSelectTitle = cardIds.size === 1 && onChangeCardAndRun; + + const handleSelectTitle = useCallback(() => { + onChangeCardAndRun({ + nextCard: card, + seriesIndex: 0, + }); + }, [card, onChangeCardAndRun]); + + if (!title) { + return null; + } + + return ( + + ); +}; + +ChartCaption.propTypes = propTypes; + +export default ChartCaption; diff --git a/frontend/src/metabase/visualizations/components/ChartCaption.styled.jsx 
b/frontend/src/metabase/visualizations/components/ChartCaption.styled.jsx new file mode 100644 index 000000000000..1c8252defe0a --- /dev/null +++ b/frontend/src/metabase/visualizations/components/ChartCaption.styled.jsx @@ -0,0 +1,7 @@ +import styled from "styled-components"; +import LegendCaption from "./legend/LegendCaption"; + +export const ChartCaptionRoot = styled(LegendCaption)` + margin: 0 0.5rem; + flex-shrink: 0; +`; diff --git a/frontend/src/metabase/visualizations/components/ChartSettings.jsx b/frontend/src/metabase/visualizations/components/ChartSettings.jsx index 5881ffc5fa50..0a193f84bf10 100644 --- a/frontend/src/metabase/visualizations/components/ChartSettings.jsx +++ b/frontend/src/metabase/visualizations/components/ChartSettings.jsx @@ -247,6 +247,7 @@ class ChartSettings extends Component { options={sectionNames} optionNameFn={v => v} optionValueFn={v => v} + optionKeyFn={v => v} variant="bubble" /> ); diff --git a/frontend/src/metabase/visualizations/components/LeafletGridHeatMap.jsx b/frontend/src/metabase/visualizations/components/LeafletGridHeatMap.jsx index af180937bf65..bc97ac0680b5 100644 --- a/frontend/src/metabase/visualizations/components/LeafletGridHeatMap.jsx +++ b/frontend/src/metabase/visualizations/components/LeafletGridHeatMap.jsx @@ -3,10 +3,32 @@ import L from "leaflet"; import { t } from "ttag"; import d3 from "d3"; -import { rangeForValue } from "metabase/lib/dataset"; import { color } from "metabase/lib/colors"; +import { rangeForValue } from "metabase/lib/dataset"; +import { isNumeric, isMetric } from "metabase/lib/schema_metadata"; +import { computeNumericDataInverval } from "../lib/numeric"; + +const isValidCoordinatesColumn = column => + column.binning_info || (column.source === "native" && isNumeric(column)); + +const computeValueRange = (value, values) => [ + value, + value + computeNumericDataInverval(values), +]; + +const getValueRange = (value, column, values) => { + const binningBasedResult = rangeForValue(value, 
column); + return binningBasedResult || computeValueRange(value, values); +}; export default class LeafletGridHeatMap extends LeafletMap { + static isSensible({ cols }) { + return ( + cols.filter(isValidCoordinatesColumn).length >= 2 && + cols.filter(isMetric).length > 0 + ); + } + componentDidMount() { super.componentDidMount(); @@ -22,10 +44,15 @@ export default class LeafletGridHeatMap extends LeafletMap { const { points, min, max } = this.props; const { latitudeColumn, longitudeColumn } = this._getLatLonColumns(); - if (!latitudeColumn.binning_info || !longitudeColumn.binning_info) { + if ( + !isValidCoordinatesColumn(latitudeColumn) || + !isValidCoordinatesColumn(longitudeColumn) + ) { throw new Error(t`Grid map requires binned longitude/latitude.`); } + const { latitudeIndex, longitudeIndex } = this._getLatLonIndexes(); + const colorScale = d3.scale .linear() .domain([min, max]) @@ -34,7 +61,13 @@ export default class LeafletGridHeatMap extends LeafletMap { const gridSquares = gridLayer.getLayers(); const totalSquares = Math.max(points.length, gridSquares.length); + + const latitudeValues = points.map(row => row[latitudeIndex]); + const longitureValues = points.map(row => row[longitudeIndex]); + for (let i = 0; i < totalSquares; i++) { + const [latitude, longiture, metric] = points[i]; + if (i >= points.length) { gridLayer.removeLayer(gridSquares[i]); } @@ -45,9 +78,19 @@ export default class LeafletGridHeatMap extends LeafletMap { } if (i < points.length) { - gridSquares[i].setStyle({ color: colorScale(points[i][2]) }); - const [latMin, latMax] = rangeForValue(points[i][0], latitudeColumn); - const [lonMin, lonMax] = rangeForValue(points[i][1], longitudeColumn); + gridSquares[i].setStyle({ color: colorScale(metric) }); + + const [latMin, latMax] = getValueRange( + latitude, + latitudeColumn, + latitudeValues, + ); + + const [lonMin, lonMax] = getValueRange( + longiture, + longitudeColumn, + longitureValues, + ); gridSquares[i].setBounds([[latMin, lonMin], 
[latMax, lonMax]]); } } diff --git a/frontend/src/metabase/visualizations/components/LegendHeader.jsx b/frontend/src/metabase/visualizations/components/LegendHeader.jsx index 7965363b3e95..aaf45d2a656a 100644 --- a/frontend/src/metabase/visualizations/components/LegendHeader.jsx +++ b/frontend/src/metabase/visualizations/components/LegendHeader.jsx @@ -4,7 +4,7 @@ import PropTypes from "prop-types"; import styles from "./Legend.css"; import ExplicitSize from "../../components/ExplicitSize"; -import Icon from "metabase/components/Icon"; +import Icon, { iconPropTypes } from "metabase/components/Icon"; import LegendItem from "./LegendItem"; import cx from "classnames"; @@ -26,6 +26,7 @@ export default class LegendHeader extends Component { actionButtons: PropTypes.node, description: PropTypes.string, classNameWidgets: PropTypes.string, + icon: PropTypes.shape(iconPropTypes), }; static defaultProps = { @@ -40,6 +41,7 @@ export default class LegendHeader extends Component { hovered, actionButtons, + icon, onHoverChange, onChangeCardAndRun, settings, @@ -82,6 +84,7 @@ export default class LegendHeader extends Component { + {icon && ( + + + + )} {showDot && (
    this.maxMetricsSupported) { + throw new Error(t`${this.uiName} chart does not support multiple series`); + } + const singleSeriesHasNoRows = ({ data: { cols, rows } }) => rows.length < 1; if (_.every(series, singleSeriesHasNoRows)) { throw new MinRowsError(1, 0); @@ -189,10 +200,13 @@ export default class LineAreaBarChart extends Component { } static propTypes = { + card: PropTypes.object.isRequired, series: PropTypes.array.isRequired, + settings: PropTypes.object.isRequired, actionButtons: PropTypes.node, showTitle: PropTypes.bool, isDashboard: PropTypes.bool, + headerIcon: PropTypes.shape(iconPropTypes), }; static defaultProps = {}; @@ -255,80 +269,158 @@ export default class LineAreaBarChart extends Component { return settings; } - render() { + getLegendSettings() { const { + card, series, - hovered, + settings, showTitle, actionButtons, - onChangeCardAndRun, - onVisualizationClick, - visualizationIsClickable, onAddSeries, onEditSeries, onRemoveSeries, + onChangeCardAndRun, } = this.props; - const settings = this.getSettings(); - - const hasMultiSeriesHeaderSeries = !!( - series.length > 1 || - onAddSeries || - onEditSeries || - onRemoveSeries - ); + const title = settings["card.title"] || card.name; + const description = series["card.description"]; + const rawSeries = series._raw || series; + const cardIds = new Set(rawSeries.map(s => s.card.id)); const hasTitle = showTitle && settings["card.title"]; + const hasBreakout = card._breakoutColumn != null; + const canSelectTitle = cardIds.size === 1 && onChangeCardAndRun; + + const hasMultipleSeries = series.length > 1; + const canChangeSeries = onAddSeries || onEditSeries || onRemoveSeries; + const hasLegendButtons = !hasTitle && actionButtons; + const hasLegend = hasMultipleSeries || canChangeSeries || hasLegendButtons; + + const seriesSettings = + settings.series && series.map(single => settings.series(single)); + const labels = seriesSettings + ? 
seriesSettings.map(s => s.title) + : series.map(single => single.card.name); + const colors = seriesSettings + ? seriesSettings.map(s => s.color) + : Object.values(normal); + + return { + title, + description, + labels, + colors, + hasTitle, + hasLegend, + hasBreakout, + canSelectTitle, + }; + } - const defaultSeries = [ - { - card: { - name: " ", - }, - }, - ]; + handleSelectTitle = () => { + const { card, onChangeCardAndRun } = this.props; + + if (onChangeCardAndRun) { + onChangeCardAndRun({ + nextCard: card, + seriesIndex: 0, + }); + } + }; + + handleSelectSeries = (event, index) => { + const { + card, + series, + visualizationIsClickable, + onEditSeries, + onVisualizationClick, + onChangeCardAndRun, + } = this.props; + + const single = series[index]; + const hasBreakout = card._breakoutColumn != null; + + if (onEditSeries && !hasBreakout) { + onEditSeries(event, index); + } else if (single.clicked && visualizationIsClickable(single.clicked)) { + onVisualizationClick({ + ...single.clicked, + element: event.currentTarget, + }); + } else if (onChangeCardAndRun) { + onChangeCardAndRun({ + nextCard: single.card, + seriesIndex: index, + }); + } + }; + + render() { + const { + series, + hovered, + headerIcon, + actionButtons, + isFullscreen, + isQueryBuilder, + onHoverChange, + onAddSeries, + onRemoveSeries, + } = this.props; + + const { + title, + description, + labels, + colors, + hasTitle, + hasLegend, + hasBreakout, + canSelectTitle, + } = this.getLegendSettings(); return ( -
    {hasTitle && ( - )} - {hasMultiSeriesHeaderSeries || (!hasTitle && actionButtons) ? ( // always show action buttons if we have them - + - ) : null} - -
    + + ); } } diff --git a/frontend/src/metabase/visualizations/components/LineAreaBarChart.styled.jsx b/frontend/src/metabase/visualizations/components/LineAreaBarChart.styled.jsx new file mode 100644 index 000000000000..ab55ed7a6364 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/LineAreaBarChart.styled.jsx @@ -0,0 +1,15 @@ +import styled from "styled-components"; +import LegendCaption from "./legend/LegendCaption"; + +export const LineAreaBarChartRoot = styled.div` + display: flex; + flex-direction: column; + padding: ${({ isQueryBuilder }) => + isQueryBuilder ? "1rem 1rem 1rem 2rem" : "0.5rem 1rem"}; + overflow: hidden; +`; + +export const ChartLegendCaption = styled(LegendCaption)` + flex: 0 0 auto; + margin-bottom: 0.5rem; +`; diff --git a/frontend/src/metabase/visualizations/components/TableInteractive.jsx b/frontend/src/metabase/visualizations/components/TableInteractive.jsx index 6c9ed57e1fed..8e1ef43aaa7d 100644 --- a/frontend/src/metabase/visualizations/components/TableInteractive.jsx +++ b/frontend/src/metabase/visualizations/components/TableInteractive.jsx @@ -356,7 +356,14 @@ export default class TableInteractive extends Component { isPivoted, series, ) { - const clickedRowData = getTableClickedObjectRowData(series, rowIndex); + const clickedRowData = getTableClickedObjectRowData( + series, + rowIndex, + columnIndex, + isPivoted, + data, + ); + return getTableCellClickedObject( data, settings, @@ -869,7 +876,6 @@ export default class TableInteractive extends Component { _benchmark() { const grid = ReactDOM.findDOMNode(this.grid); const height = grid.scrollHeight; - console.log("height", height); let top = 0; let start = Date.now(); // console.profile(); diff --git a/frontend/src/metabase/visualizations/components/TableSimple.jsx b/frontend/src/metabase/visualizations/components/TableSimple.jsx index 8430c65af114..34c2126278ea 100644 --- a/frontend/src/metabase/visualizations/components/TableSimple.jsx +++ 
b/frontend/src/metabase/visualizations/components/TableSimple.jsx @@ -208,6 +208,9 @@ export default class TableSimple extends Component { const clickedRowData = getTableClickedObjectRowData( series, rowIndex, + columnIndex, + isPivoted, + data, ); const column = cols[columnIndex]; const clicked = getTableCellClickedObject( diff --git a/frontend/src/metabase/visualizations/components/TitleLegendHeader.jsx b/frontend/src/metabase/visualizations/components/TitleLegendHeader.jsx deleted file mode 100644 index bb7f8edc925c..000000000000 --- a/frontend/src/metabase/visualizations/components/TitleLegendHeader.jsx +++ /dev/null @@ -1,52 +0,0 @@ -/* eslint-disable react/prop-types */ -import React from "react"; -import _ from "underscore"; -import { getIn } from "icepick"; - -import LegendHeader from "./LegendHeader"; - -export default function TitleLegendHeader({ - series, - settings, - onChangeCardAndRun, - actionButtons, -}) { - const originalSeries = series._raw || series; - const cardIds = _.uniq(originalSeries.map(s => s.card.id)); - const isComposedOfMultipleQuestions = cardIds.length > 1; - const name = settings["card.title"] || getIn(series, [0, "card", "name"]); - - if (name) { - const titleHeaderSeries = [ - { - card: { - name, - ...(isComposedOfMultipleQuestions - ? 
{} - : { - id: cardIds[0], - dataset_query: originalSeries[0].card.dataset_query, - display: originalSeries[0].card.display, - }), - }, - }, - ]; - - return ( - - ); - } else { - // If the title isn't provided in settings, render nothing - return null; - } -} diff --git a/frontend/src/metabase/visualizations/components/Visualization.jsx b/frontend/src/metabase/visualizations/components/Visualization.jsx index 71b0f4bb14c8..ef75b79850d8 100644 --- a/frontend/src/metabase/visualizations/components/Visualization.jsx +++ b/frontend/src/metabase/visualizations/components/Visualization.jsx @@ -1,7 +1,7 @@ import React from "react"; import ExplicitSize from "metabase/components/ExplicitSize"; -import TitleLegendHeader from "metabase/visualizations/components/TitleLegendHeader"; +import ChartCaption from "metabase/visualizations/components/ChartCaption"; import ChartTooltip from "metabase/visualizations/components/ChartTooltip"; import ChartClickActions from "metabase/visualizations/components/ChartClickActions"; import LoadingSpinner from "metabase/components/LoadingSpinner"; @@ -63,6 +63,14 @@ type Props = { isDashboard: boolean, isEditing: boolean, isSettings: boolean, + isQueryBuilder: boolean, + + headerIcon?: { + name: string, + color?: string, + size?: Number, + tooltip?: string, + }, actionButtons: React.Element, @@ -154,6 +162,7 @@ export default class Visualization extends React.PureComponent { isDashboard: false, isEditing: false, isSettings: false, + isQueryBuilder: false, onUpdateVisualizationSettings: () => {}, // prefer passing in a function that doesn't cause the application to reload onChangeLocation: location => { @@ -372,6 +381,7 @@ export default class Visualization extends React.PureComponent { isDashboard, width, height, + headerIcon, errorIcon, isSlow, expectedDuration, @@ -470,7 +480,7 @@ export default class Visualization extends React.PureComponent { ); - let { gridSize, gridUnit, classNameWidgets } = this.props; + let { gridSize, gridUnit } = 
this.props; if ( !gridSize && gridUnit && @@ -497,25 +507,28 @@ export default class Visualization extends React.PureComponent { const CardVisualization = visualization; + const title = settings["card.title"]; + const hasHeaderContent = title || extra; + const isHeaderEnabled = !(visualization && visualization.noHeader); + + const hasHeader = + (showTitle && + hasHeaderContent && + (loading || error || noResults || isHeaderEnabled)) || + replacementContent; + return (
    - {(showTitle && - (settings["card.title"] || extra) && - (loading || - error || - noResults || - !(visualization && visualization.noHeader))) || - replacementContent ? ( + {!!hasHeader && (
    -
    - ) : null} + )} {replacementContent ? ( replacementContent ) : // on dashboards we should show the "No results!" warning if there are no rows or there's a MinRowsError and actualRows === 0 @@ -586,6 +599,7 @@ export default class Visualization extends React.PureComponent { card={series[0].card} // convenience for single-series visualizations data={series[0].data} // convenience for single-series visualizations hovered={hovered} + headerIcon={hasHeader ? null : headerIcon} onHoverChange={this.handleHoverChange} onVisualizationClick={this.handleVisualizationClick} visualizationIsClickable={this.visualizationIsClickable} diff --git a/frontend/src/metabase/visualizations/components/legend/Legend.jsx b/frontend/src/metabase/visualizations/components/legend/Legend.jsx new file mode 100644 index 000000000000..14b5e1683901 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/legend/Legend.jsx @@ -0,0 +1,117 @@ +import React, { useCallback, useRef, useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Popover from "metabase/components/Popover"; +import { + LegendLink, + LegendLinkContainer, + LegendPopoverContainer, + LegendRoot, +} from "./Legend.styled"; +import LegendItem from "./LegendItem"; + +const POPOVER_BORDER = 1; +const POPOVER_PADDING = 8; +const POPOVER_OFFSET = POPOVER_BORDER + POPOVER_PADDING; + +const propTypes = { + className: PropTypes.string, + labels: PropTypes.array.isRequired, + colors: PropTypes.array.isRequired, + hovered: PropTypes.object, + visibleIndex: PropTypes.number, + visibleLength: PropTypes.number, + isVertical: PropTypes.bool, + onHoverChange: PropTypes.func, + onAddSeries: PropTypes.func, + onSelectSeries: PropTypes.func, + onRemoveSeries: PropTypes.func, +}; + +const Legend = ({ + className, + labels, + colors, + hovered, + visibleIndex = 0, + visibleLength = labels.length, + isVertical, + onHoverChange, + onSelectSeries, + onRemoveSeries, +}) => { + const targetRef = 
useRef(); + const [isOpened, setIsOpened] = useState(null); + const [maxWidth, setMaxWidth] = useState(0); + + const handleOpen = useCallback(() => { + setIsOpened(true); + setMaxWidth(targetRef.current.offsetWidth); + }, []); + + const handleClose = useCallback(() => { + setIsOpened(false); + setMaxWidth(0); + }, []); + + const overflowIndex = visibleIndex + visibleLength; + const visibleLabels = labels.slice(visibleIndex, overflowIndex); + const overflowLength = labels.length - overflowIndex; + + return ( + + {visibleLabels.map((label, index) => { + const itemIndex = index + visibleIndex; + + return ( + + ); + })} + {overflowLength > 0 && ( + + + {t`And ${overflowLength} more`} + + + )} + {isOpened && ( + + + + + + )} + + ); +}; + +Legend.propTypes = propTypes; + +export default Legend; diff --git a/frontend/src/metabase/visualizations/components/legend/Legend.styled.jsx b/frontend/src/metabase/visualizations/components/legend/Legend.styled.jsx new file mode 100644 index 000000000000..149267a179ac --- /dev/null +++ b/frontend/src/metabase/visualizations/components/legend/Legend.styled.jsx @@ -0,0 +1,26 @@ +import styled from "styled-components"; +import colors, { darken } from "metabase/lib/colors"; + +export const LegendRoot = styled.div` + display: flex; + flex-direction: ${({ isVertical }) => (isVertical ? "column" : "row")}; + overflow: ${({ isVertical }) => (isVertical ? "" : "hidden")}; +`; + +export const LegendLink = styled.div` + cursor: pointer; + color: ${colors["brand"]}; + font-weight: bold; + + &:hover { + color: ${darken(colors["brand"])}; + } +`; + +export const LegendLinkContainer = styled.div` + margin-top: ${({ isVertical }) => (isVertical ? 
"0.5rem" : "")}; +`; + +export const LegendPopoverContainer = styled.div` + padding: 0.5rem; +`; diff --git a/frontend/src/metabase/visualizations/components/legend/LegendActions.jsx b/frontend/src/metabase/visualizations/components/legend/LegendActions.jsx new file mode 100644 index 000000000000..882b9ede63a0 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/legend/LegendActions.jsx @@ -0,0 +1,15 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { LegendActionsRoot } from "metabase/visualizations/components/legend/LegendActions.styled"; + +const propTypes = { + children: PropTypes.node, +}; + +const LegendActions = ({ children }) => { + return {children}; +}; + +LegendActions.propTypes = propTypes; + +export default LegendActions; diff --git a/frontend/src/metabase/visualizations/components/legend/LegendActions.styled.jsx b/frontend/src/metabase/visualizations/components/legend/LegendActions.styled.jsx new file mode 100644 index 000000000000..67de69df8b48 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/legend/LegendActions.styled.jsx @@ -0,0 +1,7 @@ +import styled from "styled-components"; + +export const LegendActionsRoot = styled.div` + flex: 0 0 auto; + position: relative; + margin-left: auto; +`; diff --git a/frontend/src/metabase/visualizations/components/legend/LegendCaption.jsx b/frontend/src/metabase/visualizations/components/legend/LegendCaption.jsx new file mode 100644 index 000000000000..215f247290f1 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/legend/LegendCaption.jsx @@ -0,0 +1,52 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { iconPropTypes } from "metabase/components/Icon"; +import Tooltip from "metabase/components/Tooltip"; +import Ellipsified from "metabase/components/Ellipsified"; +import LegendActions from "./LegendActions"; +import { + LegendCaptionRoot, + LegendDescriptionIcon, + LegendLabel, + LegendLabelIcon, +} from 
"./LegendCaption.styled"; + +const propTypes = { + className: PropTypes.string, + title: PropTypes.string, + description: PropTypes.string, + icon: PropTypes.shape(iconPropTypes), + actionButtons: PropTypes.node, + onSelectTitle: PropTypes.func, +}; + +const LegendCaption = ({ + className, + title, + description, + icon, + actionButtons, + onSelectTitle, +}) => { + return ( + + {icon && } + + {title} + + {description && ( + + + + )} + {actionButtons && {actionButtons}} + + ); +}; + +LegendCaption.propTypes = propTypes; + +export default LegendCaption; diff --git a/frontend/src/metabase/visualizations/components/legend/LegendCaption.styled.jsx b/frontend/src/metabase/visualizations/components/legend/LegendCaption.styled.jsx new file mode 100644 index 000000000000..e29909ae0a3f --- /dev/null +++ b/frontend/src/metabase/visualizations/components/legend/LegendCaption.styled.jsx @@ -0,0 +1,30 @@ +import styled from "styled-components"; +import colors from "metabase/lib/colors"; +import Icon from "metabase/components/Icon"; + +export const LegendCaptionRoot = styled.div` + display: flex; + align-items: center; + min-width: 0; +`; + +export const LegendLabel = styled.div` + color: ${colors["text-dark"]}; + font-weight: bold; + cursor: ${({ onClick }) => (onClick ? "pointer" : "")}; + + &:hover { + color: ${({ onClick }) => (onClick ? 
colors["brand"] : "")}; + } +`; + +export const LegendLabelIcon = styled(Icon)` + padding-right: 0.25rem; +`; + +export const LegendDescriptionIcon = styled(Icon).attrs({ + name: "info", +})` + color: ${colors["text-medium"]}; + margin-left: 0.5rem; +`; diff --git a/frontend/src/metabase/visualizations/components/legend/LegendItem.jsx b/frontend/src/metabase/visualizations/components/legend/LegendItem.jsx new file mode 100644 index 000000000000..d88b4f644514 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/legend/LegendItem.jsx @@ -0,0 +1,69 @@ +import React, { memo } from "react"; +import PropTypes from "prop-types"; +import { + LegendItemDot, + LegendItemLabel, + LegendItemRemoveIcon, + LegendItemRoot, + LegendItemTitle, +} from "./LegendItem.styled"; +import Ellipsified from "metabase/components/Ellipsified"; + +const propTypes = { + label: PropTypes.string, + index: PropTypes.number, + color: PropTypes.string, + isMuted: PropTypes.bool, + isVertical: PropTypes.bool, + onHoverChange: PropTypes.func, + onSelectSeries: PropTypes.func, + onRemoveSeries: PropTypes.func, +}; + +const LegendItem = ({ + label, + index, + color, + isMuted, + isVertical, + onHoverChange, + onSelectSeries, + onRemoveSeries, +}) => { + const handleItemClick = event => { + onSelectSeries && onSelectSeries(event, index); + }; + + const handleItemMouseEnter = event => { + onHoverChange && onHoverChange({ index, element: event.currentTarget }); + }; + + const handleItemMouseLeave = () => { + onHoverChange && onHoverChange(); + }; + + const handleRemoveClick = event => { + onRemoveSeries && onRemoveSeries(event, index); + }; + + return ( + + + + + {label} + + + {onRemoveSeries && } + + ); +}; + +LegendItem.propTypes = propTypes; + +export default memo(LegendItem); diff --git a/frontend/src/metabase/visualizations/components/legend/LegendItem.styled.jsx b/frontend/src/metabase/visualizations/components/legend/LegendItem.styled.jsx new file mode 100644 index 
000000000000..a15c9561bc2e --- /dev/null +++ b/frontend/src/metabase/visualizations/components/legend/LegendItem.styled.jsx @@ -0,0 +1,56 @@ +import styled from "styled-components"; +import colors from "metabase/lib/colors"; +import Icon from "metabase/components/Icon"; + +export const LegendItemRoot = styled.div` + display: flex; + align-items: center; + min-width: 0; + overflow: hidden; + + &:not(:first-child) { + margin-top: ${({ isVertical }) => (isVertical ? "0.5rem" : "")}; + margin-left: ${({ isVertical }) => (isVertical ? "" : "0.75rem")}; + } +`; + +export const LegendItemLabel = styled.div` + display: flex; + align-items: baseline; + opacity: ${({ isMuted }) => (isMuted ? "0.4" : "1")}; + cursor: ${({ onClick }) => (onClick ? "pointer" : "")}; + overflow: hidden; + transition: opacity 0.25s linear; + + &:hover { + color: ${({ onMouseEnter }) => (onMouseEnter ? colors["brand"] : "")}; + } +`; + +export const LegendItemDot = styled.div` + flex: 0 0 auto; + width: 0.75rem; + height: 0.75rem; + border-radius: 50%; + background-color: ${({ color }) => color}; +`; + +export const LegendItemTitle = styled.div` + color: ${colors["text-dark"]}; + font-weight: bold; + margin-left: 0.5rem; + overflow: hidden; +`; + +export const LegendItemRemoveIcon = styled(Icon).attrs({ + name: "close", + size: 12, +})` + color: ${colors["text-light"]}; + cursor: pointer; + margin-left: 0.5rem; + + &:hover { + color: ${colors["text-medium"]}; + } +`; diff --git a/frontend/src/metabase/visualizations/components/legend/LegendLayout.jsx b/frontend/src/metabase/visualizations/components/legend/LegendLayout.jsx new file mode 100644 index 000000000000..b04c60f2a9d9 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/legend/LegendLayout.jsx @@ -0,0 +1,100 @@ +import React from "react"; +import PropTypes from "prop-types"; +import _ from "underscore"; +import ExplicitSize from "metabase/components/ExplicitSize"; +import Legend from "./Legend"; +import LegendActions from 
"./LegendActions"; +import { + ChartContainer, + LegendContainer, + LegendLayoutRoot, + MainContainer, +} from "./LegendLayout.styled"; + +const MIN_ITEM_WIDTH = 100; +const MIN_ITEM_HEIGHT = 25; +const MIN_ITEM_HEIGHT_LARGE = 31; +const MIN_LEGEND_WIDTH = 400; + +const propTypes = { + className: PropTypes.string, + labels: PropTypes.array.isRequired, + colors: PropTypes.array.isRequired, + hovered: PropTypes.object, + width: PropTypes.number, + height: PropTypes.number, + hasLegend: PropTypes.bool, + actionButtons: PropTypes.node, + isFullscreen: PropTypes.bool, + isQueryBuilder: PropTypes.bool, + children: PropTypes.node, + onHoverChange: PropTypes.func, + onAddSeries: PropTypes.func, + onSelectSeries: PropTypes.func, + onRemoveSeries: PropTypes.func, +}; + +const LegendLayout = ({ + className, + labels, + colors, + hovered, + width = 0, + height = 0, + hasLegend, + actionButtons, + isFullscreen, + isQueryBuilder, + children, + onHoverChange, + onAddSeries, + onSelectSeries, + onRemoveSeries, +}) => { + const itemHeight = !isFullscreen ? MIN_ITEM_HEIGHT : MIN_ITEM_HEIGHT_LARGE; + const maxXItems = Math.floor(width / MIN_ITEM_WIDTH); + const maxYItems = Math.floor(height / itemHeight); + const maxYLabels = Math.max(maxYItems - 1, 0); + const minYLabels = labels.length > maxYItems ? maxYLabels : labels.length; + + const isNarrow = width < MIN_LEGEND_WIDTH; + const isVertical = maxXItems < labels.length; + const isVisible = hasLegend && !(isVertical && isNarrow); + const visibleLength = isVertical ? 
minYLabels : labels.length; + + return ( + + {isVisible && ( + + + {!isVertical && actionButtons && ( + {actionButtons} + )} + + )} + + {isVertical && actionButtons && ( + {actionButtons} + )} + {children} + + + ); +}; + +LegendLayout.propTypes = propTypes; + +export default _.compose(ExplicitSize())(LegendLayout); diff --git a/frontend/src/metabase/visualizations/components/legend/LegendLayout.styled.jsx b/frontend/src/metabase/visualizations/components/legend/LegendLayout.styled.jsx new file mode 100644 index 000000000000..a902602707e3 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/legend/LegendLayout.styled.jsx @@ -0,0 +1,30 @@ +import styled from "styled-components"; + +export const LegendLayoutRoot = styled.div` + display: flex; + flex: 1 1 auto; + flex-direction: ${({ isVertical }) => (isVertical ? "row" : "column")}; + min-width: 0; + min-height: 0; +`; + +export const MainContainer = styled.div` + display: flex; + flex: 1 0 auto; + flex-direction: column; +`; + +export const LegendContainer = styled.div` + display: ${({ isVertical }) => (isVertical ? "block" : "flex")}; + max-width: ${({ isVertical }) => (isVertical ? "25%" : "")}; + max-width: ${({ isVertical }) => (isVertical ? "min(25%, 20rem)" : "")}; + margin-right: ${({ isVertical, isQueryBuilder }) => + isVertical ? (isQueryBuilder ? "2.5rem" : "0.5rem") : ""}; + margin-bottom: ${({ isVertical }) => (isVertical ? 
"" : "0.5rem")}; +`; + +export const ChartContainer = styled.div` + display: flex; + flex: 1 0 auto; + flex-direction: column; +`; diff --git a/frontend/src/metabase/visualizations/components/settings/ChartNestedSettingColumns.jsx b/frontend/src/metabase/visualizations/components/settings/ChartNestedSettingColumns.jsx index e40557cd48c5..494cc71bd521 100644 --- a/frontend/src/metabase/visualizations/components/settings/ChartNestedSettingColumns.jsx +++ b/frontend/src/metabase/visualizations/components/settings/ChartNestedSettingColumns.jsx @@ -56,6 +56,27 @@ class ColumnWidgets extends React.Component { } } + componentDidUpdate(prevProps) { + const { + setSidebarPropsOverride, + object, + onEndShowWidget, + currentSectionHasColumnSettings, + } = this.props; + + if ( + displayNameForColumn(object) !== displayNameForColumn(prevProps.object) || + onEndShowWidget !== prevProps.onEndShowWidget + ) { + if (setSidebarPropsOverride && !currentSectionHasColumnSettings) { + setSidebarPropsOverride({ + title: displayNameForColumn(object), + onBack: onEndShowWidget, + }); + } + } + } + componentWillUnmount() { const { setSidebarPropsOverride } = this.props; if (setSidebarPropsOverride) { diff --git a/frontend/src/metabase/visualizations/components/settings/ChartNestedSettingSeries.jsx b/frontend/src/metabase/visualizations/components/settings/ChartNestedSettingSeries.jsx index b25478038077..92231c63f938 100644 --- a/frontend/src/metabase/visualizations/components/settings/ChartNestedSettingSeries.jsx +++ b/frontend/src/metabase/visualizations/components/settings/ChartNestedSettingSeries.jsx @@ -1,7 +1,7 @@ import React from "react"; import ColorPicker from "metabase/components/ColorPicker"; -import ButtonGroup from "metabase/components/ButtonGroup"; +import { SegmentedControl } from "metabase/components/SegmentedControl"; import Icon from "metabase/components/Icon"; import IconWrapper from "metabase/components/IconWrapper"; @@ -59,15 +59,18 @@ export default class 
ChartNestedSettingSeries extends React.Component { } /> {isLineAreaBar && !isStacked ? ( - o} - optionNameFn={o => } + options={[ + { value: "line", icon: "line" }, + { value: "area", icon: "area" }, + { value: "bar", icon: "bar" }, + ]} onChange={value => onChangeObjectSettings(single, { display: value }) } + fullWidth /> ) : null} {objects.length > 1 ? ( diff --git a/frontend/src/metabase/visualizations/components/settings/ChartSettingButtonGroup.jsx b/frontend/src/metabase/visualizations/components/settings/ChartSettingButtonGroup.jsx deleted file mode 100644 index e531f342738b..000000000000 --- a/frontend/src/metabase/visualizations/components/settings/ChartSettingButtonGroup.jsx +++ /dev/null @@ -1,17 +0,0 @@ -/* eslint-disable react/prop-types */ -import React from "react"; - -import Icon from "metabase/components/Icon"; -import ButtonGroup from "metabase/components/ButtonGroup"; - -const ChartSettingButtonGroup = ({ value, onChange, options, ...props }) => ( - (o.icon ? : o.name)} - /> -); - -export default ChartSettingButtonGroup; diff --git a/frontend/src/metabase/visualizations/components/settings/ChartSettingFieldPicker.jsx b/frontend/src/metabase/visualizations/components/settings/ChartSettingFieldPicker.jsx index fb108c00b37a..63ee49f82b55 100644 --- a/frontend/src/metabase/visualizations/components/settings/ChartSettingFieldPicker.jsx +++ b/frontend/src/metabase/visualizations/components/settings/ChartSettingFieldPicker.jsx @@ -54,6 +54,7 @@ const ChartSettingFieldPicker = ({ /> )} { + return ( + this.state.objectKeyOverride || + this.props.initialKey || + (this.props.objects.length === 1 + ? getObjectKey(this.props.objects[0]) + : null) + ); + }; handleChangeEditingObject = (editingObject: ?NestedObject) => { + // objectKeyOverride allows child components to set the editing object key to a different value than is derived + // from the props. For example, this is used by the "More options" button in ChartNestedSettingSeries. 
this.setState({ - editingObjectKey: editingObject ? getObjectKey(editingObject) : null, + objectKeyOverride: editingObject ? getObjectKey(editingObject) : null, }); // special prop to notify ChartSettings it should unswap replaced widget if (!editingObject && this.props.onEndShowWidget) { @@ -94,7 +90,7 @@ const chartSettingNestedSettings = ({ }; handleChangeSettingsForEditingObject = (newSettings: Settings) => { - const { editingObjectKey } = this.state; + const editingObjectKey = this.getEditingObjectKey(); if (editingObjectKey) { this.handleChangeSettingsForObjectKey(editingObjectKey, newSettings); } @@ -126,8 +122,7 @@ const chartSettingNestedSettings = ({ render() { const { series, objects, extra } = this.props; - const { editingObjectKey } = this.state; - + const editingObjectKey = this.getEditingObjectKey(); if (editingObjectKey) { const editingObject = _.find( objects, diff --git a/frontend/src/metabase/visualizations/components/settings/ChartSettingOrderedColumns.jsx b/frontend/src/metabase/visualizations/components/settings/ChartSettingOrderedColumns.jsx index 13a0243a358c..e2c6c337cd0c 100644 --- a/frontend/src/metabase/visualizations/components/settings/ChartSettingOrderedColumns.jsx +++ b/frontend/src/metabase/visualizations/components/settings/ChartSettingOrderedColumns.jsx @@ -1,16 +1,17 @@ /* eslint-disable react/prop-types */ import React, { Component } from "react"; import { t } from "ttag"; +import _ from "underscore"; -import ColumnItem from "./ColumnItem"; - -import { SortableContainer, SortableElement } from "react-sortable-hoc"; - +import { + SortableContainer, + SortableElement, +} from "metabase/components/sortable"; import StructuredQuery from "metabase-lib/lib/queries/StructuredQuery"; import { keyForColumn, findColumnForColumnSetting } from "metabase/lib/dataset"; import { getFriendlyName } from "metabase/visualizations/lib/utils"; -import _ from "underscore"; +import ColumnItem from "./ColumnItem"; const SortableColumn = 
SortableElement( ({ columnSetting, getColumnName, onEdit, onRemove }) => ( @@ -124,7 +125,6 @@ export default class ChartSettingOrderedColumns extends Component { onRemove={this.handleDisable} onSortEnd={this.handleSortEnd} distance={5} - helperClass="z5" /> ) : (
    diff --git a/frontend/src/metabase/visualizations/components/settings/ChartSettingSegmentedControl.jsx b/frontend/src/metabase/visualizations/components/settings/ChartSettingSegmentedControl.jsx new file mode 100644 index 000000000000..a0edb332b921 --- /dev/null +++ b/frontend/src/metabase/visualizations/components/settings/ChartSettingSegmentedControl.jsx @@ -0,0 +1,8 @@ +import React from "react"; +import { SegmentedControl } from "metabase/components/SegmentedControl"; + +function ChartSettingSegmentedControl(props) { + return ; +} + +export default ChartSettingSegmentedControl; diff --git a/frontend/src/metabase/visualizations/components/settings/ChartSettingsTableFormatting.jsx b/frontend/src/metabase/visualizations/components/settings/ChartSettingsTableFormatting.jsx index 91a96d23ee51..998a88d0fc10 100644 --- a/frontend/src/metabase/visualizations/components/settings/ChartSettingsTableFormatting.jsx +++ b/frontend/src/metabase/visualizations/components/settings/ChartSettingsTableFormatting.jsx @@ -14,8 +14,10 @@ import ColorRangePicker, { ColorRangePreview, } from "metabase/components/ColorRangePicker"; import NumericInput from "metabase/components/NumericInput"; - -import { SortableContainer, SortableElement } from "react-sortable-hoc"; +import { + SortableContainer, + SortableElement, +} from "metabase/components/sortable"; import MetabaseAnalytics from "metabase/lib/analytics"; import { isNumeric, isString } from "metabase/lib/schema_metadata"; @@ -215,7 +217,6 @@ const RuleListing = ({ rules, cols, onEdit, onAdd, onRemove, onMove }) => ( onRemove={onRemove} onSortEnd={({ oldIndex, newIndex }) => onMove(oldIndex, newIndex)} distance={10} - helperClass="z5" />
    ) : null} diff --git a/frontend/src/metabase/visualizations/index.js b/frontend/src/metabase/visualizations/index.js index adc43014b0b2..5bd19baeba4f 100644 --- a/frontend/src/metabase/visualizations/index.js +++ b/frontend/src/metabase/visualizations/index.js @@ -97,6 +97,11 @@ export const extractRemappings = series => { return se; }; +export function getMaxMetricsSupported(display) { + const visualization = visualizations.get(display); + return visualization.maxMetricsSupported || Infinity; +} + // removes columns with `remapped_from` property and adds a `remapping` to the appropriate column const extractRemappedColumns = data => { const cols = data.cols.map(col => ({ diff --git a/frontend/src/metabase/visualizations/lib/LineAreaBarPostRender.js b/frontend/src/metabase/visualizations/lib/LineAreaBarPostRender.js index e521ad4f8602..8842a23feb69 100644 --- a/frontend/src/metabase/visualizations/lib/LineAreaBarPostRender.js +++ b/frontend/src/metabase/visualizations/lib/LineAreaBarPostRender.js @@ -368,6 +368,22 @@ function onRenderAddExtraClickHandlers(chart) { } } +function onRenderSetZeroGridLineClassName(chart) { + const yAxis = chart.y(); + if (!yAxis) { + return; + } + + const yZero = yAxis(0).toString(); + chart + .select(".grid-line.horizontal") + .selectAll("line") + .filter(function() { + return d3.select(this).attr("y1") === yZero; + }) + .attr("class", "zero"); +} + // the various steps that get called function onRender( chart, @@ -398,6 +414,7 @@ function onRender( onRenderSetClassName(chart, isStacked); onRenderRotateAxis(chart); onRenderAddExtraClickHandlers(chart); + onRenderSetZeroGridLineClassName(chart); } // +-------------------------------------------------------------------------------------------------------------------+ diff --git a/frontend/src/metabase/visualizations/lib/LineAreaBarRenderer.js b/frontend/src/metabase/visualizations/lib/LineAreaBarRenderer.js index d8daf54356ce..c15ed4e43a2f 100644 --- 
a/frontend/src/metabase/visualizations/lib/LineAreaBarRenderer.js +++ b/frontend/src/metabase/visualizations/lib/LineAreaBarRenderer.js @@ -67,6 +67,7 @@ import { import { lineAddons } from "./graph/addons"; import { initBrush } from "./graph/brush"; +import { stack, stackOffsetDiverging } from "./graph/stack"; import type { VisualizationProps } from "metabase-types/types/Visualization"; @@ -193,11 +194,17 @@ function getDimensionsAndGroupsAndUpdateSeriesDisplayNamesForStackedChart( const dimension = dataset.dimension(d => d[0]); const groups = [ - datas.map((data, seriesIndex) => - reduceGroup(dimension.group(), seriesIndex + 1, () => - warn(unaggregatedDataWarning(props.series[seriesIndex].data.cols[0])), - ), - ), + datas.map((data, seriesIndex) => { + // HACK: waterfall chart is a stacked bar chart that supports only one series + // and the groups number does not match the series number due to the implementation + const realSeriesIndex = props.chartType === "waterfall" ? 0 : seriesIndex; + + return reduceGroup(dimension.group(), seriesIndex + 1, () => + warn( + unaggregatedDataWarning(props.series[realSeriesIndex].data.cols[0]), + ), + ); + }), ]; return { dimension, groups }; @@ -433,6 +440,11 @@ function applyChartLineBarSettings( forceCenterBar || settings["graph.x_axis.scale"] !== "ordinal", ); } + + // AREA/BAR: + if (settings["stackable.stack_type"] === "stacked") { + chart.stackLayout(stack().offset(stackOffsetDiverging)); + } } // TODO - give this a good name when I figure out what it does @@ -523,12 +535,12 @@ function getCharts( const { settings, chartType, series, onChangeCardAndRun } = props; const { yAxisSplit } = yAxisProps; - const isHeterogenous = - _.uniq(series.map(single => getSeriesDisplay(settings, single))).length > 1; - const isHeterogenousOrdinal = - settings["graph.x_axis.scale"] === "ordinal" && isHeterogenous; + const displays = _.uniq(series.map(s => getSeriesDisplay(settings, s))); + const isMixedBar = displays.includes("bar") && 
displays.length > 1; + const isOrdinal = settings["graph.x_axis.scale"] === "ordinal"; + const isMixedOrdinalBar = isMixedBar && isOrdinal; - if (isHeterogenousOrdinal) { + if (isMixedOrdinalBar) { // HACK: ordinal + mix of line and bar results in uncentered points, shift by // half the width parent.on("renderlet.shift", () => { @@ -589,7 +601,7 @@ function getCharts( settings, seriesChartType, seriesSettings, - isHeterogenousOrdinal, + isMixedOrdinalBar, ); return chart; diff --git a/frontend/src/metabase/visualizations/lib/RowRenderer.js b/frontend/src/metabase/visualizations/lib/RowRenderer.js index 0eb8db927106..e47fbeabbd53 100644 --- a/frontend/src/metabase/visualizations/lib/RowRenderer.js +++ b/frontend/src/metabase/visualizations/lib/RowRenderer.js @@ -3,6 +3,7 @@ import crossfilter from "crossfilter"; import d3 from "d3"; import dc from "dc"; +import { t } from "ttag"; import { formatValue } from "metabase/lib/formatting"; @@ -26,10 +27,6 @@ export default function rowRenderer( ): DeregisterFunction { const { cols } = series[0].data; - if (series.length > 1) { - throw new Error("Row chart does not support multiple series"); - } - const chart = dc.rowChart(element); // disable clicks @@ -117,7 +114,15 @@ export default function rowRenderer( .elasticX(true) .dimension(dimension) .group(group) - .ordering(d => d.index); + .ordering(d => d.index) + .othersLabel(t`Others`); + + chart.xAxis().tickFormat(value => { + return formatValue(value, { + ...settings.column(cols[1]), + type: "axis", + }); + }); const labelPadHorizontal = 5; let labelsOutside = false; diff --git a/frontend/src/metabase/visualizations/lib/apply_axis.js b/frontend/src/metabase/visualizations/lib/apply_axis.js index db8f5d241709..a8f27055f012 100644 --- a/frontend/src/metabase/visualizations/lib/apply_axis.js +++ b/frontend/src/metabase/visualizations/lib/apply_axis.js @@ -490,7 +490,7 @@ export function getYValueFormatter(chart, series, yExtent) { return (value, options, seriesIndex = 0) => 
{ const metricColumn = series[seriesIndex].data.cols[1]; const columnSettings = chart.settings.column(metricColumn); - const columnExtent = options.extent || yExtent; + const columnExtent = options.extent ?? yExtent; const roundedValue = maybeRoundValueToZero(value, columnExtent); return formatValue(roundedValue, { ...columnSettings, ...options }); }; diff --git a/frontend/src/metabase/visualizations/lib/chart_values.js b/frontend/src/metabase/visualizations/lib/chart_values.js index 179446a7da3e..9ce5516a8d7a 100644 --- a/frontend/src/metabase/visualizations/lib/chart_values.js +++ b/frontend/src/metabase/visualizations/lib/chart_values.js @@ -22,12 +22,16 @@ export function onRenderValueLabels( chart.settings.series(chart.series[seriesIndex]), ); - // See if each series is enabled. Fall back to the chart-level setting if undefined. + // See if each series is enabled, fall back to the chart-level setting if undefined. + // Scatter charts should not have labels, the setting could be enabled when switching between chart types. let showSeries = seriesSettings.map( - ({ show_series_values = chart.settings["graph.show_values"] }) => - show_series_values, + ({ display, show_series_values = chart.settings["graph.show_values"] }) => + show_series_values && !isScatter(display), ); + let displays = seriesSettings.map(settings => settings.display); + const isStacked = chart.settings["stackable.stack_type"] === "stacked"; + if ( showSeries.every(s => s === false) || // every series setting is off chart.settings["stackable.stack_type"] === "normalized" // chart is normalized @@ -35,8 +39,7 @@ export function onRenderValueLabels( return; } - let displays = seriesSettings.map(settings => settings.display); - if (chart.settings["stackable.stack_type"] === "stacked") { + if (isStacked) { // When stacked, flatten datas into one series. We'll sum values on the same x point later. 
datas = [datas.flat()]; @@ -50,6 +53,10 @@ export function onRenderValueLabels( return display === "bar" || display === "waterfall"; } + function isScatter(display) { + return display === "scatter"; + } + let barWidth; const barCount = displays.filter(isBarLike).length; if (barCount > 0) { @@ -76,23 +83,37 @@ export function onRenderValueLabels( const display = displays[seriesIndex]; // Sum duplicate x values in the same series. + // Positive and negative values are stacked separately, unless it is a waterfall chart data = _.chain(data) .groupBy(([x]) => xScale(x)) .values() .map(data => { const [[x]] = data; - const y = data.reduce((sum, [, y]) => sum + y, 0); - return [x, y]; + const yp = data + .filter(([, y]) => y >= 0) + .reduce((sum, [, y]) => sum + y, 0); + const yn = data + .filter(([, y]) => y < 0) + .reduce((sum, [, y]) => sum + y, 0); + + if (!isStacked) { + return [[x, yp + yn, 1]]; + } else if (yp !== yn) { + return [[x, yp, 2], [x, yn, 2]]; + } else { + return [[x, yp, 1]]; + } }) + .flatten(1) .value(); data = data - .map(([x, y], i) => { + .map(([x, y, step], i) => { const isLocalMin = // first point or prior is greater than y - (i === 0 || data[i - 1][1] > y) && + (i < step || data[i - step][1] > y) && // last point point or next is greater than y - (i === data.length - 1 || data[i + 1][1] > y); + (i >= data.length - step || data[i + step][1] > y); const showLabelBelow = isLocalMin && display === "line"; const rotated = barCount > 1 && isBarLike(display) && barWidth < 40; const hidden = diff --git a/frontend/src/metabase/visualizations/lib/graph/stack.js b/frontend/src/metabase/visualizations/lib/graph/stack.js new file mode 100644 index 000000000000..020c2b22abe4 --- /dev/null +++ b/frontend/src/metabase/visualizations/lib/graph/stack.js @@ -0,0 +1,154 @@ +import d3 from "d3"; + +// d3.layout.stack applies offsets only to the first value within a group +// this wrapper does that to each value to stack positive and negative series separately + 
+export function stack() { + const inner = d3.layout.stack(); + + let values = inner.values(); + let order = inner.order(); + let x = inner.x(); + let y = inner.y(); + let out = inner.out(); + let offset = stackOffsetZero; + + function stack(data, index) { + const n = data.length; + + if (!n) { + return data; + } + + // convert series to canonical two-dimensional representation + let series = data.map(function(d, i) { + return values.call(stack, d, i); + }); + + // convert each series to canonical [[x,y]] representation + let points = series.map(function(d) { + return d.map(function(v, i) { + return [x.call(stack, v, i), y.call(stack, v, i)]; + }); + }); + + // compute the order of series, and permute them + const orders = order.call(stack, points, index); + series = d3.permute(series, orders); + points = d3.permute(points, orders); + + // compute the baseline + const offsets = offset.call(stack, points, index); + + // propagate it to other series + const m = series[0].length; + for (let j = 0; j < m; j++) { + for (let i = 0; i < n; i++) { + out.call(stack, series[i][j], offsets[i][j], points[i][j][1]); + } + } + + return data; + } + + stack.values = function(x) { + if (!arguments.length) { + return values; + } + + values = x; + return stack; + }; + + stack.order = function(x) { + if (!arguments.length) { + return order; + } + + order = x; + return stack; + }; + + stack.offset = function(x) { + if (!arguments.length) { + return offset; + } + + offset = x; + return stack; + }; + + stack.x = function(z) { + if (!arguments.length) { + return x; + } + + x = z; + return stack; + }; + + stack.y = function(z) { + if (!arguments.length) { + return y; + } + + y = z; + return stack; + }; + + stack.out = function(z) { + if (!arguments.length) { + return out; + } + + out = z; + return stack; + }; + + return stack; +} + +// series are stacked on top of each other, starting from zero +export function stackOffsetZero(data) { + const n = data.length; + const m = data[0].length; + 
const y0 = []; + + for (let i = 0; i < n; i++) { + y0[i] = []; + } + + for (let j = 0; j < m; j++) { + for (let i = 0, d = 0; i < n; i++) { + y0[i][j] = d; + d += data[i][j][1]; + } + } + + return y0; +} + +// series are stacked with separate tracks for positive and negative values +export function stackOffsetDiverging(data) { + const n = data.length; + const m = data[0].length; + const y0 = []; + + for (let i = 0; i < n; i++) { + y0[i] = []; + } + + for (let j = 0; j < m; j++) { + for (let i = 0, dp = 0, dn = 0; i < n; i++) { + if (data[i][j][1] >= 0) { + y0[i][j] = dp; + dp += data[i][j][1]; + } else { + y0[i][j] = dn; + dn += data[i][j][1]; + } + } + } + + return y0; +} diff --git a/frontend/src/metabase/visualizations/lib/graph/stack.unit.spec.js b/frontend/src/metabase/visualizations/lib/graph/stack.unit.spec.js new file mode 100644 index 000000000000..1f6f94fb087d --- /dev/null +++ b/frontend/src/metabase/visualizations/lib/graph/stack.unit.spec.js @@ -0,0 +1,29 @@ +import { stack, stackOffsetDiverging } from "./stack"; + +describe("stack", () => { + const data = [ + [{ x: 1, y: 100 }, { x: 2, y: 100 }], + [{ x: 1, y: 200 }, { x: 2, y: -200 }], + [{ x: 1, y: 300 }, { x: 2, y: 300 }], + ]; + + it("should stack series by default", () => { + stack()(data); + + expect(data).toEqual([ + [{ x: 1, y: 100, y0: 0 }, { x: 2, y: 100, y0: 0 }], + [{ x: 1, y: 200, y0: 100 }, { x: 2, y: -200, y0: 100 }], + [{ x: 1, y: 300, y0: 300 }, { x: 2, y: 300, y0: -100 }], + ]); + }); + + it("should stack series with separate positive and negative tracks", () => { + stack().offset(stackOffsetDiverging)(data); + + expect(data).toEqual([ + [{ x: 1, y: 100, y0: 0 }, { x: 2, y: 100, y0: 0 }], + [{ x: 1, y: 200, y0: 100 }, { x: 2, y: -200, y0: 0 }], + [{ x: 1, y: 300, y0: 300 }, { x: 2, y: 300, y0: 100 }], + ]); + }); +}); diff --git a/frontend/src/metabase/visualizations/lib/settings.js b/frontend/src/metabase/visualizations/lib/settings.js index c822b829e4e7..d2217084fd9e 100644 --- 
a/frontend/src/metabase/visualizations/lib/settings.js +++ b/frontend/src/metabase/visualizations/lib/settings.js @@ -8,7 +8,7 @@ import ChartSettingInputNumeric from "metabase/visualizations/components/setting import ChartSettingRadio from "metabase/visualizations/components/settings/ChartSettingRadio"; import ChartSettingSelect from "metabase/visualizations/components/settings/ChartSettingSelect"; import ChartSettingToggle from "metabase/visualizations/components/settings/ChartSettingToggle"; -import ChartSettingButtonGroup from "metabase/visualizations/components/settings/ChartSettingButtonGroup"; +import ChartSettingSegmentedControl from "metabase/visualizations/components/settings/ChartSettingSegmentedControl"; import ChartSettingFieldPicker from "metabase/visualizations/components/settings/ChartSettingFieldPicker"; import ChartSettingFieldsPicker from "metabase/visualizations/components/settings/ChartSettingFieldsPicker"; import ChartSettingFieldsPartition from "metabase/visualizations/components/settings/ChartSettingFieldsPartition"; @@ -70,7 +70,7 @@ const WIDGETS = { radio: ChartSettingRadio, select: ChartSettingSelect, toggle: ChartSettingToggle, - buttonGroup: ChartSettingButtonGroup, + segmentedControl: ChartSettingSegmentedControl, field: ChartSettingFieldPicker, fields: ChartSettingFieldsPicker, fieldsPartition: ChartSettingFieldsPartition, diff --git a/frontend/src/metabase/visualizations/lib/settings/column.js b/frontend/src/metabase/visualizations/lib/settings/column.js index 0eec8ade0015..35bbfb6dc740 100644 --- a/frontend/src/metabase/visualizations/lib/settings/column.js +++ b/frontend/src/metabase/visualizations/lib/settings/column.js @@ -11,6 +11,7 @@ import { isNumber, isCoordinate, isCurrency, + isDateWithoutTime, } from "metabase/lib/schema_metadata"; // HACK: cyclical dependency causing errors in unit tests @@ -29,7 +30,7 @@ import { hasHour, } from "metabase/lib/formatting/date"; -import currency from "metabase/lib/currency"; +import { 
currency } from "cljs/metabase.shared.util.currency"; import type { Settings, SettingDef } from "../settings"; import type { DateStyle, TimeStyle } from "metabase/lib/formatting/date"; @@ -70,20 +71,17 @@ export function columnSettings({ } import MetabaseSettings from "metabase/lib/settings"; -import { isa } from "metabase/lib/types"; export function getGlobalSettingsForColumn(column: Column) { - const settings = {}; + const columnSettings = {}; + const customFormatting = MetabaseSettings.get("custom-formatting") || {}; - const customFormatting = MetabaseSettings.get("custom-formatting"); // NOTE: the order of these doesn't matter as long as there's no overlap between settings - for (const [type, globalSettings] of Object.entries(customFormatting || {})) { - if (isa(column.semantic_type || column.base_type, type)) { - Object.assign(settings, globalSettings); - } + for (const [, globalSettings] of Object.entries(customFormatting)) { + Object.assign(columnSettings, globalSettings); } - return settings; + return columnSettings; } function getLocalSettingsForColumn(column: Column): Settings { @@ -255,7 +253,8 @@ export const DATE_COLUMN_SETTINGS = { } return { options }; }, - getHidden: ({ unit }: Column, settings: ColumnSettings) => !hasHour(unit), + getHidden: (column: Column, settings: ColumnSettings) => + !hasHour(column.unit) || isDateWithoutTime(column), getDefault: ({ unit }: Column) => (hasHour(unit) ? 
"minutes" : null), }, time_style: { @@ -272,7 +271,7 @@ export const DATE_COLUMN_SETTINGS = { ], }), getHidden: (column: Column, settings: ColumnSettings) => - !settings["time_enabled"], + !settings["time_enabled"] || isDateWithoutTime(column), readDependencies: ["time_enabled"], }, }; @@ -312,8 +311,8 @@ export const NUMBER_COLUMN_SETTINGS = { widget: "select", props: { // FIXME: rest of these options - options: Object.values(currency).map( - (currency: { name: string, code: string }) => ({ + options: currency.map( + ([_, currency: { name: string, code: string }]) => ({ name: currency.name, value: currency.code, }), @@ -330,24 +329,36 @@ export const NUMBER_COLUMN_SETTINGS = { widget: "radio", getProps: (column: Column, settings: ColumnSettings) => { const c = settings["currency"] || "USD"; + const symbol = getCurrency(c, "symbol"); + const code = getCurrency(c, "code"); + const name = getCurrency(c, "name"); return { options: [ + ...(symbol !== code + ? [ + { + name: t`Symbol` + ` ` + `(${symbol})`, + value: "symbol", + }, + ] + : []), { - name: t`Symbol` + ` ` + `(${getCurrency(c, "symbol")})`, - value: "symbol", - }, - { - name: t`Code` + ` ` + `(${getCurrency(c, "code")})`, + name: t`Code` + ` ` + `(${code})`, value: "code", }, { - name: t`Name` + ` ` + `(${getCurrency(c, "name")})`, + name: t`Name` + ` ` + `(${name})`, value: "name", }, ], }; }, - default: "symbol", + getDefault: (column: Column, settings: ColumnSettings) => { + const c = settings["currency"] || "USD"; + return getCurrency(c, "symbol") !== getCurrency(c, "code") + ? "symbol" + : "code"; + }, getHidden: (column: Column, settings: ColumnSettings) => settings["number_style"] !== "currency", readDependencies: ["number_style"], @@ -467,7 +478,7 @@ export function getSettingDefintionsForColumn(series: Series, column: Column) { ? 
visualization.columnSettings(column) : visualization.columnSettings || {}; - if (isDate(column)) { + if (isDate(column) || (column.unit && column.unit !== "default")) { return { ...extraColumnSettings, ...DATE_COLUMN_SETTINGS, diff --git a/frontend/src/metabase/visualizations/lib/settings/graph.js b/frontend/src/metabase/visualizations/lib/settings/graph.js index 2c2d3a30e345..049feeb8e99a 100644 --- a/frontend/src/metabase/visualizations/lib/settings/graph.js +++ b/frontend/src/metabase/visualizations/lib/settings/graph.js @@ -19,6 +19,7 @@ import { dimensionIsNumeric } from "metabase/visualizations/lib/numeric"; import { dimensionIsTimeseries } from "metabase/visualizations/lib/timeseries"; import _ from "underscore"; +import { getMaxMetricsSupported } from "metabase/visualizations"; // NOTE: currently we don't consider any date extracts to be histgrams const HISTOGRAM_DATE_EXTRACTS = new Set([ @@ -152,13 +153,20 @@ export const GRAPH_DATA_SETTINGS = { const options = data.cols .filter(vizSettings["graph._metric_filter"]) .map(getOptionFromColumn); + + const hasBreakout = vizSettings["graph.dimensions"].length > 1; + const addedMetricsCount = vizSettings["graph.metrics"].length; + const maxMetricsSupportedCount = getMaxMetricsSupported(card.display); + + const hasMetricsToAdd = options.length > value.length; + const canAddAnother = + addedMetricsCount < maxMetricsSupportedCount && + hasMetricsToAdd && + !hasBreakout; + return { options, - addAnother: - options.length > value.length && - vizSettings["graph.dimensions"].length < 2 - ? t`Add another series...` - : null, + addAnother: canAddAnother ? 
t`Add another series...` : null, columns: data.cols, showColumnSetting: true, }; @@ -250,12 +258,9 @@ export const STACKABLE_SETTINGS = { "stackable.stack_display": { section: t`Display`, title: t`Stacked chart type`, - widget: "buttonGroup", + widget: "segmentedControl", props: { - options: [ - { icon: "area", name: t`Area`, value: "area" }, - { icon: "bar", name: t`Bar`, value: "bar" }, - ], + options: [{ icon: "area", value: "area" }, { icon: "bar", value: "bar" }], }, getDefault: (series, settings) => { const displays = series.map(single => settings.series(single).display); diff --git a/frontend/src/metabase/visualizations/lib/settings/series.js b/frontend/src/metabase/visualizations/lib/settings/series.js index ce535db8279e..6d34d83718b8 100644 --- a/frontend/src/metabase/visualizations/lib/settings/series.js +++ b/frontend/src/metabase/visualizations/lib/settings/series.js @@ -63,12 +63,12 @@ export function seriesSetting({ }, "line.interpolate": { title: t`Line style`, - widget: "buttonGroup", + widget: "segmentedControl", props: { options: [ - { icon: "straight", name: t`Line`, value: "linear" }, - { icon: "curved", name: t`Curve`, value: "cardinal" }, - { icon: "stepped", name: t`Step`, value: "step-after" }, + { icon: "straight", value: "linear" }, + { icon: "curved", value: "cardinal" }, + { icon: "stepped", value: "step-after" }, ], }, getHidden: (single, settings) => @@ -80,7 +80,7 @@ export function seriesSetting({ }, "line.marker_enabled": { title: t`Show dots on lines`, - widget: "buttonGroup", + widget: "segmentedControl", props: { options: [ { name: t`Auto`, value: null }, @@ -116,7 +116,7 @@ export function seriesSetting({ }, axis: { title: t`Which axis?`, - widget: "buttonGroup", + widget: "segmentedControl", default: null, props: { options: [ diff --git a/frontend/src/metabase/visualizations/lib/table.js b/frontend/src/metabase/visualizations/lib/table.js index a4d45c66df12..67127623e6ba 100644 --- 
a/frontend/src/metabase/visualizations/lib/table.js +++ b/frontend/src/metabase/visualizations/lib/table.js @@ -2,13 +2,29 @@ import type { DatasetData, Column } from "metabase-types/types/Dataset"; import type { ClickObject } from "metabase-types/types/Visualization"; import { isNumber, isCoordinate } from "metabase/lib/schema_metadata"; -export function getTableClickedObjectRowData([series], rowIndex) { +export function getTableClickedObjectRowData( + [series], + rowIndex, + columnIndex, + isPivoted, + data, +) { const { rows, cols } = series.data; - return rows[rowIndex].map((value, index) => ({ - value, - col: cols[index], - })); + // if pivoted, we need to find the original rowIndex from the pivoted row/columnIndex + const originalRowIndex = isPivoted + ? data.sourceRows[rowIndex][columnIndex] + : rowIndex; + + // originalRowIndex may be null if the pivot table is empty in that cell + if (originalRowIndex === null) { + return null; + } else { + return rows[originalRowIndex].map((value, index) => ({ + value, + col: cols[index], + })); + } } export function getTableCellClickedObject( diff --git a/frontend/src/metabase/visualizations/visualizations/Funnel.jsx b/frontend/src/metabase/visualizations/visualizations/Funnel.jsx index 3dac8999fb79..cf2bc9150186 100644 --- a/frontend/src/metabase/visualizations/visualizations/Funnel.jsx +++ b/frontend/src/metabase/visualizations/visualizations/Funnel.jsx @@ -1,10 +1,13 @@ import React, { Component } from "react"; +import PropTypes from "prop-types"; import { t } from "ttag"; import { MinRowsError, ChartSettingsError, } from "metabase/visualizations/lib/errors"; +import { iconPropTypes } from "metabase/components/Icon"; + import { formatValue } from "metabase/lib/formatting"; import { getComputedSettingsForSeries } from "metabase/visualizations/lib/settings/visualization"; @@ -22,7 +25,11 @@ import _ from "underscore"; import cx from "classnames"; import type { VisualizationProps } from 
"metabase-types/types/Visualization"; -import TitleLegendHeader from "metabase/visualizations/components/TitleLegendHeader"; +import ChartCaption from "metabase/visualizations/components/ChartCaption"; + +const propTypes = { + headerIcon: PropTypes.shape(iconPropTypes), +}; export default class Funnel extends Component { props: VisualizationProps; @@ -172,7 +179,7 @@ export default class Funnel extends Component { } render() { - const { settings } = this.props; + const { headerIcon, settings } = this.props; const hasTitle = settings["card.title"]; @@ -188,11 +195,12 @@ export default class Funnel extends Component { return (
    {hasTitle && ( - )} {!hasTitle && @@ -210,3 +218,5 @@ export default class Funnel extends Component { } } } + +Funnel.propTypes = propTypes; diff --git a/frontend/src/metabase/visualizations/visualizations/Map.jsx b/frontend/src/metabase/visualizations/visualizations/Map.jsx index 3d29529752a5..afa3bcbdbad6 100644 --- a/frontend/src/metabase/visualizations/visualizations/Map.jsx +++ b/frontend/src/metabase/visualizations/visualizations/Map.jsx @@ -5,6 +5,7 @@ import ChoroplethMap, { getColorplethColorScale, } from "../components/ChoroplethMap"; import PinMap from "../components/PinMap"; +import LeafletGridHeatMap from "../components/LeafletGridHeatMap"; import { ChartSettingsError } from "metabase/visualizations/lib/errors"; import { @@ -46,7 +47,8 @@ export default class Map extends Component { static isSensible({ cols, rows }) { return ( PinMap.isSensible({ cols, rows }) || - ChoroplethMap.isSensible({ cols, rows }) + ChoroplethMap.isSensible({ cols, rows }) || + LeafletGridHeatMap.isSensible({ cols, rows }) ); } diff --git a/frontend/src/metabase/visualizations/visualizations/RowChart.jsx b/frontend/src/metabase/visualizations/visualizations/RowChart.jsx index 098d42a7ec90..7e547e5718ca 100644 --- a/frontend/src/metabase/visualizations/visualizations/RowChart.jsx +++ b/frontend/src/metabase/visualizations/visualizations/RowChart.jsx @@ -13,6 +13,7 @@ export default class RowChart extends LineAreaBarChart { static iconName = "horizontal_bar"; static noun = t`row chart`; + static maxMetricsSupported = 1; static supportsSeries = false; static renderer = rowRenderer; diff --git a/frontend/src/metabase/visualizations/visualizations/Table.jsx b/frontend/src/metabase/visualizations/visualizations/Table.jsx index 177a84ab8bdc..bb6bb400eba1 100644 --- a/frontend/src/metabase/visualizations/visualizations/Table.jsx +++ b/frontend/src/metabase/visualizations/visualizations/Table.jsx @@ -422,7 +422,7 @@ export default class Table extends Component { " className="mb2" /> 
- Every field is hidden right now + {t`Every field is hidden right now`}
    ); } diff --git a/frontend/src/metabase/visualizations/visualizations/Text.jsx b/frontend/src/metabase/visualizations/visualizations/Text.jsx index b0fe5536700c..5091909a5b0c 100644 --- a/frontend/src/metabase/visualizations/visualizations/Text.jsx +++ b/frontend/src/metabase/visualizations/visualizations/Text.jsx @@ -114,8 +114,9 @@ export default class Text extends Component { styles["text-card-markdown"], getSettingsStyle(settings), )} - source={settings.text} - /> + > + {settings.text} + ) : (