diff --git a/.babelrc b/.babelrc index 6b0168a562a9..ebe4672214fa 100644 --- a/.babelrc +++ b/.babelrc @@ -1,28 +1,27 @@ { "plugins": [ "babel-plugin-styled-components", - "transform-flow-strip-types", - "add-react-displayname", - "transform-decorators-legacy", - ["transform-builtin-extend", { - "globals": ["Error", "Array"] - }], - "syntax-trailing-function-commas" + "@babel/plugin-transform-flow-strip-types", + "@babel/plugin-proposal-export-default-from", + ["@babel/plugin-proposal-decorators", { "legacy": true }] ], - "presets": ["es2015", "stage-0", "react"], + "presets": ["@babel/preset-env", "@babel/preset-react"], "env": { "development": { "presets": [] }, "extract": { "plugins": [ - ["ttag", { - "extract": { - "output": "locales/metabase-frontend.pot" - }, - "discover": ["t", "jt"], - "numberedExpressions": true - }] + [ + "ttag", + { + "extract": { + "output": "locales/metabase-frontend.pot" + }, + "discover": ["t", "jt"], + "numberedExpressions": true + } + ] ] } } diff --git a/.circleci/config.yml b/.circleci/config.yml index cd4abcd19e8b..8ce63a60a184 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,28 +1,33 @@ version: 2.1 +######################################################################################################################## +# ORBS # +######################################################################################################################## +orbs: + aws-cli: circleci/aws-cli@4.0 + aws-ecr: circleci/aws-ecr@9.0 + + ######################################################################################################################## # EXECUTORS # ######################################################################################################################## executors: - # Our brand new builder - clojure-and-node: - working_directory: /home/circleci/metabase/metabase/ - docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 - - # CircleCI base (Lein 2.9.5) + Node + Headless browsers + 
Clojure CLI - big one + # CircleCI base Node + Headless browsers + Clojure CLI - big one # Maildev runs by default with all Cypress tests clojure-and-node-and-browsers: working_directory: /home/circleci/metabase/metabase/ docker: - - image: circleci/clojure:lein-2.9.5-node-browsers + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: maildev/maildev + - image: metabase/qa-databases:postgres-sample-12 + - image: metabase/qa-databases:mongo-sample-4.0 + - image: metabase/qa-databases:mysql-sample-8 java-8: working_directory: /home/circleci/metabase/metabase/ docker: - - image: circleci/clojure:openjdk-8-lein-2.9.5-buster + - image: metabase/ci:circleci-java-8-clj-1.10.3.929-07-27-2021-node-browsers # Java 11 tests also test Metabase with the at-rest encryption enabled. See # https://metabase.com/docs/latest/operations-guide/encrypting-database-details-at-rest.html for an explanation of @@ -30,19 +35,19 @@ executors: java-11: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_ENCRYPTION_SECRET_KEY: Orw0AAyzkO/kPTLJRxiyKoBHXa/d6ZcO+p+gpZO/wSQ= java-16: working_directory: /home/circleci/metabase/metabase/ docker: - - image: circleci/clojure:openjdk-16-lein-2.9.5-buster + - image: metabase/ci:circleci-java-16-clj-1.10.3.929-07-27-2021-node-browsers postgres-9-6: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: postgres MB_DB_PORT: 5432 @@ -58,7 +63,7 @@ executors: postgres-latest: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: postgres 
MB_DB_PORT: 5432 @@ -75,7 +80,7 @@ executors: mysql-5-7: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: mysql MB_DB_HOST: localhost @@ -88,7 +93,7 @@ executors: mysql-latest: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: mysql MB_DB_HOST: localhost @@ -101,7 +106,7 @@ executors: mariadb-10-2: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: mysql MB_DB_HOST: localhost @@ -114,7 +119,7 @@ executors: mariadb-latest: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_DB_TYPE: mysql MB_DB_HOST: localhost @@ -128,16 +133,22 @@ executors: # MYSQL_USER: root # MYSQL_ALLOW_EMPTY_PASSWORD: yes - mongo: + mongo-4-0: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: circleci/mongo:4.0 - presto: + mongo-latest: + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers + - image: circleci/mongo:latest + + presto-186: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: metabase/presto-mb-ci:0.186 environment: JAVA_TOOL_OPTIONS: 
"-Xmx2g" @@ -145,22 +156,41 @@ executors: # OOM sometimes with the default medium size. resource_class: large + presto-jdbc-env: + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers + - image: metabase/presto-mb-ci:latest # version 0.254 + environment: + JAVA_TOOL_OPTIONS: "-Xmx2g" + MB_PRESTO_JDBC_TEST_CATALOG: test_data + MB_PRESTO_JDBC_TEST_HOST: localhost + MB_PRESTO_JDBC_TEST_PORT: 8443 + MB_PRESTO_JDBC_TEST_SSL: true + MB_PRESTO_JDBC_TEST_USER: metabase + MB_PRESTO_JDBC_TEST_PASSWORD: metabase + MB_ENABLE_PRESTO_JDBC_DRIVER: true + MB_PRESTO_JDBC_TEST_ADDITIONAL_OPTIONS: > + SSLTrustStorePath=/tmp/cacerts-with-presto-ssl.jks&SSLTrustStorePassword=changeit + # (see above) + resource_class: large + sparksql: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: metabase/spark:2.1.1 vertica: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: sumitchawla/vertica sqlserver: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers environment: MB_SQLSERVER_TEST_HOST: localhost MB_SQLSERVER_TEST_PASSWORD: 'P@ssw0rd' @@ -174,7 +204,7 @@ executors: druid: working_directory: /home/circleci/metabase/metabase/ docker: - - image: metabase/ci:lein-2.9.5-clojure-1.10.3.814 + - image: metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers - image: metabase/druid:0.20.2 environment: CLUSTER_SIZE: nano-quickstart @@ -182,26 +212,6 @@ executors: # OOM all the time with the default medium size. 
resource_class: large - - fe-mongo-4: - working_directory: /home/circleci/metabase/metabase/ - docker: - - image: circleci/clojure:lein-2.9.5-node-browsers - - image: metabase/qa-databases:mongo-sample-4.0 - - fe-postgres-12: - working_directory: /home/circleci/metabase/metabase/ - docker: - - image: circleci/clojure:lein-2.9.5-node-browsers - - image: metabase/qa-databases:postgres-sample-12 - - fe-mysql-8: - working_directory: /home/circleci/metabase/metabase/ - docker: - - image: circleci/clojure:lein-2.9.5-node-browsers - - image: metabase/qa-databases:mysql-sample-8 - - ######################################################################################################################## # MAP FRAGMENTS AND CACHE KEYS # ######################################################################################################################## @@ -209,7 +219,8 @@ executors: # `default_parameters` isn't a key that CircleCI uses, but this form lets us reuse parameter definitions default_parameters: &Params edition: - type: string + type: enum + enum: ["oss", "ee"] default: "oss" # .BACKEND-CHECKSUMS, .FRONTEND-CHECKSUMS, and .MODULE-CHECKSUMS are created during the checkout step; see that step @@ -226,23 +237,25 @@ default_parameters: &Params # uncontrollably since old deps would continue to accumulate. Restoring big caches is really slow in Circle. It's # actually faster to recreate the deps cache from scratch whenever we need to which keeps the size down. cache-key-backend-deps: &CacheKeyBackendDeps - key: v1-{{ checksum ".CACHE-PREFIX" }}-be-deps-{{ checksum "project.clj" }}-{{ checksum ".SCRIPTS-DEPS-CHECKSUMS" }} + # TODO -- this should actually include the Java source files and the Spark SQL AOT source files as well since we now + # compile those as part of this step. 
FIXME + key: v5-{{ checksum ".CACHE-PREFIX" }}-be-deps-{{ checksum "deps.edn" }}-{{ checksum ".SCRIPTS-DEPS-CHECKSUMS" }} cache-key-frontend-deps: &CacheKeyFrontendDeps - key: v1-{{ checksum ".CACHE-PREFIX" }}-fe-deps-{{ checksum "yarn.lock" }} + key: v5-{{ checksum ".CACHE-PREFIX" }}-fe-deps-{{ checksum "yarn.lock" }} # Key used for implementation of run-on-change -- this is the cache key that contains the .SUCCESS dummy file # By default the key ALWAYS includes the name of the test job itself ($CIRCLE_STAGE) so you don't need to add that yourself. cache-key-run-on-change: &CacheKeyRunOnChange - key: v1-{{ checksum ".CACHE-PREFIX" }}-run-on-change-{{ .Environment.CIRCLE_STAGE }}-<< parameters.checksum >> + key: v5-{{ checksum ".CACHE-PREFIX" }}-run-on-change-{{ .Environment.CIRCLE_STAGE }}-<< parameters.checksum >> # Key for the local maven installation of metabase-core (used by build-uberjar-drivers) cache-key-metabase-core: &CacheKeyMetabaseCore - key: v1-{{ checksum ".CACHE-PREFIX" }}-metabase-core-{{ checksum ".BACKEND-CHECKSUMS" }} + key: v5-{{ checksum ".CACHE-PREFIX" }}-metabase-core-{{ checksum ".BACKEND-CHECKSUMS" }} # Key for the drivers built by build-uberjar-drivers cache-key-drivers: &CacheKeyDrivers - key: v1-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }}-{{ checksum ".BACKEND-CHECKSUMS" }}-<< parameters.edition >> + key: v5-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }}-{{ checksum ".BACKEND-CHECKSUMS" }}-<< parameters.edition >> # This is also used by the uberjar-build-drivers step; this is a unique situation because the build-drivers script has # logic to determine whether to rebuild drivers or not that is quite a bit more sophisticated that the run-on-change @@ -250,17 +263,17 @@ cache-key-drivers: &CacheKeyDrivers # redshift driver. 
cache-keys-drivers-with-fallback-keys: &CacheKeyDrivers_WithFallbackKeys keys: - - v1-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }}-{{ checksum ".BACKEND-CHECKSUMS" }} - - v1-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }} - - v1-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>- + - v5-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }}-{{ checksum ".BACKEND-CHECKSUMS" }} + - v5-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>-{{ checksum ".MODULES-CHECKSUMS" }} + - v5-{{ checksum ".CACHE-PREFIX" }}-drivers-<< parameters.edition >>- # Key for frontend client built by uberjar-build-frontend step cache-key-frontend: &CacheKeyFrontend - key: v1-{{ checksum ".CACHE-PREFIX" }}-frontend-<< parameters.edition >>-{{ checksum ".FRONTEND-CHECKSUMS" }} + key: v5-{{ checksum ".CACHE-PREFIX" }}-frontend-<< parameters.edition >>-{{ checksum ".FRONTEND-CHECKSUMS" }} # Key for uberjar built by build-uberjar cache-key-uberjar: &CacheKeyUberjar - key: v1-{{ checksum ".CACHE-PREFIX" }}-uberjar-<< parameters.edition >>-{{ checksum ".BACKEND-CHECKSUMS" }}-{{ checksum ".FRONTEND-CHECKSUMS" }} + key: v5-{{ checksum ".CACHE-PREFIX" }}-uberjar-<< parameters.edition >>-{{ checksum ".BACKEND-CHECKSUMS" }}-{{ checksum ".FRONTEND-CHECKSUMS" }} ######################################################################################################################## @@ -396,12 +409,12 @@ commands: fi echo "Created checksums for $(cat << parameters.filename >> | wc -l) files" - run-lein-command: + run-clojure-command: parameters: before-steps: type: steps default: [] - lein-command: + clojure-args: type: string after-steps: type: steps @@ -411,9 +424,9 @@ commands: - restore-be-deps-cache - steps: << parameters.before-steps >> - run: - name: lein << parameters.lein-command >> + name: clojure << parameters.clojure-args 
>>:<< parameters.edition >>:<< parameters.edition >>-dev command: | - lein with-profile +ci,+<< parameters.edition >> << parameters.lein-command >> + clojure << parameters.clojure-args >>:<< parameters.edition >>:<< parameters.edition >>-dev no_output_timeout: 15m - steps: << parameters.after-steps >> - store_test_results: @@ -487,6 +500,15 @@ commands: wget --output-document=plugins/<< parameters.dest >> ${<< parameters.source >>} no_output_timeout: 15m + run-command: + parameters: + command: + type: string + steps: + - run: + name: Run command + command: << parameters.command >> + jobs: ######################################################################################################################## @@ -494,7 +516,7 @@ jobs: ######################################################################################################################## checkout: - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - checkout # .BACKEND-CHECKSUMS is every Clojure source file as well as dependency files like deps.edn and plugin manifests @@ -536,6 +558,9 @@ jobs: else echo '' > .CACHE-PREFIX fi + - run-yarn-command: + command-name: Create static visualization js bundle + command: build-static-viz - persist_to_workspace: root: /home/circleci/ paths: @@ -543,7 +568,7 @@ jobs: check-migrations: executor: - clojure-and-node + clojure-and-node-and-browsers steps: - attach-workspace - create-checksum-file: @@ -566,35 +591,51 @@ jobs: ######################################################################################################################## be-deps: - executor: clojure-and-node + executor: clojure-and-node-and-browsers parameters: <<: *Params steps: - attach-workspace - # This step is pretty slow, even with the cache, so only run it if project.clj has changed - # TODO -- we should cache the build script deps as well, and driver deps? 
+ # This step is pretty slow, even with the cache, so only run it if deps.edn has changed - run-on-change: - checksum: '{{ checksum "project.clj" }}-{{ checksum ".SCRIPTS-DEPS-CHECKSUMS" }}' + checksum: 'v5-{{ checksum "deps.edn" }}-{{ checksum ".SCRIPTS-DEPS-CHECKSUMS" }}' steps: - restore-be-deps-cache - - run: lein with-profile +include-all-drivers,+cloverage,+junit,+dev,+<< parameters.edition >> deps - - run: | - cd /home/circleci/metabase/metabase/bin/build-mb && clojure -P + - run: + name: Compile Java source file(s) + command: clojure -X:deps prep + - run: + name: Compile driver AOT namespaces + command: cd modules/drivers && clojure -X:deps prep + - run: + name: Fetch dependencies + command: clojure -P -X:dev:ci:ee:ee-dev:drivers:drivers-dev + - run: + name: Fetch dependencies (./bin/build/build-mb) + command: cd /home/circleci/metabase/metabase/bin/build-mb && clojure -P -M:test + # Not sure why this is needed since you would think build-mb would fetch this stuff as well. It doesn't + # seem to fetch everything tho. 
:shrug: + - run: + name: Fetch dependencies (./bin/build/build-drivers) + command: cd /home/circleci/metabase/metabase/bin/build-drivers && clojure -P -M:test - save_cache: name: Cache backend dependencies <<: *CacheKeyBackendDeps paths: - /home/circleci/.m2 + - /home/circleci/.gitlibs + - /home/circleci/metabase/metabase/java/target/classes + - /home/circleci/metabase/metabase/modules/drivers/sparksql/target/classes - lein: + clojure: parameters: e: type: executor - default: clojure-and-node + default: clojure-and-node-and-browsers before-steps: type: steps default: [] - lein-command: + clojure-args: type: string after-steps: type: steps @@ -612,22 +653,22 @@ jobs: - run-on-change: checksum: '{{ checksum ".BACKEND-CHECKSUMS" }}' steps: - - run-lein-command: + - run-clojure-command: before-steps: << parameters.before-steps >> - lein-command: << parameters.lein-command >> + clojure-args: << parameters.clojure-args >> after-steps: << parameters.after-steps >> edition: << parameters.edition >> - unless: condition: << parameters.skip-when-no-change >> steps: - - run-lein-command: + - run-clojure-command: before-steps: << parameters.before-steps >> - lein-command: << parameters.lein-command >> + clojure-args: << parameters.clojure-args >> after-steps: << parameters.after-steps >> edition: << parameters.edition >> be-linter-reflection-warnings: - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - attach-workspace - run-on-change: @@ -643,7 +684,7 @@ jobs: parameters: e: type: executor - default: clojure-and-node + default: clojure-and-node-and-browsers driver: type: string timeout: @@ -652,6 +693,9 @@ jobs: before-steps: type: steps default: [] + after-steps: + type: steps + default: [] description: type: string default: "" @@ -671,13 +715,14 @@ jobs: name: Test << parameters.driver >> driver << parameters.description >> environment: DRIVERS: << parameters.driver >> - command: << parameters.extra-env >> lein with-profile +ci,+junit,+ee test + 
command: << parameters.extra-env >> clojure -X:dev:ci:ee:ee-dev:drivers:drivers-dev:test no_output_timeout: << parameters.timeout >> - store_test_results: path: /home/circleci/metabase/metabase/target/junit + - steps: << parameters.after-steps >> test-build-scripts: - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - attach-workspace - run-on-change: @@ -721,7 +766,7 @@ jobs: ######################################################################################################################## fe-deps: - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - attach-workspace # This step is *really* slow, so we can skip it if yarn.lock hasn't changed since last time we ran it @@ -731,7 +776,7 @@ jobs: - restore-fe-deps-cache - run: name: Run yarn to install deps - command: yarn; + command: rm -rf node_modules/ && yarn --frozen-lockfile; no_output_timeout: 15m - save_cache: name: Cache frontend dependencies @@ -743,7 +788,7 @@ jobs: - /home/circleci/.cache/Cypress shared-tests-cljs: - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - run-yarn-command: command-name: Run Cljs tests for shared/ code @@ -753,7 +798,7 @@ jobs: # Unlike the other build-uberjar steps, this step should be run once overall and the results can be shared between # OSS and EE uberjars. 
build-uberjar-drivers: - executor: clojure-and-node + executor: clojure-and-node-and-browsers parameters: <<: *Params steps: @@ -787,25 +832,14 @@ jobs: name: Cache the built drivers <<: *CacheKeyDrivers paths: - - /home/circleci/metabase/metabase/modules/drivers/bigquery/target - - /home/circleci/metabase/metabase/modules/drivers/druid/target - - /home/circleci/metabase/metabase/modules/drivers/google/target - - /home/circleci/metabase/metabase/modules/drivers/googleanalytics/target - - /home/circleci/metabase/metabase/modules/drivers/mongo/target - - /home/circleci/metabase/metabase/modules/drivers/oracle/target - - /home/circleci/metabase/metabase/modules/drivers/presto/target - - /home/circleci/metabase/metabase/modules/drivers/redshift/target - - /home/circleci/metabase/metabase/modules/drivers/snowflake/target - - /home/circleci/metabase/metabase/modules/drivers/sparksql/target - - /home/circleci/metabase/metabase/modules/drivers/sqlite/target - - /home/circleci/metabase/metabase/modules/drivers/sqlserver/target - - /home/circleci/metabase/metabase/modules/drivers/vertica/target + - /home/circleci/metabase/metabase/resources/modules # Build the frontend client. parameters.edition determines whether we build the OSS or EE version. build-uberjar-frontend: parameters: <<: *Params - executor: clojure-and-node + executor: clojure-and-node-and-browsers + resource_class: large steps: - attach-workspace - run-on-change: @@ -829,7 +863,7 @@ jobs: build-uberjar: parameters: <<: *Params - executor: clojure-and-node + executor: clojure-and-node-and-browsers steps: - attach-workspace - run-on-change: @@ -857,7 +891,7 @@ jobs: # INTERACTIVE=false will tell the clojure build scripts not to do interactive retries etc. 
INTERACTIVE: "false" MB_EDITION: << parameters.edition >> - command: ./bin/build version drivers uberjar + command: ./bin/build version uberjar no_output_timeout: 15m - store_artifacts: path: /home/circleci/metabase/metabase/target/uberjar/metabase.jar @@ -880,9 +914,15 @@ jobs: source-folder: type: string default: "" + folder: + type: string + default: "" test-files: type: string default: "" + qa-db: + type: boolean + default: false before-steps: type: steps default: [] @@ -891,6 +931,7 @@ jobs: environment: MB_EDITION: << parameters.edition >> CYPRESS_GROUP: << parameters.cypress-group >> + QA_DB_ENABLED: << parameters.qa-db >> DISPLAY: "" steps: - attach-workspace @@ -904,8 +945,9 @@ jobs: name: Restore cached uberjar built in previous step <<: *CacheKeyUberjar - steps: << parameters.before-steps >> + # Make both `test-files` and `source-folder` parameters optional. Translates to: if `parameter` => run associated flag (`--spec` and `--folder`, respectively) command: | - run test-cypress-no-build <<# parameters.test-files >> --spec << parameters.test-files >> <> --folder << parameters.source-folder >> + run test-cypress-no-build <<# parameters.test-files >> --spec << parameters.test-files >> <> <<# parameters.source-folder >> --folder << parameters.source-folder >> <> after-steps: - store_artifacts: path: /home/circleci/metabase/metabase/cypress @@ -925,392 +967,486 @@ default_matrix: &Matrix workflows: version: 2 - build: + # build: + # jobs: + # - checkout + + # - check-migrations: + # requires: + # - checkout + + # - be-deps: + # requires: + # - checkout + + # - clojure: + # name: be-tests-<< matrix.edition >> + # requires: + # - be-deps + # e: java-8 + # clojure-args: -X:dev:ci:test + # skip-when-no-change: true + # <<: *Matrix + + # - clojure: + # name: be-tests-java-11-<< matrix.edition >> + # requires: + # - be-deps + # e: java-11 + # clojure-args: -X:dev:ci:test + # skip-when-no-change: true + # <<: *Matrix + + # - clojure: + # name: be-tests-java-16-<< 
matrix.edition >> + # requires: + # - be-deps + # e: java-16 + # clojure-args: -X:dev:ci:test + # skip-when-no-change: true + # <<: *Matrix + + # - clojure: + # name: be-linter-cloverage + # requires: + # - be-deps + # # TODO FIXME + # clojure-args: -X:dev:ee:ee-dev:test:cloverage + # after-steps: + # - run: + # name: Upload code coverage to codecov.io + # command: bash <(curl -s https://codecov.io/bash) -F back-end + + # skip-when-no-change: true + + # - test-driver: + # name: be-tests-bigquery-ee + # requires: + # - be-tests-ee + # driver: bigquery + + # - test-driver: + # name: be-tests-bigquery-cloud-sdk-ee + # requires: + # - be-tests-ee + # driver: bigquery-cloud-sdk + + # - test-driver: + # name: be-tests-druid-ee + # requires: + # - be-tests-ee + # e: druid + # driver: druid + + # - test-driver: + # name: be-tests-googleanalytics-ee + # requires: + # - be-tests-ee + # driver: googleanalytics + + # - test-driver: + # name: be-tests-mongo-ee + # description: "(Mongo 4.0)" + # requires: + # - be-tests-ee + # e: mongo-4-0 + # driver: mongo + + # - test-driver: + # name: be-tests-mongo-latest-ee + # description: "(Mongo latest)" + # requires: + # - be-tests-ee + # e: mongo-latest + # driver: mongo + + # - test-driver: + # name: be-tests-mysql-ee + # description: "(MySQL 5.7)" + # requires: + # - be-tests-ee + # e: + # name: mysql-5-7 + # driver: mysql + + # - test-driver: + # name: be-tests-mysql-latest-ee + # description: "(MySQL latest)" + # requires: + # - be-tests-ee + # e: + # name: mysql-latest + # driver: mysql + # # set up env vars for something named "MYSQL_SSL" to run MySQL SSL tests verifying connectivity with PEM cert + # # they are deliberately given a different name to prevent them from affecting the regular test run against + # # the configured MySQL instance, but there is one particular test (mysql-connect-with-ssl-and-pem-cert-test) + # # that overrides the MB_MYSQL_TEST_* values with them + # # the MYSQL_RDS_SSL_INSTANCE vars are secret and/or 
changeable, so they are defined in the CircleCI settings + # extra-env: >- + # MB_MYSQL_SSL_TEST_HOST=$MYSQL_RDS_SSL_INSTANCE_HOST + # MB_MYSQL_SSL_TEST_SSL=true + # MB_MYSQL_SSL_TEST_ADDITIONAL_OPTIONS='verifyServerCertificate=true' + # MB_MYSQL_SSL_TEST_SSL_CERT="$(cat /home/circleci/metabase/metabase/resources/certificates/rds-combined-ca-bundle.pem)" + # MB_MYSQL_SSL_TEST_USER=metabase + # MB_MYSQL_SSL_TEST_PASSWORD=$MYSQL_RDS_SSL_INSTANCE_PASSWORD + + # - test-driver: + # name: be-tests-mariadb-ee + # description: "(MariaDB 10.2)" + # requires: + # - be-tests-ee + # e: + # name: mariadb-10-2 + # driver: mysql + + # - test-driver: + # name: be-tests-mariadb-latest-ee + # description: "(MariaDB latest)" + # requires: + # - be-tests-ee + # e: + # name: mariadb-latest + # driver: mysql + + # - test-driver: + # name: be-tests-oracle-ee + # requires: + # - be-tests-ee + # before-steps: + # - fetch-jdbc-driver: + # source: ORACLE_JDBC_JAR + # dest: ojdbc8.jar + # driver: oracle + # extra-env: >- + # MB_ORACLE_SSL_TEST_SSL=true + # MB_ORACLE_SSL_TEST_PORT=2484 + # JVM_OPTS="-Djavax.net.ssl.trustStore=/home/circleci/metabase/metabase/resources/certificates/cacerts_with_RDS_root_ca.jks + # -Djavax.net.ssl.trustStoreType=JKS + # -Djavax.net.ssl.trustStorePassword=metabase $JAVA_OPTS" + + # - test-driver: + # name: be-tests-postgres-ee + # description: "(9.6)" + # requires: + # - be-tests-ee + # e: postgres-9-6 + # driver: postgres + + # - test-driver: + # name: be-tests-postgres-latest-ee + # description: "(Latest)" + # requires: + # - be-tests-ee + # e: postgres-latest + # driver: postgres + + # - test-driver: + # name: be-tests-presto-ee + # requires: + # - be-tests-ee + # e: presto-186 + # before-steps: + # - wait-for-port: + # port: 8080 + # driver: presto + + # - test-driver: + # name: be-tests-presto-jdbc-ee + # requires: + # - be-tests-ee + # e: presto-jdbc-env # specific env for running Presto JDBC tests (newer Presto version, SSL, etc.) 
+ # before-steps: + # - wait-for-port: + # port: 8443 + # - run: + # name: Create temp cacerts file based on bundled JDK one + # command: cp $JAVA_HOME/lib/security/cacerts /tmp/cacerts-with-presto-ssl.jks + # - run: + # name: Capture Presto server self signed CA + # command: | + # while [[ ! -s /tmp/presto-ssl-ca.pem ]]; + # do echo "Waiting to capture SSL CA" \ + # && openssl s_client -connect localhost:8443 2>/dev/null /tmp/presto-ssl-ca.pem \ + # && sleep 1; done + # - run: + # name: Convert Presto CA from PEM to DER + # command: openssl x509 -outform der -in /tmp/presto-ssl-ca.pem -out /tmp/presto-ssl-ca.der + # - run: + # name: Import Presto CA into temp cacerts file + # command: | + # sudo keytool -noprompt -import -alias presto -keystore /tmp/cacerts-with-presto-ssl.jks \ + # -storepass changeit -file /tmp/presto-ssl-ca.der -trustcacerts + # after-steps: + # - run: + # name: Capture max memory usage + # command: cat /sys/fs/cgroup/memory/memory.max_usage_in_bytes + # when: always + # driver: presto-jdbc + + # - test-driver: + # name: be-tests-redshift-ee + # requires: + # - be-tests-ee + # driver: redshift + # timeout: 15m + + # - test-driver: + # name: be-tests-snowflake-ee + # requires: + # - be-tests-ee + # driver: snowflake + # timeout: 115m + + # - test-driver: + # name: be-tests-sparksql-ee + # requires: + # - be-tests-ee + # e: sparksql + # before-steps: + # - wait-for-port: + # port: 10000 + # driver: sparksql + + # - test-driver: + # name: be-tests-sqlite-ee + # requires: + # - be-tests-ee + # driver: sqlite + + # - test-driver: + # name: be-tests-sqlserver-ee + # requires: + # - be-tests-ee + # e: sqlserver + # driver: sqlserver + + # - test-driver: + # name: be-tests-vertica-ee + # requires: + # - be-tests-ee + # e: vertica + # before-steps: + # - fetch-jdbc-driver: + # source: VERTICA_JDBC_JAR + # dest: vertica-jdbc-7.1.2-0.jar + # driver: vertica + + # - test-build-scripts: + # requires: + # - be-deps + + # - build-uberjar-drivers: + # name: 
build-uberjar-drivers-<< matrix.edition >> + # requires: + # - be-deps + # <<: *Matrix + + # - build-uberjar-frontend: + # name: build-uberjar-frontend-<< matrix.edition >> + # requires: + # - fe-deps + # <<: *Matrix + + # - build-uberjar: + # name: build-uberjar-<< matrix.edition >> + # requires: + # - build-uberjar-drivers-<< matrix.edition >> + # - build-uberjar-frontend-<< matrix.edition >> + # <<: *Matrix + + # - fe-deps: + # requires: + # - checkout + # - shared-tests-cljs: + # requires: + # - fe-deps + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # folder: ["admin", "binning", "collections", "dashboard", "dashboard-filters", "dashboard-filters-sql", "moderation", "native", "native-filters", "onboarding", "permissions", "question", "sharing", "smoketest", "visualizations"] + # name: e2e-tests-<< matrix.folder >>-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "smoketest-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/smoketest + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-admin-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "admin-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/admin + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-collections-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "collections-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/collections + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-dashboard-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "dashboard-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/dashboard + + # - fe-tests-cypress: + # matrix: + # 
parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-dashboard-filters-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "dashboard-filters-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/dashboard-filters + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-onboarding-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "onboarding-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/onboarding + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-native-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "native-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/native + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-native-filters-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "native-filters-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/native-filters + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-question-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "question-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/question + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-binning-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "binning-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/binning + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-sharing-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: 
"sharing-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/sharing + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: e2e-tests-visualizations-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "visualizations-<< matrix.edition >>" + # source-folder: frontend/test/metabase/scenarios/visualizations + + # - fe-tests-cypress: + # name: e2e-tests-mongo-4-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # e: fe-mongo-4 + # cypress-group: "mongo" + # source-folder: frontend/test/metabase-db/mongo + # before-steps: + # - wait-for-port: + # port: 27017 + # <<: *Matrix + + # - fe-tests-cypress: + # name: e2e-tests-postgres-12-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # e: fe-postgres-12 + # cypress-group: "postgres" + # source-folder: frontend/test/metabase-db/postgres + # before-steps: + # - wait-for-port: + # port: 5432 + # <<: *Matrix + + # - fe-tests-cypress: + # name: e2e-tests-mysql-8-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # e: fe-mysql-8 + # cypress-group: "mysql" + # source-folder: frontend/test/metabase-db/mysql + # before-steps: + # - wait-for-port: + # port: 3306 + # - wait-for-port: + # port: 5432 + # - wait-for-port: + # port: 27017 + + # - fe-tests-cypress: + # matrix: + # parameters: + # edition: ["ee", "oss"] + # name: percy-visual-tests-<< matrix.edition >> + # requires: + # - build-uberjar-<< matrix.edition >> + # cypress-group: "percy-visual-<< matrix.edition >>" + # test-files: "./frontend/test/metabase-visual/**/*.cy.spec.js" + build_and_push_image: jobs: - - checkout - - - check-migrations: - requires: - - checkout - - - be-deps: - requires: - - checkout - - - lein: - name: be-tests-<< matrix.edition >> - requires: - - be-deps - e: java-8 - lein-command: with-profile +junit test - skip-when-no-change: true - <<: *Matrix - - - lein: - name: 
be-tests-java-11-<< matrix.edition >> - requires: - - be-deps - e: java-11 - lein-command: with-profile +junit test - skip-when-no-change: true - <<: *Matrix - - - lein: - name: be-tests-java-16-<< matrix.edition >> - requires: - - be-deps - e: java-16 - lein-command: with-profile +junit test - skip-when-no-change: true - <<: *Matrix - - - lein: - name: be-linter-cloverage - requires: - - be-deps - lein-command: cloverage --codecov - after-steps: - - run: - name: Upload code coverage to codecov.io - command: bash <(curl -s https://codecov.io/bash) - skip-when-no-change: true - - - test-driver: - name: be-tests-bigquery-ee - requires: - - be-tests-ee - driver: bigquery - - - test-driver: - name: be-tests-druid-ee - requires: - - be-tests-ee - e: druid - driver: druid - - - test-driver: - name: be-tests-googleanalytics-ee - requires: - - be-tests-ee - driver: googleanalytics - - - test-driver: - name: be-tests-mongo-ee - requires: - - be-tests-ee - e: mongo - driver: mongo - - - test-driver: - name: be-tests-mysql-ee - description: "(MySQL 5.7)" - requires: - - be-tests-ee - e: - name: mysql-5-7 - driver: mysql - - - test-driver: - name: be-tests-mysql-latest-ee - description: "(MySQL latest)" - requires: - - be-tests-ee - e: - name: mysql-latest - driver: mysql - # set up env vars for something named "MYSQL_SSL" to run MySQL SSL tests verifying connectivity with PEM cert - # they are deliberately given a different name to prevent them from affecting the regular test run against - # the configured MySQL instance, but there is one particular test (mysql-connect-with-ssl-and-pem-cert-test) - # that overrides the MB_MYSQL_TEST_* values with them - # the MYSQL_RDS_SSL_INSTANCE vars are secret and/or changeable, so they are defined in the CircleCI settings - extra-env: >- - MB_MYSQL_SSL_TEST_HOST=$MYSQL_RDS_SSL_INSTANCE_HOST - MB_MYSQL_SSL_TEST_SSL=true - MB_MYSQL_SSL_TEST_ADDITIONAL_OPTIONS='verifyServerCertificate=true' - MB_MYSQL_SSL_TEST_SSL_CERT="$(cat 
/home/circleci/metabase/metabase/resources/certificates/rds-combined-ca-bundle.pem)" - MB_MYSQL_SSL_TEST_USER=metabase - MB_MYSQL_SSL_TEST_PASSWORD=$MYSQL_RDS_SSL_INSTANCE_PASSWORD - - - test-driver: - name: be-tests-mariadb-ee - description: "(MariaDB 10.2)" - requires: - - be-tests-ee - e: - name: mariadb-10-2 - driver: mysql - - - test-driver: - name: be-tests-mariadb-latest-ee - description: "(MariaDB latest)" - requires: - - be-tests-ee - e: - name: mariadb-latest - driver: mysql - - - test-driver: - name: be-tests-oracle-ee - requires: - - be-tests-ee - before-steps: - - fetch-jdbc-driver: - source: ORACLE_JDBC_JAR - dest: ojdbc8.jar - driver: oracle - extra-env: >- - MB_ORACLE_SSL_TEST_SSL=true - MB_ORACLE_SSL_TEST_PORT=2484 - JVM_OPTS="-Djavax.net.ssl.trustStore=/home/circleci/metabase/metabase/resources/certificates/cacerts_with_RDS_root_ca.jks - -Djavax.net.ssl.trustStoreType=JKS - -Djavax.net.ssl.trustStorePassword=metabase $JAVA_OPTS" - - - test-driver: - name: be-tests-postgres-ee - description: "(9.6)" - requires: - - be-tests-ee - e: postgres-9-6 - driver: postgres - - - test-driver: - name: be-tests-postgres-latest-ee - description: "(Latest)" - requires: - - be-tests-ee - e: postgres-latest - driver: postgres - - - test-driver: - name: be-tests-presto-ee - requires: - - be-tests-ee - e: presto - before-steps: - - wait-for-port: - port: 8080 - driver: presto - - - test-driver: - name: be-tests-redshift-ee - requires: - - be-tests-ee - driver: redshift - timeout: 15m - - - test-driver: - name: be-tests-snowflake-ee - requires: - - be-tests-ee - driver: snowflake - timeout: 115m - - - test-driver: - name: be-tests-sparksql-ee - requires: - - be-tests-ee - e: sparksql - before-steps: - - wait-for-port: - port: 10000 - driver: sparksql - - - test-driver: - name: be-tests-sqlite-ee - requires: - - be-tests-ee - driver: sqlite - - - test-driver: - name: be-tests-sqlserver-ee - requires: - - be-tests-ee - e: sqlserver - driver: sqlserver - - - test-driver: 
- name: be-tests-vertica-ee - requires: - - be-tests-ee - e: vertica - before-steps: - - fetch-jdbc-driver: - source: VERTICA_JDBC_JAR - dest: vertica-jdbc-7.1.2-0.jar - driver: vertica - - - test-build-scripts: - requires: - - be-deps - - - build-uberjar-drivers: - name: build-uberjar-drivers-<< matrix.edition >> - requires: - - be-deps - <<: *Matrix - - - build-uberjar-frontend: - name: build-uberjar-frontend-<< matrix.edition >> - requires: - - fe-deps - <<: *Matrix - - - build-uberjar: - name: build-uberjar-<< matrix.edition >> - requires: - - build-uberjar-drivers-<< matrix.edition >> - - build-uberjar-frontend-<< matrix.edition >> - <<: *Matrix - - - fe-deps: - requires: - - checkout - - shared-tests-cljs: - requires: - - fe-deps - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-smoketest-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "smoketest-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/smoketest - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-admin-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "admin-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/admin - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-collections-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "collections-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/collections - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-dashboard-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "dashboard-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/dashboard - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-filters-<< matrix.edition >> - 
requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "filters-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/filters - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-onboarding-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "onboarding-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/onboarding - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-native-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "native-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/native - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-question-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "question-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/question - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-binning-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "binning-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/binning - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-sharing-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "sharing-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/sharing - - - fe-tests-cypress: - matrix: - parameters: - edition: ["ee", "oss"] - name: e2e-tests-visualizations-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - cypress-group: "visualizations-<< matrix.edition >>" - source-folder: frontend/test/metabase/scenarios/visualizations - - - fe-tests-cypress: - name: e2e-tests-mongo-4-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - e: fe-mongo-4 - 
cypress-group: "mongo" - source-folder: frontend/test/metabase-db/mongo - before-steps: - - wait-for-port: - port: 27017 - <<: *Matrix - - - fe-tests-cypress: - name: e2e-tests-postgres-12-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - e: fe-postgres-12 - cypress-group: "postgres" - source-folder: frontend/test/metabase-db/postgres - before-steps: - - wait-for-port: - port: 5432 - <<: *Matrix - - - fe-tests-cypress: - name: e2e-tests-mysql-8-<< matrix.edition >> - requires: - - build-uberjar-<< matrix.edition >> - e: fe-mysql-8 - cypress-group: "mysql" - source-folder: frontend/test/metabase-db/mysql - before-steps: - - wait-for-port: - port: 3306 - <<: *Matrix + - aws-ecr/build_and_push_image: + account_id: ${AWS_ACCOUNT_ID} + auth: + - aws-cli/setup + context: + - org-global + repo: metabase-k8s + extra_build_args: '--compress' + platform: linux/amd64 + push_image: true + region: ${AWS_DEFAULT_REGION} + skip_when_tags_exist: true + tag: latest,${CIRCLE_SHA1} + # tag: latest,${CIRCLE_SHA1},${CIRCLE_TAG} + # filters: + # tags: + # only: /^v0.4*/ + \ No newline at end of file diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 586f35c85b94..37a7da19f811 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -2,4 +2,10 @@ FROM mcr.microsoft.com/vscode/devcontainers/java:11 RUN apt-key adv --refresh-keys --keyserver keyserver.ubuntu.com\ && apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install --no-install-recommends yarn + +RUN curl -fsSL https://deb.nodesource.com/setup_14.x | bash +RUN apt-get update && apt-get -y install --no-install-recommends nodejs + +RUN curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh \ + && bash ./linux-install-1.10.3.933.sh \ No newline at end of file diff --git a/.dir-locals.el b/.dir-locals.el index 72cadc8d07d1..0ea88960076c 100644 --- a/.dir-locals.el +++ 
b/.dir-locals.el @@ -1,49 +1,78 @@ -((nil . ((indent-tabs-mode . nil) ; always use spaces for tabs - (require-final-newline . t))) ; add final newline on save - (js2-mode . ((js2-mode-show-parse-errors . nil) ; these settings will let flycheck do everything through eslint, - (js2-mode-show-strict-warnings . nil))) ; because js2-mode can't handle flowtype - (clojure-mode . ((eval . (progn - ;; Specify which arg is the docstring for certain macros - ;; (Add more as needed) - (put 'defendpoint 'clojure-doc-string-elt 3) - (put 'defendpoint-async 'clojure-doc-string-elt 3) - (put 'api/defendpoint 'clojure-doc-string-elt 3) - (put 'api/defendpoint-async 'clojure-doc-string-elt 3) - (put 'defsetting 'clojure-doc-string-elt 2) - (put 'setting/defsetting 'clojure-doc-string-elt 2) - (put 's/defn 'clojure-doc-string-elt 2) - (put 'p.types/defprotocol+ 'clojure-doc-string-elt 2) +((nil + ;; always use spaces for tabs + (indent-tabs-mode . nil) + ;; add final newline on save + (require-final-newline . t) + ;; prefer keeping source width about ~118, GitHub seems to cut off stuff at either 119 or 120 and it's nicer + ;; to look at code in GH when you don't have to scroll back and forth + (fill-column . 118) + ;; tell find-things-fast to always use this directory as project root regardless of presence of other + ;; deps.edn files + (ftf-project-finders . (ftf-get-top-git-dir))) - ;; Define custom indentation for functions inside metabase. - ;; This list isn't complete; add more forms as we come across them. - (define-clojure-indent - (db/insert-many! 
1) - (let-404) - (macros/case 0) - (match 1) - (mbql.match/match 1) - (mt/test-drivers 1) - (mt/query 1) - (mbql.match/match-one 1) - (mbql.match/replace 1) - (mbql.match/replace-in 2) - (impl/test-migrations 2) - (l/matche '(1 (:defn))) - (l/matcha '(1 (:defn))) - (p/defprotocol+ '(1 (:defn))) - (p.types/defprotocol+ '(1 (:defn))) - (p.types/def-abstract-type '(1 (:defn))) - (p.types/deftype+ '(2 nil nil (:defn))) - (p/def-map-type '(2 nil nil (:defn))) - (p.types/defrecord+ '(2 nil nil (:defn))) - (qp.streaming/streaming-response 1) - (prop/for-all 1) - (tools.macro/macrolet '(1 (:defn)))))) - (clojure-indent-style . always-align) - ;; if you're using clj-refactor (highly recommended!) - (cljr-favor-prefix-notation . nil) - ;; prefer keeping source width about ~118, GitHub seems to cut off stuff at either 119 or 120 and - ;; it's nicer to look at code in GH when you don't have to scroll back and forth - (fill-column . 118) - (clojure-docstring-fill-column . 118) - (cider-preferred-build-tool . lein)))) + (js2-mode + ;; these settings will let flycheck do everything through eslint, + (js2-mode-show-parse-errors . nil) + ;; because js2-mode can't handle flowtype + (js2-mode-show-strict-warnings . nil)) + + (clojure-mode + ;; Specify which arg is the docstring for certain macros + ;; (Add more as needed) + (eval . (put 'defendpoint 'clojure-doc-string-elt 3)) + (eval . (put 'defendpoint-async 'clojure-doc-string-elt 3)) + (eval . (put 'define-premium-feature 'clojure-doc-string-elt 2)) + (eval . (put 'api/defendpoint 'clojure-doc-string-elt 3)) + (eval . (put 'api/defendpoint-async 'clojure-doc-string-elt 3)) + (eval . (put 'defsetting 'clojure-doc-string-elt 2)) + (eval . (put 'setting/defsetting 'clojure-doc-string-elt 2)) + (eval . (put 's/defn 'clojure-doc-string-elt 2)) + (eval . (put 'p.types/defprotocol+ 'clojure-doc-string-elt 2)) + ;; Define custom indentation for functions inside metabase. 
+ ;; This list isn't complete; add more forms as we come across them. + ;; + ;; `put-clojure-indent' is a safe-local-eval-function, so use a bunch of calls to that + ;; instead of one call to `define-clojure-indent' + (eval . (put-clojure-indent 'c/step 1)) + (eval . (put-clojure-indent 'db/insert-many! 1)) + (eval . (put-clojure-indent 'impl/test-migrations 2)) + (eval . (put-clojure-indent 'let-404 0)) + (eval . (put-clojure-indent 'macros/case 0)) + (eval . (put-clojure-indent 'match 1)) + (eval . (put-clojure-indent 'mbql.match/match 1)) + (eval . (put-clojure-indent 'mbql.match/match-one 1)) + (eval . (put-clojure-indent 'mbql.match/replace 1)) + (eval . (put-clojure-indent 'mbql.match/replace-in 2)) + (eval . (put-clojure-indent 'mt/dataset 1)) + (eval . (put-clojure-indent 'mt/query 1)) + (eval . (put-clojure-indent 'mt/test-drivers 1)) + (eval . (put-clojure-indent 'prop/for-all 1)) + (eval . (put-clojure-indent 'qp.streaming/streaming-response 1)) + ;; these ones have to be done with `define-clojure-indent' for now because of upstream bug + ;; https://github.com/clojure-emacs/clojure-mode/issues/600 once that's resolved we should use `put-clojure-indent' + ;; instead. Please don't add new entries unless they don't work with `put-clojure-indent' + (eval . (define-clojure-indent + (l/matcha '(1 (:defn))) + (l/matche '(1 (:defn))) + (p.types/def-abstract-type '(1 (:defn))) + (p.types/defprotocol+ '(1 (:defn))) + (p.types/defrecord+ '(2 nil nil (:defn))) + (p.types/deftype+ '(2 nil nil (:defn))) + (p/def-map-type '(2 nil nil (:defn))) + (p/defprotocol+ '(1 (:defn))) + (tools.macro/macrolet '(1 ((:defn)) :form)))) + (cider-clojure-cli-aliases . "dev:drivers:drivers-dev:ee:ee-dev:user") + (clojure-indent-style . always-align) + (cljr-favor-prefix-notation . nil) + (clojure-docstring-fill-column . 118) + (cider-preferred-build-tool . clojure-cli)) + + ("shared" + (clojure-mode + (cider-default-cljs-repl . shadow-select) + (cider-shadow-default-options . 
"node-repl") + (cider-preferred-build-tool . shadow-cljs))) + + ("bin" + (clojure-mode + (cider-clojure-cli-aliases . "dev")))) diff --git a/.dockerignore b/.dockerignore index 6891f6c8b2d6..67486beb4266 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,12 +1,25 @@ -.babel_cache/* +.babel_cache docs/* OSX/* target/* - -**node_modules +.circleci +.cpcache +.devcontainer +.github +.husky +.lsp +.shadow-cljs +.github +.vscode +hooks/* +test/* +test_config/* +test_modules/* +test_resources/* +node_modules **metabase.jar *.db -.dockerignore Dockerfile +.dockerignore \ No newline at end of file diff --git a/.eslintrc b/.eslintrc index 633392d26eaa..d43f54802017 100644 --- a/.eslintrc +++ b/.eslintrc @@ -21,13 +21,13 @@ "react/no-unescaped-entities": 2, "react/jsx-no-target-blank": 2, "react/jsx-key": 2, + "react/forbid-component-props": [2, { "forbid": ["w", "h"] }], "prefer-const": [1, { "destructuring": "all" }], "no-useless-escape": 0, "no-only-tests/no-only-tests": "error", "complexity": ["error", { "max": 54 }] }, "globals": { - "pending": false, "before": true, "cy": true, "Cypress": true diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 5e1d2d0bad58..ab16313bc1dc 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -5,7 +5,6 @@ ### Tests - [ ] Run the frontend and Cypress end-to-end tests with `yarn lint && yarn test`) -- [ ] If there are changes to the backend codebase, run the backend tests with `lein test && lein lint && ./bin/reflection-linter` - +- [ ] If there are changes to the backend codebase, run the backend tests with `clojure -X:dev:test` - [ ] Sign the [Contributor License Agreement](https://docs.google.com/a/metabase.com/forms/d/1oV38o7b9ONFSwuzwmERRMi9SYrhYeOrkbmNaq9pOJ_E/viewform) (unless it's a tiny documentation change). 
diff --git a/.github/scripts/README.md b/.github/scripts/README.md new file mode 100644 index 000000000000..faaf8e036f03 --- /dev/null +++ b/.github/scripts/README.md @@ -0,0 +1,3 @@ +# GitHub Action Test Scripts + +Scripts related to running integration tests (ex: through GitHub actions) diff --git a/.github/scripts/run-presto-kerberos-integration-test.sh b/.github/scripts/run-presto-kerberos-integration-test.sh new file mode 100755 index 000000000000..7e2aac1e7c3e --- /dev/null +++ b/.github/scripts/run-presto-kerberos-integration-test.sh @@ -0,0 +1,68 @@ +#! /usr/bin/env bash +# runs one or more Metabase test(s) against a Kerberized Presto instance +set -eo pipefail + +# Need Java commands on $PATH, which apparently is not yet the case +export PATH="$PATH:$JAVA_HOME/bin" + +# ensure java commmand is available +which java + +# install clojure version needed for Metabase +curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh +chmod +x linux-install-1.10.3.933.sh +./linux-install-1.10.3.933.sh + +RESOURCES_DIR=/app/source/resources + +# ensure the expected files are in place, in the resources dir +if [ ! -f "$RESOURCES_DIR/ssl_keystore.jks" ]; then + echo "$RESOURCES_DIR/ssl_keystore.jks does not exist; cannot run test" >&2 + exit 11 +fi + +if [ ! -f "$RESOURCES_DIR/krb5.conf" ]; then + echo "$RESOURCES_DIR/krb5.conf does not exist; cannot run test" >&2 + exit 12 +fi + +if [ ! 
-f "$RESOURCES_DIR/client.keytab" ]; then + echo "$RESOURCES_DIR/client.keytab does not exist; cannot run test" >&2 + exit 13 +fi + +# Copy the JDK cacerts file to our resources +cp $JAVA_HOME/lib/security/cacerts $RESOURCES_DIR/cacerts-with-presto-ca.jks + +# Capture the Presto server self signed CA in PEM format +openssl s_client -showcerts -connect presto-kerberos:7778 </dev/null | openssl x509 -outform PEM > $RESOURCES_DIR/presto-ssl-root-ca.pem + +# Convert the Presto server self signed CA to DER format +openssl x509 -outform der -in $RESOURCES_DIR/presto-ssl-root-ca.pem -out $RESOURCES_DIR/presto-ssl-root-ca.der + +# Add Presto's self signed CA to the truststore +keytool -noprompt -import -alias presto-kerberos -keystore $RESOURCES_DIR/cacerts-with-presto-ca.jks \ + -storepass changeit -file $RESOURCES_DIR/presto-ssl-root-ca.der -trustcacerts + +ADDITIONAL_OPTS="SSLKeyStorePath=$RESOURCES_DIR/ssl_keystore.jks&SSLKeyStorePassword=presto\ +&SSLTrustStorePath=$RESOURCES_DIR/cacerts-with-presto-ca.jks&SSLTrustStorePassword=changeit" + +# Prepare dependencies +source "./bin/prep.sh" +prep_deps + +# Set up the environment variables pointing to all of this, and run some tests +DRIVERS=presto-jdbc \ +MB_ENABLE_PRESTO_JDBC_DRIVER=true \ +MB_PRESTO_JDBC_TEST_HOST=presto-kerberos \ +MB_PRESTO_JDBC_TEST_PORT=7778 \ +MB_PRESTO_JDBC_TEST_SSL=true \ +MB_PRESTO_JDBC_TEST_KERBEROS=true \ +MB_PRESTO_JDBC_TEST_USER=bob@EXAMPLE.COM \ +MB_PRESTO_JDBC_TEST_KERBEROS_PRINCIPAL=bob@EXAMPLE.COM \ +MB_PRESTO_JDBC_TEST_KERBEROS_REMOTE_SERVICE_NAME=HTTP \ +MB_PRESTO_JDBC_TEST_KERBEROS_KEYTAB_PATH=$RESOURCES_DIR/client.keytab \ +MB_PRESTO_JDBC_TEST_KERBEROS_CONFIG_PATH=$RESOURCES_DIR/krb5.conf \ +MB_PRESTO_JDBC_TEST_ADDITIONAL_OPTIONS=$ADDITIONAL_OPTS \ +clojure -X:dev:test:drivers:drivers-dev :only metabase.driver.presto-jdbc-test diff --git a/.github/workflows/auto-backport.yml b/.github/workflows/auto-backport.yml new file mode 100644 index 000000000000..21eb7454345b --- /dev/null +++ b/.github/workflows/auto-backport.yml @@ 
-0,0 +1,131 @@ +# Creates a pull request with the latest release branch as a target with a cherry-picked commit if an associated pull request has `backport` label +name: AutoBackport + +on: + push: + branches: + - master + +jobs: + pr_info: + name: Check if the commit should be backported + runs-on: ubuntu-latest + outputs: + title: ${{ fromJson(steps.collect_pr_info.outputs.result).title }} + number: ${{ fromJson(steps.collect_pr_info.outputs.result).pullRequestNumber }} + author: ${{ fromJson(steps.collect_pr_info.outputs.result).author }} + should_backport: ${{ fromJson(steps.collect_pr_info.outputs.result).hasBackportLabel }} + steps: + - uses: actions/github-script@v4 + id: collect_pr_info + with: + script: | + const commitMessage = context.payload.commits[0].message; + const pullRequestNumbers = Array.from(commitMessage.matchAll(/\(#(.*?)\)/g)) + + if (pullRequestNumbers.length === 0) { + return; + } + + if (pullRequestNumbers.length > 1) { + throw "Multiple PRs are associated with this commit"; + } + + const pullRequestNumber = pullRequestNumbers[0][1]; + + const { data } = await github.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: pullRequestNumber + }); + + const hasBackportLabel = data.labels.some((label) => label.name === 'backport'); + const { title, user } = data + + console.log(`PR #${pullRequestNumber}: "${title}" hasBackportLabel=${hasBackportLabel}`) + + return { + author: user.login, + pullRequestNumber, + title: data.title, + hasBackportLabel + } + + get_latest_release_branch: + name: Get latest release branch + runs-on: ubuntu-latest + outputs: + branch_name: ${{ steps.get_branch_name.outputs.result }} + steps: + - uses: actions/github-script@v4 + id: get_branch_name + with: + result-encoding: string + script: | + const releaseBranches = await github.git.listMatchingRefs({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: "heads/release-x.", + }); + + const getVersionFromBranch = branch => { + const 
match = branch.match(/release-x\.(.*?)\.x/); + return match && parseInt(match[1]) + }; + const latestReleaseBranch = releaseBranches.data + .filter(branch => getVersionFromBranch(branch.ref) !== null) + .reduce((prev, current) => getVersionFromBranch(prev.ref) > getVersionFromBranch(current.ref) ? prev : current); + const latestReleaseBranchName = latestReleaseBranch.ref.replace(/^refs\/heads\//, ""); + + console.log(`Latest release branch: ${latestReleaseBranchName}`) + + return latestReleaseBranchName; + + create_backport_pull_request: + runs-on: ubuntu-latest + name: Create a backport PR with the commit + needs: [pr_info, get_latest_release_branch] + if: ${{ needs.pr_info.outputs.should_backport == 'true' }} + env: + TARGET_BRANCH: ${{ needs.get_latest_release_branch.outputs.branch_name }} + ORIGINAL_PULL_REQUEST_NUMBER: ${{ needs.pr_info.outputs.number }} + ORIGINAL_TITLE: ${{ needs.pr_info.outputs.title }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v2 + name: Checkout + with: + fetch-depth: 0 + - run: | + git config --global user.email "metabase-github-automation@metabase.com" + git config --global user.name "$GITHUB_ACTOR" + + BACKPORT_BRANCH="backport-$GITHUB_SHA" + + git fetch --all + git checkout -b "${BACKPORT_BRANCH}" origin/"${TARGET_BRANCH}" + git cherry-pick "${GITHUB_SHA}" + git push -u origin "${BACKPORT_BRANCH}" + + hub pull-request -b "${TARGET_BRANCH}" -h "${BACKPORT_BRANCH}" -l "auto-backported" -a "${GITHUB_ACTOR}" -F- <<<"🤖 backported \"${ORIGINAL_TITLE}\" + + #${ORIGINAL_PULL_REQUEST_NUMBER}" + + notify_when_failed: + runs-on: ubuntu-latest + name: Notify about failure + needs: [pr_info, create_backport_pull_request] + if: ${{ failure() }} + steps: + - uses: actions/github-script@v4 + with: + script: | + const { GITHUB_SERVER_URL, GITHUB_REPOSITORY, GITHUB_RUN_ID } = process.env; + const runUrl = `${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}` + + github.issues.createComment({ + 
issue_number: ${{ needs.pr_info.outputs.number }}, + owner: context.repo.owner, + repo: context.repo.repo, + body: `@${{ needs.pr_info.outputs.author }} could not automatically create a backport PR 😩 [[Logs]](${runUrl})` + }) diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 9660e0511426..0b050a73ea88 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -4,13 +4,9 @@ on: pull_request: push: branches: - - master - - 'release**' - - 'feature**' - tags: - '**' paths: - - '**.clj' + - '**.clj*' - '**.edn' - '**.java' - '**/metabase-plugin.yaml' @@ -26,53 +22,31 @@ jobs: - name: Run clj-kondo run: docker run -v $PWD:/work --rm cljkondo/clj-kondo clj-kondo --config /work/lint-config.edn --lint /work/src /work/enterprise/backend/src /work/backend/mbql/src /work/shared/src - be-linter-bikeshed: - runs-on: ubuntu-20.04 - timeout-minutes: 10 - steps: - - uses: actions/checkout@v2 - - name: Prepare JDK 11 - uses: actions/setup-java@v1 - with: - java-version: 11 - - name: Get M2 cache - uses: actions/cache@v2 - with: - path: ~/.m2 - key: ${{ runner.os }}-bikeshed-${{ hashFiles('**/project.clj') }} - - run: lein with-profile +ci bikeshed - be-linter-eastwood: runs-on: ubuntu-20.04 - timeout-minutes: 10 + timeout-minutes: 20 steps: - uses: actions/checkout@v2 - name: Prepare JDK 11 uses: actions/setup-java@v1 with: java-version: 11 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh - name: Get M2 cache uses: actions/cache@v2 with: - path: ~/.m2 - key: ${{ runner.os }}-eastwood-${{ hashFiles('**/project.clj') }} - - run: lein with-profile +ci eastwood - - be-linter-docstring-checker: - runs-on: ubuntu-20.04 - timeout-minutes: 10 - steps: - - uses: actions/checkout@v2 - - name: Prepare JDK 11 - uses: actions/setup-java@v1 - with: - java-version: 11 - - name: Get M2 cache - uses: actions/cache@v2 - with: - path: 
~/.m2 - key: ${{ runner.os }}-docstring-checker-${{ hashFiles('**/project.clj') }} - - run: lein with-profile +ci docstring-checker + path: | + ~/.m2 + ~/.gitlibs + key: ${{ runner.os }}-eastwood-${{ hashFiles('**/deps.edn') }} + - name: Compile Java & AOT Sources + run: | + source ./bin/prep.sh && prep_deps + - run: clojure -X:dev:ee:ee-dev:drivers:drivers-dev:eastwood + name: Run Eastwood linter be-linter-namespace-decls: runs-on: ubuntu-20.04 @@ -83,26 +57,19 @@ jobs: uses: actions/setup-java@v1 with: java-version: 11 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh - name: Get M2 cache uses: actions/cache@v2 with: - path: ~/.m2 - key: ${{ runner.os }}-namespace-decls-${{ hashFiles('**/project.clj') }} - - run: lein with-profile +ci check-namespace-decls - - be-linter-reflection-warnings: - runs-on: ubuntu-20.04 - timeout-minutes: 10 - steps: - - uses: actions/checkout@v2 - - name: Prepare JDK 11 - uses: actions/setup-java@v1 - with: - java-version: 11 - - name: Get M2 cache - uses: actions/cache@v2 - with: - path: ~/.m2 - key: ${{ runner.os }}-reflection-warnings-${{ hashFiles('**/project.clj') }} - - run: ./bin/reflection-linter - name: Run reflection warnings checker + path: | + ~/.m2 + ~/.gitlibs + key: ${{ runner.os }}-namespace-decls-${{ hashFiles('**/deps.edn') }} + - name: Compile Java & AOT Sources + run: | + source ./bin/prep.sh && prep_deps + - run: clojure -X:dev:ee:ee-dev:drivers:drivers-dev:namespace-checker + name: Check ns forms diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml new file mode 100644 index 000000000000..d0c6466bd219 --- /dev/null +++ b/.github/workflows/backport.yml @@ -0,0 +1,131 @@ +# Cherry-picks commits from current branch to a specified one in a command "@metabase-bot backport release-x.40.x" +name: Backport + +on: + issue_comment: + types: [created] + +jobs: + create_pull_request: + name: 
Creates a pull request + if: contains(github.event.comment.body, '@metabase-bot backport') + runs-on: ubuntu-latest + steps: + - uses: actions/github-script@v4 + id: branch_info + with: + script: | + // Example: @metabase-bot backport release-x.40.x + const [_botName, _command, targetBranch] = context.payload.comment.body.split(" "); + console.log(`Target branch is ${targetBranch}`); + + const { data: originalPullRequest } = await github.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.issue.number, + }); + + const { data: commits } = await github.pulls.listCommits({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.issue.number, + }); + + const targetRef = await github.git.getRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: `heads/${targetBranch}`, + }); + + const backportBranch = `backport-${originalPullRequest.head.ref}` + + try { + await github.git.getRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: `heads/${backportBranch}`, + }); + } catch(e) { + if (e.status === 404) { + await github.git.createRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: `refs/heads/${backportBranch}`, + sha: targetRef.data.object.sha, + }); + } + } + + return { + backportBranch, + targetBranch, + originalPullRequest, + startSha: commits[0].sha, + endSha: commits[commits.length - 1].sha + } + - uses: actions/checkout@v2 + name: Cherry-pick commits and create PR + with: + fetch-depth: 0 + - run: | + git config --global user.email "metabase-github-automation@metabase.com" + git config --global user.name "$GITHUB_ACTOR" + + git fetch --all + + git checkout "${BACKPORT_BRANCH}" + git reset --hard origin/${TARGET_BRANCH} + + if [[ -z $(git ls-remote --heads origin ${ORIGINAL_HEAD_REF}) ]]; then + echo "PR has been merged, searching for a squashed commit in the base branch" + echo "searching for a commit in a ${ORIGINAL_BASE_REF} that contains pull 
request number ${ORIGINAL_PULL_REQUEST_NUMBER}" + SQUASHED_COMMIT=$(env -i git log ${ORIGINAL_BASE_REF} --grep="(#${ORIGINAL_PULL_REQUEST_NUMBER})" --format="%H") + echo "found commit ${SQUASHED_COMMIT}" + git cherry-pick ${SQUASHED_COMMIT} + else + echo "PR has not been merged, copying all commits" + git cherry-pick ${ORIGINAL_BASE_SHA}..${ORIGINAL_HEAD_SHA} + fi + + git push origin "${BACKPORT_BRANCH}" --force-with-lease + + if [[ $(hub pr list -b "${TARGET_BRANCH}" -h "${BACKPORT_BRANCH}" -s "open") ]]; then + echo "PR already exists" + else + hub pull-request -b "${TARGET_BRANCH}" -h "${BACKPORT_BRANCH}" -l "auto-backported" -a "${GITHUB_ACTOR}" -F- <<<"🤖 backported \"${ORIGINAL_TITLE}\" + + #${ORIGINAL_PULL_REQUEST_NUMBER}" + echo "New PR has been created" + fi + env: + ORIGINAL_TITLE: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.title }} + ORIGINAL_BASE_REF: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.base.ref }} + ORIGINAL_BASE_SHA: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.base.sha }} + ORIGINAL_HEAD_REF: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.head.ref }} + ORIGINAL_HEAD_SHA: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.head.sha }} + ORIGINAL_PULL_REQUEST_NUMBER: ${{ fromJson(steps.branch_info.outputs.result).originalPullRequest.number }} + TARGET_BRANCH: ${{ fromJson(steps.branch_info.outputs.result).targetBranch }} + BACKPORT_BRANCH: ${{ fromJson(steps.branch_info.outputs.result).backportBranch }} + START_SHA: ${{ fromJson(steps.branch_info.outputs.result).startSha }} + END_SHA: ${{ fromJson(steps.branch_info.outputs.result).endSha }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + notify_when_failed: + runs-on: ubuntu-latest + name: Notify about failure + needs: create_pull_request + if: ${{ failure() }} + steps: + - uses: actions/github-script@v4 + with: + script: | + const { GITHUB_SERVER_URL, GITHUB_REPOSITORY, GITHUB_RUN_ID} = 
process.env; + const runUrl = `${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}` + const author = context.payload.comment.user.login; + + github.issues.createComment({ + issue_number: context.payload.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: `@${author} could not automatically create a backport PR 😩 [[Logs]](${runUrl})` + }) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index c2f730b1f135..7191522823b5 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -23,4 +23,4 @@ jobs: run: while ! curl -s localhost:3000/api/health; do sleep 1; done timeout-minutes: 1 - name: Check API health - run: curl -s localhost:3000/api/health \ No newline at end of file + run: curl -s localhost:3000/api/health diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml index 4ebee9f2ac2f..aa9c701958ea 100644 --- a/.github/workflows/frontend.yml +++ b/.github/workflows/frontend.yml @@ -4,12 +4,6 @@ on: pull_request: push: branches: - - master - - 'release**' - - 'feature**' - - 'fix**' - - 'ci**' - tags: - '**' paths: - 'frontend/**' @@ -18,6 +12,7 @@ on: - 'docs/**' - '**/package.json' - '**/yarn.lock' + - '**/.eslintrc' - '.github/workflows/**' jobs: @@ -49,6 +44,11 @@ jobs: uses: actions/setup-node@v1 with: node-version: 14.x + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-cljs-${{ hashFiles('**/shadow-cljs.edn') }} - name: Get yarn cache uses: actions/cache@v2 with: @@ -78,21 +78,31 @@ jobs: fe-tests-unit: runs-on: ubuntu-20.04 - timeout-minutes: 10 + timeout-minutes: 12 steps: - uses: actions/checkout@v2 - name: Prepare Node.js uses: actions/setup-node@v1 with: node-version: 14.x + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-cljs-${{ hashFiles('**/shadow-cljs.edn') }} - name: Get yarn cache uses: actions/cache@v2 with: path: ~/.cache/yarn key: ${{ runner.os }}-yarn-${{ 
hashFiles('**/yarn.lock') }} - run: yarn install --frozen-lockfile --prefer-offline - - run: yarn run test-unit + - run: yarn run test-unit --coverage --silent name: Run frontend unit tests + - name: Upload coverage to codecov.io + uses: codecov/codecov-action@v2 + with: + files: ./coverage/lcov.info + flags: front-end fe-tests-timezones: runs-on: ubuntu-20.04 @@ -103,6 +113,11 @@ jobs: uses: actions/setup-node@v1 with: node-version: 14.x + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-cljs-${{ hashFiles('**/shadow-cljs.edn') }} - name: Get yarn cache uses: actions/cache@v2 with: diff --git a/.github/workflows/i18n.yml b/.github/workflows/i18n.yml index 04606c99a52d..2f4ee2e8374f 100644 --- a/.github/workflows/i18n.yml +++ b/.github/workflows/i18n.yml @@ -4,13 +4,9 @@ on: pull_request: push: branches: - - master - - 'release**' - - 'feature**' - tags: - '**' paths: - - '**.clj' + - '**.clj*' - '**.js' - '**.jsx' - '.github/workflows/**' @@ -38,8 +34,8 @@ jobs: - name: Install Clojure CLI run: | - curl -O https://download.clojure.org/install/linux-install-1.10.1.708.sh && - sudo bash ./linux-install-1.10.1.708.sh + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh - run: ./bin/i18n/update-translation-template name: Check i18n tags/make sure template can be built diff --git a/.github/workflows/percy-issue-comment.yml b/.github/workflows/percy-issue-comment.yml new file mode 100644 index 000000000000..13e376bfec26 --- /dev/null +++ b/.github/workflows/percy-issue-comment.yml @@ -0,0 +1,144 @@ +# Triggers Percy job by "@metabase-bot run visual tests" comment in a PR +name: PercyIssueComment + +on: + issue_comment: + types: [created] + +jobs: + pr_info: + if: github.event.comment.body == '@metabase-bot run visual tests' + runs-on: ubuntu-20.04 + outputs: + pull_request_number: ${{ fromJson(steps.fetch_pr.outputs.data).number }} + branch_name: ${{ 
fromJson(steps.fetch_pr.outputs.data).head.ref }} + commit_sha: ${{ fromJson(steps.fetch_pr.outputs.data).head.sha }} + steps: + - name: Fetch issue + uses: octokit/request-action@v2.x + id: fetch_issue + with: + route: GET ${{ github.event.issue.url }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Fetch PR + uses: octokit/request-action@v2.x + id: fetch_pr + with: + route: GET ${{ fromJson(steps.fetch_issue.outputs.data).pull_request.url }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + build: + runs-on: ubuntu-20.04 + needs: pr_info + timeout-minutes: 60 + strategy: + matrix: + edition: [oss] + env: + MB_EDITION: ${{ matrix.edition }} + INTERACTIVE: false + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ needs.pr_info.outputs.branch_name }} + token: ${{ secrets.GITHUB_TOKEN }} + - name: Prepare Node.js + uses: actions/setup-node@v1 + with: + node-version: 14.x + - name: Prepare JDK 8 + uses: actions/setup-java@v1 + with: + java-version: 8 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh + - name: Check versions + run: | + echo "Node.js `node --version`" + echo "yarn `yarn --version`" + java -version + + - name: Get yarn cache + uses: actions/cache@v2 + with: + path: ~/.cache/yarn + key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/deps.edn') }} + + - run: yarn install --frozen-lockfile --prefer-offline + - run: ./bin/build + + - name: Mark with the commit hash + run: git rev-parse --short HEAD > COMMIT-ID + - name: Calculate SHA256 checksum + run: sha256sum ./target/uberjar/metabase.jar > SHA256.sum + - name: Upload JARs as artifact + uses: actions/upload-artifact@v2 + with: + name: metabase-${{ matrix.edition }}-uberjar + path: | + ./target/uberjar/metabase.jar + ./COMMIT-ID + ./SHA256.sum + + percy: 
+ timeout-minutes: 30 + needs: [build, pr_info] + runs-on: ubuntu-20.04 + env: + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ needs.pr_info.outputs.branch_name }} + token: ${{ secrets.GITHUB_TOKEN }} + - name: Prepare Node.js + uses: actions/setup-node@v1 + with: + node-version: 14.x + - name: Prepare JDK 8 + uses: actions/setup-java@v1 + with: + java-version: 8 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh + - name: Check versions + run: | + echo "Node.js `node --version`" + echo "yarn `yarn --version`" + java -version + - name: Get yarn cache + uses: actions/cache@v2 + with: + path: ~/.cache/yarn + key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} + - run: yarn install --frozen-lockfile --prefer-offline + + - uses: actions/download-artifact@v2 + name: Retrieve uberjar artifact + with: + name: metabase-oss-uberjar + - name: Get the version info + run: | + jar xf target/uberjar/metabase.jar version.properties + mv version.properties resources/ + + - name: Percy Test + run: yarn run test-visual-no-build + env: + PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }} + PERCY_COMMIT: ${{ needs.pr_info.outputs.commit_sha }} + PERCY_BRANCH: ${{ needs.pr_info.outputs.branch_name }} + PERCY_PULL_REQUEST: ${{ needs.pr_info.outputs.pull_request_number }} diff --git a/.github/workflows/percy.yml b/.github/workflows/percy.yml new file mode 100644 index 000000000000..33f82ef0f12c --- /dev/null +++ b/.github/workflows/percy.yml @@ -0,0 +1,115 @@ +# Triggers Percy job on push to master and release branches to create baseline screenshots +name: Percy + +on: + push: + branches: + - master + - "release-**" + paths-ignore: + - "docs/**" + - "**.md" + - "**unit.spec.js" + - "frontend/test/**" + - "!frontend/test/metabase-visual/**" + +jobs: + build: + runs-on: ubuntu-20.04 + 
timeout-minutes: 60 + strategy: + matrix: + edition: [oss] + env: + MB_EDITION: ${{ matrix.edition }} + INTERACTIVE: false + steps: + - uses: actions/checkout@v2 + - name: Prepare Node.js + uses: actions/setup-node@v1 + with: + node-version: 14.x + - name: Prepare JDK 8 + uses: actions/setup-java@v1 + with: + java-version: 8 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh + - name: Check versions + run: | + echo "Node.js `node --version`" + echo "yarn `yarn --version`" + java -version + + - name: Get yarn cache + uses: actions/cache@v2 + with: + path: ~/.cache/yarn + key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/deps.edn') }} + + - run: yarn install --frozen-lockfile --prefer-offline + - run: ./bin/build + + - name: Mark with the commit hash + run: git rev-parse --short HEAD > COMMIT-ID + - name: Calculate SHA256 checksum + run: sha256sum ./target/uberjar/metabase.jar > SHA256.sum + - name: Upload JARs as artifact + uses: actions/upload-artifact@v2 + with: + name: metabase-${{ matrix.edition }}-uberjar + path: | + ./target/uberjar/metabase.jar + ./COMMIT-ID + ./SHA256.sum + + percy: + runs-on: ubuntu-20.04 + timeout-minutes: 30 + needs: build + steps: + - uses: actions/checkout@v2 + - name: Prepare Node.js + uses: actions/setup-node@v1 + with: + node-version: 14.x + - name: Prepare JDK 8 + uses: actions/setup-java@v1 + with: + java-version: 8 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh + - name: Check versions + run: | + echo "Node.js `node --version`" + echo "yarn `yarn --version`" + java -version + - name: Get yarn cache + uses: actions/cache@v2 + with: + path: ~/.cache/yarn + key: ${{ runner.os }}-yarn-${{ 
hashFiles('**/yarn.lock') }} + - run: yarn install --frozen-lockfile --prefer-offline + + - uses: actions/download-artifact@v2 + name: Retrieve uberjar artifact + with: + name: metabase-oss-uberjar + - name: Get the version info + run: | + jar xf target/uberjar/metabase.jar version.properties + mv version.properties resources/ + + - name: Percy Test + run: yarn run test-visual-no-build + env: + PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }} diff --git a/.github/workflows/presto-kerberos-integration-test.yml b/.github/workflows/presto-kerberos-integration-test.yml new file mode 100644 index 000000000000..bbbbd6ebf754 --- /dev/null +++ b/.github/workflows/presto-kerberos-integration-test.yml @@ -0,0 +1,70 @@ +name: Kerberized Presto Integration Test + +on: + pull_request: + push: + branches: + - master + - 'release**' + - 'feature**' + tags: + - '**' + paths: + - '**/presto_jdbc/**' + - '**/presto_jdbc.clj' + +jobs: + run-presto-kerberos-test: + runs-on: ubuntu-20.04 + timeout-minutes: 40 + steps: + - name: Install babashka + run: > + mkdir -p /tmp/babashka-install \ + && cd /tmp/babashka-install \ + && curl -sLO https://raw.githubusercontent.com/babashka/babashka/master/install \ + && chmod +x install \ + && sudo ./install \ + && cd - + - name: Checkout Metabase repository + uses: actions/checkout@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + - name: Check out Presto Kerberos Docker Compose + uses: actions/checkout@v2 + with: + repository: metabase/presto-kerberos-docker + ref: add-test_data-catalog + token: ${{ secrets.GITHUB_TOKEN }} + path: presto-kerberos-docker + - name: Bring up Presto+Kerberos cluster + run: cd presto-kerberos-docker && docker-compose up -d && cd .. + - name: Run Presto test query from command line (sanity check) + run: cd presto-kerberos-docker && ./test.sh && cd .. 
+ # Since we are managing the Docker containers from the GitHub action container, we need to copy all the + # relevant resources now, into the resources dir for later consumption by the app + - name: Copy Presto SSL keystore to resources + run: docker cp presto-kerberos:/tmp/ssl_keystore.jks resources + - name: Copy krb5.conf file to resources + run: docker cp presto-kerberos:/etc/krb5.conf resources + - name: Copy client.keytab file to resources + run: docker cp presto-kerberos:/home/presto/client.keytab resources + - name: Checkout mba + uses: actions/checkout@v2 + with: + repository: metabase/mba + ref: master + token: ${{ secrets.GITHUB_TOKEN }} + path: mba-src + - name: ls mba + run: ls -latr mba-src + - name: Symlink mba + run: cd mba-src && sudo ln -s $(pwd)/src/main.clj /usr/local/bin/mba && chmod +x /usr/local/bin/mba && cd .. + - name: Ensure mba + run: which mba + - name: Run Metabase via MBA + run: /home/runner/work/metabase/metabase/mba-src/src/main.clj --mb . --data-db postgres-data -n example.com up + - name: Run test script in MBA instance + run: > + mba --mb . 
--data-db postgres-data -n example.com \ + run .github/scripts/run-presto-kerberos-integration-test.sh diff --git a/.github/workflows/uberjar.yml b/.github/workflows/uberjar.yml index a4183f2dea14..173ef8584bd4 100644 --- a/.github/workflows/uberjar.yml +++ b/.github/workflows/uberjar.yml @@ -3,9 +3,6 @@ name: Uberjar on: push: branches: - - master - - 'release-**' - tags: - '**' paths-ignore: - 'docs/**' @@ -35,15 +32,13 @@ jobs: java-version: 8 - name: Install Clojure CLI run: | - curl -O https://download.clojure.org/install/linux-install-1.10.1.708.sh && - sudo bash ./linux-install-1.10.1.708.sh + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh - name: Check versions run: | echo "Node.js `node --version`" echo "yarn `yarn --version`" java -version - echo "Clojure `clojure -e "(println (clojure-version))"`" - lein --version - name: Get yarn cache uses: actions/cache@v2 @@ -53,12 +48,13 @@ jobs: - name: Get M2 cache uses: actions/cache@v2 with: - path: ~/.m2 - key: ${{ runner.os }}-m2-${{ hashFiles('**/project.clj') }}-${{ hashFiles('**/deps.edn') }} + path: | + ~/.m2 + ~/.gitlibs + key: ${{ runner.os }}-m2-${{ hashFiles('**/deps.edn') }} - run: yarn install --frozen-lockfile --prefer-offline - - run: lein with-profile +include-all-drivers,+cloverage,+junit,+${{ matrix.edition }} deps - - run: ./bin/build + - run: MB_EDITION=${{ matrix.edition }} ./bin/build - name: Mark with the commit hash run: git rev-parse --short HEAD > COMMIT-ID @@ -81,7 +77,7 @@ jobs: strategy: matrix: edition: [ee, oss] - java-version: [8, 11, 16] + java-version: [8, 11, 17] steps: - name: Prepare JRE (Java Run-time Environment) uses: actions/setup-java@v1 @@ -96,9 +92,9 @@ jobs: name: metabase-${{ matrix.edition }}-uberjar - name: Launch uberjar - run: | - java -jar ./target/uberjar/metabase.jar & - sleep 180 + run: java -jar ./target/uberjar/metabase.jar & + - name: Wait for Metabase to start + run: while ! 
curl -s localhost:3000/api/health; do sleep 1; done timeout-minutes: 5 - name: Check API health diff --git a/.github/workflows/whitespace.yml b/.github/workflows/whitespace.yml new file mode 100644 index 000000000000..6189ceb11a27 --- /dev/null +++ b/.github/workflows/whitespace.yml @@ -0,0 +1,41 @@ +name: Whitespace + +on: + pull_request: + push: + branches: + - '**' + paths: + - '**.yaml' + - '**.yml' + - '**.clj' + - '**.edn' + - '**.el' + - '**.html' + - '**.json' + - '**.js*' + - '**.sh' + +jobs: + whitespace-linter: + runs-on: ubuntu-20.04 + timeout-minutes: 5 + steps: + - uses: actions/checkout@v2 + - name: Prepare JDK 11 + uses: actions/setup-java@v1 + with: + java-version: 11 + - name: Install Clojure CLI + run: | + curl -O https://download.clojure.org/install/linux-install-1.10.3.933.sh && + sudo bash ./linux-install-1.10.3.933.sh + - name: Get M2 cache + uses: actions/cache@v2 + with: + path: | + ~/.m2 + ~/.gitlibs + key: ${{ runner.os }}-whitespace-linter-${{ hashFiles('**/deps.edn') }} + - run: clojure -T:whitespace-linter lint + name: Run Whitespace Linter diff --git a/.github/workflows/yaml.yml b/.github/workflows/yaml.yml index 630d5c7ab2bf..3f5f4a3b15a5 100644 --- a/.github/workflows/yaml.yml +++ b/.github/workflows/yaml.yml @@ -4,10 +4,6 @@ on: pull_request: push: branches: - - master - - 'release**' - - 'feature**' - tags: - '**' paths: - '**.yml' diff --git a/.gitignore b/.gitignore index 1676473ceb07..b41ae1537ac3 100644 --- a/.gitignore +++ b/.gitignore @@ -50,6 +50,8 @@ /resources/frontend_client/index.html /resources/frontend_client/public.html /resources/i18n/*.edn +/resources/license-backend-third-party.txt +/resources/license-frontend-third-party.txt /resources/namespaces.edn /resources/sample-dataset.db.trace.db /resources/version.properties @@ -78,6 +80,7 @@ dev/src/dev/nocommit/ **/cypress_sample_dataset.json /frontend/src/cljs .shadow-cljs +.clj-kondo/cache/ # lsp: ignore all but the config file .lsp/* diff --git a/.lein-classpath 
b/.lein-classpath deleted file mode 100644 index a38ac2ef8384..000000000000 --- a/.lein-classpath +++ /dev/null @@ -1 +0,0 @@ -lein_tasks diff --git a/.lsp/config.edn b/.lsp/config.edn index 0018f36a72b2..117bb4b453b8 100644 --- a/.lsp/config.edn +++ b/.lsp/config.edn @@ -1,4 +1,4 @@ {:keep-require-at-start? true :show-docs-arity-on-same-line? true - :project-specs [{:project-path "project.clj" - :classpath-cmd ["lein" "with-profile" "+ee" "classpath"]}]} + :project-specs [{:project-path "deps.edn" + :classpath-cmd ["clojure" "-A:dev:ee:ee-dev:drivers:drivers-dev" "-Spath"]}]} diff --git a/.percy.yml b/.percy.yml new file mode 100644 index 000000000000..b09920de5a83 --- /dev/null +++ b/.percy.yml @@ -0,0 +1,7 @@ +version: 2 +snapshot: + widths: + - 1280 + min-height: 800 +discovery: + disable-cache: true diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 000000000000..679f283d3f2e --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,22 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Debug with Firefox", + "request": "launch", + "type": "firefox", + "url": "http://localhost:3000", + "webRoot": "${workspaceFolder}" + }, + { + "name": "Debug with Chrome", + "request": "launch", + "type": "pwa-chrome", + "url": "http://localhost:3000", + "webRoot": "${workspaceFolder}" + }, + ] +} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 4c1983d9d322..62bfe7b2f8a8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,65 +2,32 @@ # STAGE 1.1: builder frontend ################### -FROM metabase/ci:java-11-lein-2.9.6-clj-1.10.3.822-04-22-2021 as frontend +FROM metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers as frontend ARG MB_EDITION=oss -WORKDIR /app/source +WORKDIR /home/circleci -COPY . . 
-RUN NODE_ENV=production MB_EDITION=$MB_EDITION yarn --frozen-lockfile && yarn build && bin/i18n/build-translation-resources - -################### -# STAGE 1.2: backend deps -################### - -FROM metabase/ci:java-11-lein-2.9.6-clj-1.10.3.822-04-22-2021 as backend - -WORKDIR /app/source - -# backend dependencies -COPY project.clj . -RUN lein deps :tree - -################### -# STAGE 1.3: drivers -################### - -FROM metabase/ci:java-11-lein-2.9.6-clj-1.10.3.822-04-22-2021 as drivers - -ARG MB_EDITION=oss - -WORKDIR /app/source - -COPY --from=backend /root/.m2/repository/. /root/.m2/repository/. - -# add the rest of the source -COPY . . - -# build the app -RUN INTERACTIVE=false MB_EDITION=$MB_EDITION sh bin/build-drivers.sh +COPY --chown=circleci . . +RUN NODE_ENV=production MB_EDITION=$MB_EDITION yarn --frozen-lockfile && \ + yarn build && yarn build-static-viz && bin/i18n/build-translation-resources ################### # STAGE 1.4: main builder ################### -FROM metabase/ci:java-11-lein-2.9.6-clj-1.10.3.822-04-22-2021 as builder +FROM metabase/ci:circleci-java-11-clj-1.10.3.929-07-27-2021-node-browsers as builder ARG MB_EDITION=oss -WORKDIR /app/source +WORKDIR /home/circleci # try to reuse caching as much as possible -COPY --from=frontend /root/.m2/repository/. /root/.m2/repository/. -COPY --from=frontend /app/source/. . -COPY --from=backend /root/.m2/repository/. /root/.m2/repository/. -COPY --from=backend /app/source/. . -COPY --from=drivers /root/.m2/repository/. /root/.m2/repository/. -COPY --from=drivers /app/source/. . +COPY --from=frontend /home/circleci/.m2/repository/. /home/circleci/.m2/repository/. +COPY --from=frontend /home/circleci/. . 
# build the app -RUN INTERACTIVE=false MB_EDITION=$MB_EDITION bin/build version uberjar +RUN INTERACTIVE=false MB_EDITION=$MB_EDITION bin/build version drivers uberjar # ################### # # STAGE 2: runner @@ -83,7 +50,7 @@ RUN apk upgrade && apk add --update-cache --no-cache bash ttf-dejavu fontconfig mkdir -p /plugins && chmod a+rwx /plugins # add Metabase script and uberjar -COPY --from=builder /app/source/target/uberjar/metabase.jar /app/ +COPY --from=builder /home/circleci/target/uberjar/metabase.jar /app/ COPY bin/docker/run_metabase.sh /app/ # expose our default runtime port diff --git a/OSX/Metabase/Backend/ResetPasswordTask.m b/OSX/Metabase/Backend/ResetPasswordTask.m index 1260ff631316..b675fd1e06b0 100644 --- a/OSX/Metabase/Backend/ResetPasswordTask.m +++ b/OSX/Metabase/Backend/ResetPasswordTask.m @@ -23,58 +23,47 @@ - (void)resetPasswordForEmailAddress:(NSString *)emailAddress success:(void (^)( // first, we need to stop the main Metabase task so we can access the DB NSLog(@"Stopping Metabase task in order to reset password..."); [[AppDelegate instance] stopMetabaseTask]; - + self.task = [[NSTask alloc] init]; - - // time travelers from the future: this is hardcoded since I'm the only one who works on this. 
I give you permission to fix it - Cam - #define DEBUG_RUN_LEIN_TASK 0 - - #if DEBUG_RUN_LEIN_TASK - self.task.environment = @{@"MB_DB_FILE": DBPath()}; - self.task.currentDirectoryPath = @"/Users/cam/metabase"; - self.task.launchPath = @"/usr/local/bin/lein"; - self.task.arguments = @[@"run", @"reset-password", emailAddress]; - NSLog(@"Launching ResetPasswordTask\nMB_DB_FILE='%@' lein run reset-password %@", DBPath(), emailAddress); - #else - self.task.environment = @{@"MB_DB_FILE": DBPath()}; - self.task.launchPath = JREPath(); - self.task.arguments = @[@"-Djava.awt.headless=true", // this prevents the extra java icon from popping up in the dock when running - @"-Xverify:none", // disable bytecode verification for faster launch speed, not really needed here since JAR is packaged as part of signed .app - @"-jar", UberjarPath(), - @"reset-password", emailAddress]; - NSLog(@"Launching ResetPasswordTask\nMB_DB_FILE='%@' %@ -jar %@ reset-password %@", DBPath(), JREPath(), UberjarPath(), emailAddress); - #endif - + + self.task.environment = @{@"MB_DB_FILE": DBPath()}; + self.task.launchPath = JREPath(); + self.task.arguments = @[@"-Djava.awt.headless=true", // this prevents the extra java icon from popping up in the dock when running + @"-Xverify:none", // disable bytecode verification for faster launch speed, not really needed here since JAR is packaged as part of signed .app + @"-jar", UberjarPath(), + @"reset-password", emailAddress]; + NSLog(@"Launching ResetPasswordTask\nMB_DB_FILE='%@' %@ -jar %@ reset-password %@", DBPath(), JREPath(), UberjarPath(), emailAddress); + __weak ResetPasswordTask *weakSelf = self; self.task.terminationHandler = ^(NSTask *task) { NSLog(@"ResetPasswordTask terminated with status: %d", task.terminationStatus); [weakSelf terminate]; - + dispatch_async(dispatch_get_main_queue(), ^{ if (!task.terminationStatus && weakSelf.output.length >= 38) { // should be of format _<36-char-uuid>, e.g. 
"1_b20466b9-1f5b-488d-8ab6-5039107482f8" successBlock(weakSelf.output); } else { errorBlock(weakSelf.output.length ? weakSelf.output : @"An unknown error has occured."); } - + // now restart the main Metabase task NSLog(@"Reset password complete, restarting Metabase task..."); [[AppDelegate instance] startMetabaseTask]; }); }; - + [self.task launch]; }); } - (void)readHandleDidRead:(NSString *)message { NSLog(@"[PasswordResetTask] %@", message); - + /// output comes back like "STATUS [[[message]]]" NSRegularExpression *regex = [NSRegularExpression regularExpressionWithPattern:@"^(?:(?:OK)||(?:FAIL))\\s+\\[\\[\\[(.+)\\]\\]\\]\\s*$" options:NSRegularExpressionAnchorsMatchLines|NSRegularExpressionAllowCommentsAndWhitespace error:NULL]; if (![regex numberOfMatchesInString:message options:0 range:NSMakeRange(0, message.length)]) return; - + NSString *result = [regex stringByReplacingMatchesInString:message options:0 range:NSMakeRange(0, message.length) withTemplate:@"$1"]; if (result) { self.output = result; diff --git a/OSX/src/macos_release.clj b/OSX/src/macos_release.clj index 6ed28f0e9989..f08d37d6d855 100644 --- a/OSX/src/macos_release.clj +++ b/OSX/src/macos_release.clj @@ -22,7 +22,7 @@ :upload upload/upload!)) (defn- do-step! [step-name] - (let [thunk (or (get steps* (keyword step-name)) + (let [thunk (or (get steps* (u/parse-as-keyword step-name)) (throw (ex-info (format "Invalid step name: %s" step-name) {:found (set (keys steps*))})))] (println (colorize/magenta (format "Running step %s..." step-name))) diff --git a/backend/junit/test/metabase/junit.clj b/backend/junit/test/metabase/junit.clj deleted file mode 100644 index 49bb3aec0fc9..000000000000 --- a/backend/junit/test/metabase/junit.clj +++ /dev/null @@ -1,73 +0,0 @@ -(ns metabase.junit - "Formatter for JUnit test output for CI." 
- (:require [clojure.pprint :as pp] - [clojure.string :as str] - [medley.core :as m] - [metabase.util :as u] - [pjstadig.print :as p] - [test-report-junit-xml.core :as junit-xml] - [test_report_junit_xml.shaded.clojure.data.xml :as xml])) - -(defn- escape-unprintable-characters - [s] - (str/join (for [c s] - (if (and (Character/isISOControl c) - (not (Character/isWhitespace c))) - (format "&#%d;" (int c)) - c)))) - -(defn- decolorize-and-escape - "Remove ANSI color escape sequences, then encode things as character entities as needed" - [s] - (-> s u/decolorize escape-unprintable-characters)) - -(defn- event-description [{:keys [file line context message]}] - (str - (format "%s:%d" file line) - (when (seq context) - (str "\n" (str/join " " (reverse context)))) - (when message - (str "\n" message)))) - -(defn- print-expected [expected actual] - (p/rprint "expected: ") - (pp/pprint expected) - (p/rprint " actual: ") - (pp/pprint actual) - (p/clear)) - -(defn- result-output [{:keys [expected actual diffs message], :as event}] - (let [s (with-out-str - (println (event-description event)) - ;; this code is adapted from `pjstadig.util` - (p/with-pretty-writer - (fn [] - (if (seq diffs) - (doseq [[actual [a b]] diffs] - (print-expected expected actual) - (p/rprint " diff:") - (if a - (do (p/rprint " - ") - (pp/pprint a) - (p/rprint " + ")) - (p/rprint " + ")) - (when b - (pp/pprint b)) - (p/clear)) - (print-expected expected actual)))))] - (decolorize-and-escape s))) - -(defmulti format-result - {:arglists '([event])} - :type) - -(defmethod format-result :default - [event] - (-> (#'junit-xml/format-result event) - (m/update-existing-in [:attrs :message] decolorize-and-escape) - (m/update-existing :content (comp xml/cdata decolorize-and-escape)))) - -(defmethod format-result :fail - [event] - {:tag :failure - :content (xml/cdata (result-output event))}) diff --git a/bin/build b/bin/build index 7833eef8487e..d360de021445 100755 --- a/bin/build +++ b/bin/build @@ -2,8 +2,18 
@@ set -euo pipefail +# switch to project root directory if we're not already there +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/.." + source "./bin/check-clojure-cli.sh" check_clojure_cli +source "./bin/clear-outdated-cpcaches.sh" +clear_outdated_cpcaches + +source "./bin/prep.sh" +prep_deps + cd bin/build-mb clojure -M -m build $@ diff --git a/bin/build-driver.sh b/bin/build-driver.sh index ed438085bb68..8380131c246a 100755 --- a/bin/build-driver.sh +++ b/bin/build-driver.sh @@ -5,12 +5,22 @@ set -eo pipefail driver="$1" if [ ! "$driver" ]; then - echo "Usage: ./bin/build-driver.sh [driver]" + echo "Usage: ./bin/build-driver.sh [edition]" exit -1 fi +# switch to project root directory if we're not already there +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/.." + source "./bin/check-clojure-cli.sh" check_clojure_cli +source "./bin/clear-outdated-cpcaches.sh" +clear_outdated_cpcaches + +source "./bin/prep.sh" +prep_deps + cd bin/build-drivers -clojure -M -m build-driver "$driver" +clojure -M -m build-driver $@ diff --git a/bin/build-drivers.sh b/bin/build-drivers.sh index f50aac9d22c2..bea160d25b78 100755 --- a/bin/build-drivers.sh +++ b/bin/build-drivers.sh @@ -2,8 +2,18 @@ set -euo pipefail +# switch to project root directory if we're not already there +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/.." + source "./bin/check-clojure-cli.sh" check_clojure_cli +source "./bin/clear-outdated-cpcaches.sh" +clear_outdated_cpcaches + +source "./bin/prep.sh" +prep_deps + cd bin/build-drivers clojure -M -m build-drivers $@ diff --git a/bin/build-drivers/README.md b/bin/build-drivers/README.md index 0033455aba5d..8862c9502df3 100644 --- a/bin/build-drivers/README.md +++ b/bin/build-drivers/README.md @@ -7,7 +7,7 @@ There are three main entrypoints. Shell script wrappers are provided for conveni ### `build-drivers` -Builds *all* drivers as needed. 
If drivers were recently built and no relevant source code changed, skips rebuild. +Builds *all* drivers as needed. ``` cd bin/build-drivers diff --git a/bin/build-drivers/deps.edn b/bin/build-drivers/deps.edn index 0231cb4d6284..5c89854601cd 100644 --- a/bin/build-drivers/deps.edn +++ b/bin/build-drivers/deps.edn @@ -1,17 +1,29 @@ {:paths ["src"] :deps - {common/common {:local/root "../common"} - cheshire/cheshire {:mvn/version "5.8.1"} - commons-codec/commons-codec {:mvn/version "1.14"} - hiccup/hiccup {:mvn/version "1.0.5"} - io.forward/yaml {:mvn/version "1.0.9"} ; don't upgrade to 1.0.10 -- doesn't work on Java 8 (!) - leiningen/leiningen {:mvn/version "2.9.5"} ; for parsing Leiningen projects - org.flatland/ordered {:mvn/version "1.5.9"} ; used by io.forward/yaml -- need the newer version - stencil/stencil {:mvn/version "0.5.0"}} + {common/common {:local/root "../common"} + com.github.seancorfield/depstar {:mvn/version "2.1.278"} + cheshire/cheshire {:mvn/version "5.8.1"} + commons-codec/commons-codec {:mvn/version "1.14"} + hiccup/hiccup {:mvn/version "1.0.5"} + io.forward/yaml {:mvn/version "1.0.9"} ; Don't upgrade yet, new version doesn't support Java 8 (see https://github.com/owainlewis/yaml/issues/37) + io.github.clojure/tools.build {:git/tag "v0.1.6", :git/sha "5636e61"} + org.clojure/tools.deps.alpha {:mvn/version "0.12.985"} + org.flatland/ordered {:mvn/version "1.5.9"} ; used by io.forward/yaml -- need the newer version + stencil/stencil {:mvn/version "0.5.0"} + ;; local source + metabase/metabase-core {:local/root "../.."} + metabase/driver-modules {:local/root "../../modules/drivers"}} + + :jvm-opts + ["-XX:-OmitStackTraceInFastThrow"] :aliases - {:test {:extra-paths ["test"] - :extra-deps {com.cognitect/test-runner {:git/url "https://github.com/cognitect-labs/test-runner.git" - :sha "209b64504cb3bd3b99ecfec7937b358a879f55c1"}} - :main-opts ["-m" "cognitect.test-runner"]}}} + {:dev + {:extra-paths ["test"]} + + :test + {:extra-paths ["test"] + 
:extra-deps {com.cognitect/test-runner {:git/url "https://github.com/cognitect-labs/test-runner.git" + :sha "209b64504cb3bd3b99ecfec7937b358a879f55c1"}} + :main-opts ["-m" "cognitect.test-runner"]}}} diff --git a/bin/build-drivers/src/build_driver.clj b/bin/build-drivers/src/build_driver.clj index 1c792623eafb..5824f5880be4 100644 --- a/bin/build-drivers/src/build_driver.clj +++ b/bin/build-drivers/src/build_driver.clj @@ -7,4 +7,4 @@ (u/exit-when-finished-nonzero-on-exception (when-not (seq driver) (throw (ex-info "Usage: clojure -m build-driver [edition]" {}))) - (build-driver/build-driver! (keyword driver) (or (keyword edition) :oss)))) + (build-driver/build-driver! (u/parse-as-keyword driver) (or (u/parse-as-keyword edition) :oss)))) diff --git a/bin/build-drivers/src/build_drivers.clj b/bin/build-drivers/src/build_drivers.clj index 8a993513b455..c2116ae330a7 100644 --- a/bin/build-drivers/src/build_drivers.clj +++ b/bin/build-drivers/src/build_drivers.clj @@ -2,12 +2,20 @@ "Entrypoint for `bin/build-drivers.sh`. Builds all drivers, if needed." (:require [build-drivers.build-driver :as build-driver] [clojure.java.io :as io] - [metabuild-common.core :as u])) + [metabuild-common.core :as u]) + (:import java.io.File)) (defn- all-drivers [] (->> (.listFiles (io/file (u/filename u/project-root-directory "modules" "drivers"))) - (filter #(.isDirectory %)) ;; watch for errant DS_Store files on os_x - (map (comp keyword #(.getName %))))) + (filter (fn [^File d] ; + (and + ;; watch for errant DS_Store files on os_x + (.isDirectory d) + ;; ignore stuff like .cpcache + (not (.isHidden d)) + ;; only consider a directory to be a driver if it contains a lein or deps build file + (.exists (io/file d "deps.edn"))))) + (map (comp keyword #(.getName ^File %))))) (defn build-drivers! [edition] (let [edition (or edition :oss)] @@ -19,4 +27,4 @@ (defn -main [& [edition]] (u/exit-when-finished-nonzero-on-exception - (build-drivers! (keyword edition)))) + (build-drivers! 
(u/parse-as-keyword edition)))) diff --git a/bin/build-drivers/src/build_drivers/build_driver.clj b/bin/build-drivers/src/build_drivers/build_driver.clj index 498e51f175ac..f05b81b57064 100644 --- a/bin/build-drivers/src/build_drivers/build_driver.clj +++ b/bin/build-drivers/src/build_drivers/build_driver.clj @@ -1,168 +1,35 @@ (ns build-drivers.build-driver - "Logic for building a single driver." - (:require [build-drivers.checksum :as checksum] - [build-drivers.common :as c] - [build-drivers.install-driver-locally :as install-locally] - [build-drivers.metabase :as metabase] - [build-drivers.plugin-manifest :as manifest] - [build-drivers.strip-and-compress :as strip-and-compress] + (:require [build-drivers.common :as c] + [build-drivers.compile-source-files :as compile-source-files] + [build-drivers.copy-source-files :as copy-source-files] + [build-drivers.create-uberjar :as create-uberjar] [build-drivers.verify :as verify] - [clojure.string :as str] - [colorize.core :as colorize] - [environ.core :as env] [metabuild-common.core :as u])) -(defn- copy-driver! - "Copy the driver JAR from its `target/` directory to `resources/modules`/." - [driver] - (u/step (format "Copy %s driver uberjar from %s -> %s" - driver - (u/assert-file-exists (c/driver-jar-build-path driver)) - (c/driver-jar-destination-path driver)) - (u/delete-file-if-exists! (c/driver-jar-destination-path driver)) - (u/create-directory-unless-exists! c/driver-jar-destination-directory) - (u/copy-file! (c/driver-jar-build-path driver) - (c/driver-jar-destination-path driver)))) - -(defn- clean-driver-artifacts! - "Delete built JARs of `driver`." - [driver] - (u/step (format "Delete %s driver artifacts" driver) - (u/delete-file-if-exists! (c/driver-target-directory driver)) +(defn clean! [driver] + (u/step "Clean" + (u/delete-file-if-exists! (c/compiled-source-target-dir driver)) (u/delete-file-if-exists! (c/driver-jar-destination-path driver)))) -(defn- clean-parents! 
- "Delete built JARs and local Maven installations of the parent drivers of `driver`." - [driver] - (u/step (format "Clean %s parent driver artifacts" driver) - (doseq [parent (manifest/parent-drivers driver)] - (clean-driver-artifacts! parent) - (install-locally/clean! parent) - (clean-parents! parent)))) - -(defn- clean-all! - "Delete all artifacts relating to building `driver`, including the driver JAR itself and installed - `metabase-core`/Metabase uberjar and any parent driver artifacts." - [driver] - (u/step "Clean all" - (clean-driver-artifacts! driver) - (clean-parents! driver) - (metabase/clean-metabase!))) - -(declare build-driver!) - -(defn- build-parents! - "Build and install to the local Maven repo any parent drivers of `driver` (e.g. `:google` is a parent of `:bigquery`). - The driver must be built as an uberjar so we can remove duplicate classes during the `strip-and-compress` stage; it - must be installed as a library so we can use it as a `:provided` dependency when building the child driver." - [driver edition] - (u/step (format "Build %s parent drivers" driver) - (when-let [parents (not-empty (manifest/parent-drivers driver))] - (doseq [parent parents] - (build-parents! parent edition) - (install-locally/install-locally! parent edition) - (build-driver! parent edition)) - (u/announce "%s parents built successfully." driver)))) - -(defn- build-uberjar! [driver edition] - (u/step (format "Build %s uberjar (%s edition)" driver edition) - (u/delete-file-if-exists! (c/driver-target-directory driver)) - (u/sh {:dir (c/driver-project-dir driver)} "lein" "clean") - (u/sh {:dir (c/driver-project-dir driver) - :env {"LEIN_SNAPSHOTS_IN_RELEASE" "true" - "HOME" (env/env :user-home) - "PATH" (env/env :path) - "JAVA_HOME" (env/env :java-home)}} - "lein" "with-profile" (format "+%s" (name edition)) "uberjar") - (strip-and-compress/strip-and-compress-uberjar! driver) - (u/announce "%s uberjar (%s edition) built successfully." 
driver edition))) - -(defn- build-and-verify! - "Build `driver` and verify the built JAR. This function ignores any existing artifacts and will always rebuild." - [driver edition] - {:pre [(#{:oss :ee} edition)]} - (u/step (str/join " " [(colorize/green "Build") - (colorize/yellow driver) - (colorize/green "driver") - (colorize/yellow (format "(%s edition)" edition))]) - (clean-driver-artifacts! driver) - (u/step (format "Build %s driver (%s edition) prerequisites if needed" driver edition) - (metabase/build-metabase!) - (build-parents! driver edition)) - (build-uberjar! driver edition) - (copy-driver! driver) - (verify/verify-driver driver) - (u/step (format "Save checksum for %s driver (%s edition) to %s" - driver edition (c/driver-checksum-filename driver)) - (let [filename (c/driver-checksum-filename driver) - checksum (checksum/driver-checksum driver edition)] - (spit filename checksum) - (u/announce "Wrote checksum %s to file %s" (pr-str checksum) filename))))) - -(defn- driver-checksum-matches? - "Check whether the saved checksum for the driver from the last build is the same as the current one. If so, we don't - need to build again. This checksum is based on driver sources as well as the checksums for Metabase sources and - parent drivers." - [driver edition] - (u/step (format "Determine whether %s driver (%s edition) source files have changed since last build" driver edition) - (let [existing-checksum (checksum/existing-driver-checksum driver)] - (cond - (not existing-checksum) - (do - (u/announce "No previous checksum. Need to rebuild driver") - false) - - (= existing-checksum (checksum/driver-checksum driver edition)) - (do - (u/announce "Checksum is the same. Do not need to rebuild driver.") - true) - - :else - (do - (u/announce "Checksum is different. Need to rebuild driver.") - false))))) - (defn build-driver! - "Build `driver`, if needed." 
- [driver edition] - {:pre [(#{:oss :ee nil} edition)]} - (let [edition (or edition :oss)] - (u/step (str/join " " [(colorize/green "Build") - (colorize/yellow driver) - (colorize/green "driver") - (colorize/yellow (format "(%s edition)" edition)) - (colorize/green "if needed")]) - ;; When we build a driver, we save a checksum of driver source code + metabase source code + parent drivers - ;; alongside the built driver JAR. The next time this script is called, we recalculate that checksum -- if the - ;; current checksum matches the saved one associated with the built driver JAR, we do not need to rebuild the - ;; driver. If anything relevant has changed, we have to rebuild the driver. - (if (driver-checksum-matches? driver edition) - ;; even if we're not rebuilding the driver, copy the artifact from `modules/drivers//target/uberjar/` - ;; to `resources/modules` so we can be sure we have the most up-to-date version there. - (try - (copy-driver! driver) - (verify/verify-driver driver) - ;; if verification fails, delete all the existing artifacts and just rebuild the driver from scratch. - (catch Throwable e - (u/error "Error verifying existing driver:\n%s" (pr-str e)) - (u/announce "Deleting existing driver artifacts and rebuilding.") - (clean-driver-artifacts! driver) - (build-driver! driver edition))) - ;; if checksum does not match, build and verify the driver - (try - (build-and-verify! driver edition) - ;; if building fails, clean everything, including metabase-core, the metabase uberjar, and parent - ;; dependencies, *then* retry. - (catch Throwable e - (u/announce "Cleaning ALL and retrying...") - (clean-all! driver) - (try - (build-and-verify! driver edition) - ;; if building the driver failed again, even after cleaning, delete anything that was built and then - ;; give up. - (catch Throwable e - (u/safe-println (colorize/red (format "Failed to build %s driver." driver))) - (clean-driver-artifacts! 
driver) - (throw e)))))) - ;; if we make it this far, we've built the driver successfully. - (u/announce "Success.")))) + ;; 1-arity that takes just a map is mean for use directly with clojure -X + ([{:keys [driver edition], :as options}] + (build-driver! driver edition (dissoc options :driver :edition))) + + ([driver edition] + (build-driver! driver edition nil)) + + ([driver edition {:keys [project-dir target-dir], :as options}] + (let [edition (or edition :oss) + start-time-ms (System/currentTimeMillis)] + (binding [c/*driver-project-dir* (or project-dir + c/*driver-project-dir*) + c/*target-directory* (or target-dir + c/*target-directory*)] + (u/step (format "Build driver %s (edition = %s, options = %s)" driver edition (pr-str options)) + (clean! driver) + (copy-source-files/copy-source-files! driver edition) + (compile-source-files/compile-clojure-source-files! driver edition) + (create-uberjar/create-uberjar! driver edition) + (u/announce "Built %s driver in %d ms." driver (- (System/currentTimeMillis) start-time-ms)) + (verify/verify-driver driver)))))) diff --git a/bin/build-drivers/src/build_drivers/checksum.clj b/bin/build-drivers/src/build_drivers/checksum.clj deleted file mode 100644 index a46e91e4151d..000000000000 --- a/bin/build-drivers/src/build_drivers/checksum.clj +++ /dev/null @@ -1,82 +0,0 @@ -(ns build-drivers.checksum - "Shared code for calculating and reading hex-encoded MD5 checksums for relevant files." - (:require [build-drivers.common :as c] - [build-drivers.plugin-manifest :as manifest] - [clojure.java.io :as io] - [clojure.string :as str] - [colorize.core :as colorize] - [metabuild-common.core :as u]) - (:import org.apache.commons.codec.digest.DigestUtils)) - -(defn checksum-from-file - "Read a saved MD5 hash checksum from a file." 
- [filename] - (u/step (format "Read saved checksum from %s" filename) - (let [file (io/file filename)] - (if-not (.exists file) - (u/announce "%s does not exist" filename) - (or (when-let [[checksum-line] (not-empty (str/split-lines (slurp file)))] - (when-let [[_ checksum-hex] (re-matches #"(^(?:\w+-)?[0-9a-f]{32}).*$" checksum-line)] - (u/safe-println (format "Saved checksum is %s" (colorize/cyan checksum-hex))) - checksum-hex)) - (u/error (format "Checksum file %s exists, but does not contain a valid checksum" filename))))))) - -;;; -------------------------------------------- Metabase source checksum -------------------------------------------- - -(defn- metabase-source-paths [] - (sort - (cons - (u/filename u/project-root-directory "project.clj") - (mapcat (fn [dir] - (try - (u/find-files dir (fn [s] - (or (str/ends-with? s ".clj") - (str/ends-with? s ".cljc")))) - (catch Throwable _ - []))) - [(u/filename u/project-root-directory "src") - (u/filename u/project-root-directory "enterprise" "backend" "src") - (u/filename u/project-root-directory "shared" "src")])))) - -(defn metabase-source-checksum - "Checksum of Metabase backend source files and `project.clj`." - ^String [] - (let [paths (metabase-source-paths)] - (u/step (format "Calculate checksum for %d Metabase source files" (count paths)) - (let [checksum (DigestUtils/md5Hex (str/join (map slurp paths)))] - (u/safe-println (format "Current checksum of Metabase files is %s" (colorize/cyan checksum))) - checksum)))) - - -;;; ---------------------------------------------- Driver source files ----------------------------------------------- - -(defn existing-driver-checksum - "Checksum from the relevant sources from last time we built `driver`." - [driver] - (checksum-from-file (c/driver-checksum-filename driver))) - -(defn- driver-source-paths - "Returns sequence of the source filenames for `driver`." - [driver] - (u/find-files (c/driver-project-dir driver) - (fn [path] - (or (and (str/ends-with? 
path ".clj") - (not (str/starts-with? path (u/filename (c/driver-project-dir driver) "test")))) - (str/ends-with? path ".yaml"))))) - -(defn driver-checksum - "The driver checksum is based on a checksum of all the driver source files (`.clj` files and the plugin manifest YAML - file) combined with the checksums for `metabase-core` *and* the parent drivers. After building a driver, we save - this checksum. Next time the script is ran, we recalculate the checksum to determine whether anything relevant has - changed -- if it has, and the current checksum doesn't match the saved one, we need to rebuild the driver." - ^String [driver edition] - (let [source-paths (driver-source-paths driver)] - (u/step (format "Calculate checksum for %d files: %s ..." (count source-paths) (first source-paths)) - (let [checksum (str - (c/edition-checksum-prefix driver edition) - (DigestUtils/md5Hex (str/join (concat [(metabase-source-checksum)] - (map #(driver-checksum % edition) - (manifest/parent-drivers driver)) - (map slurp (driver-source-paths driver))))))] - (u/safe-println (format "Current checksum of %s driver (%s edition) is %s" driver edition (colorize/cyan checksum))) - checksum)))) diff --git a/bin/build-drivers/src/build_drivers/common.clj b/bin/build-drivers/src/build_drivers/common.clj index 36de81a78989..ec3607b81c4e 100644 --- a/bin/build-drivers/src/build_drivers/common.clj +++ b/bin/build-drivers/src/build_drivers/common.clj @@ -1,77 +1,46 @@ (ns build-drivers.common - "Shared constants and functions related to source and artifact paths used throughout this code." 
- (:require [environ.core :as env] - [leiningen.core.project :as lein.project] + (:require [clojure.java.io :as io] + [clojure.tools.deps.alpha :as deps] [metabuild-common.core :as u])) -(def ^String maven-repository-path - (u/filename (env/env :user-home) ".m2" "repository")) +(def ^:dynamic *driver-project-dir* nil) -;;; -------------------------------------------------- Driver Paths -------------------------------------------------- +(def ^:dynamic *target-directory* nil) (defn driver-project-dir "e.g. \"/home/cam/metabase/modules/drivers/redshift\"" - [driver] - (u/filename u/project-root-directory "modules" "drivers" (name driver))) + ^String [driver] + (or *driver-project-dir* + (u/filename u/project-root-directory "modules" "drivers" (name driver)))) (defn driver-jar-name "e.g. \"redshift.metabase-driver.jar\"" - [driver] + ^String [driver] (format "%s.metabase-driver.jar" (name driver))) -(defn driver-target-directory - [driver] - (u/filename (driver-project-dir driver) "target")) - -(defn driver-jar-build-path - "e.g. \"/home/cam/metabase/modules/drivers/redshift/target/uberjar/redshift.metabase-driver.jar\"" - [driver] - (u/filename (driver-target-directory driver) "uberjar" (driver-jar-name driver))) - -(def ^String driver-jar-destination-directory - (u/filename u/project-root-directory "resources" "modules")) +(defn driver-jar-destination-directory ^String [] + (or *target-directory* + (u/filename u/project-root-directory "resources" "modules"))) (defn driver-jar-destination-path "e.g. \"/home/cam/metabase/resources/modules/redshift.metabase-driver.jar\"" ^String [driver] - (u/filename driver-jar-destination-directory (driver-jar-name driver))) - -(defn- lein-project-map - "Read the `project.clj` file for `driver` and return it as a map." - [driver & profiles] - (let [project-filename (u/assert-file-exists (u/filename (driver-project-dir driver) "project.clj"))] - (lein.project/read project-filename profiles))) - -(defn has-edition-profile? 
- "Whether `driver` has a separate profile for `edition`, e.g. `:ee`. This means this version of the driver is different - from other versions of the driver (e.g. :ee Oracle ships with the non-free Oracle JDBC driver, :oss does not)." - [driver edition] - (let [has-profile? (boolean - (contains? (:profiles (lein-project-map driver)) edition))] - (u/safe-println (format "%s %s have a separate %s profile" driver (if has-profile? "DOES" "DOES NOT") edition)) - has-profile?)) - -(defn edition-checksum-prefix - "Prefix to add to checksums of driver for `edition` -- normally this is `nil`, but if the driver has a specific - profile for `edition` (e.g. Oracle has a different profile for `:ee` builds) this is a prefix to make the checksum - different from the normal one." - [driver edition] - (when (has-edition-profile? driver edition) - (format "%s-" (name edition)))) - -(defn driver-checksum-filename - "e.g. \"/home/cam/metabase/modules/drivers/redshift/target/checksum.md5\"" - [driver] - (u/filename (driver-project-dir driver) "target" "checksum.md5")) + (u/filename (driver-jar-destination-directory) (driver-jar-name driver))) -(defn driver-plugin-manifest-filename - "e.g. \"/home/cam/metabase/modules/drivers/bigquery/resources/plugin-manifest.yaml\"" - [driver] - (u/filename (driver-project-dir driver) "resources" "metabase-plugin.yaml")) +(defn compiled-source-target-dir [driver] + (u/filename (driver-project-dir driver) "target" "jar")) +(defn driver-edn-filename [driver] + (u/filename (driver-project-dir driver) "deps.edn")) -;;; ------------------------------------------ Metabase Local Install Paths ------------------------------------------ +(defn- ->absolute [driver path] + (if (u/absolute? path) + path + (u/filename (driver-project-dir driver) path))) -(def ^String metabase-uberjar-path - "e.g. 
\"home/cam/metabase/target/uberjar/metabase.jar\"" - (u/filename u/project-root-directory "target" "uberjar" "metabase.jar")) +(defn driver-edn [driver edition] + (let [edn (deps/merge-edns ((juxt :root-edn :project-edn) (deps/find-edn-maps (driver-edn-filename driver)))) + combined (deps/combine-aliases edn #{edition})] + (-> (deps/tool edn combined) + ;; make sure :paths are absolute + (update :paths (partial mapv (partial ->absolute driver)))))) diff --git a/bin/build-drivers/src/build_drivers/compile_source_files.clj b/bin/build-drivers/src/build_drivers/compile_source_files.clj new file mode 100644 index 000000000000..cfe2277cfdb5 --- /dev/null +++ b/bin/build-drivers/src/build_drivers/compile_source_files.clj @@ -0,0 +1,54 @@ +(ns build-drivers.compile-source-files + (:require [build-drivers.common :as c] + [clojure.java.io :as io] + [clojure.tools.namespace.dependency :as ns.deps] + [clojure.tools.namespace.find :as ns.find] + [clojure.tools.namespace.parse :as ns.parse] + [metabuild-common.core :as u])) + +(defn driver-source-paths [driver edition] + (let [dirs (:paths (c/driver-edn driver edition))] + (assert (every? u/absolute? dirs) + (format "All dirs should be absolute, got: %s" (pr-str dirs))) + dirs)) + +(defn- dependencies-graph + "Return a `clojure.tools.namespace` dependency graph of namespaces named by `ns-symbol`." + [ns-decls] + (reduce + (fn [graph ns-decl] + (let [ns-symbol (ns.parse/name-from-ns-decl ns-decl)] + (reduce + (fn [graph dep] + (ns.deps/depend graph ns-symbol dep)) + graph + (ns.parse/deps-from-ns-decl ns-decl)))) + (ns.deps/graph) + ns-decls)) + +;; topologically sort the namespaces so we don't end up with weird compilation issues. 
+(defn source-path-namespaces [source-paths] + (let [ns-decls (mapcat + (comp ns.find/find-ns-decls-in-dir io/file) + source-paths) + ns-symbols (set (map ns.parse/name-from-ns-decl ns-decls))] + (->> (dependencies-graph ns-decls) + ns.deps/topo-sort + (filterv ns-symbols)))) + +(defn compile-clojure-source-files! [driver edition] + (u/step "Compile clojure source files" + (let [start-time-ms (System/currentTimeMillis) + source-paths (driver-source-paths driver edition) + target-dir (c/compiled-source-target-dir driver) + namespaces (source-path-namespaces source-paths)] + (u/announce "Compiling Clojure source files in %s to %s" (pr-str source-paths) target-dir) + (u/create-directory-unless-exists! target-dir) + (u/announce "Compiling namespaces %s" (pr-str namespaces)) + (binding [*compile-path* target-dir] + (doseq [a-namespace namespaces] + (#'clojure.core/serialized-require a-namespace) + (compile a-namespace))) + (u/announce "Compiled %d namespace(s) in %d ms." + (count namespaces) + (- (System/currentTimeMillis) start-time-ms))))) diff --git a/bin/build-drivers/src/build_drivers/copy_source_files.clj b/bin/build-drivers/src/build_drivers/copy_source_files.clj new file mode 100644 index 000000000000..f9b87805841b --- /dev/null +++ b/bin/build-drivers/src/build_drivers/copy_source_files.clj @@ -0,0 +1,18 @@ +(ns build-drivers.copy-source-files + (:require [build-drivers.common :as c] + [clojure.tools.build.api :as build] + [metabuild-common.core :as u])) + +(defn copy-source-files! [driver edition] + (u/step (format "Copy %s source files" driver) + (let [start-time-ms (System/currentTimeMillis) + dirs (:paths (c/driver-edn driver edition))] + (assert (every? u/absolute? dirs) + (format "All dirs should be absolute, got: %s" (pr-str dirs))) + (u/announce "Copying files in %s" (pr-str dirs)) + (build/copy-dir + {:src-dirs dirs + :target-dir (c/compiled-source-target-dir driver)}) + (u/announce "Copied files in %d directories in %d ms." 
+ (count dirs) + (- (System/currentTimeMillis) start-time-ms))))) diff --git a/bin/build-drivers/src/build_drivers/create_uberjar.clj b/bin/build-drivers/src/build_drivers/create_uberjar.clj new file mode 100644 index 000000000000..d27c584375d1 --- /dev/null +++ b/bin/build-drivers/src/build_drivers/create_uberjar.clj @@ -0,0 +1,79 @@ +(ns build-drivers.create-uberjar + (:require [build-drivers.common :as c] + [clojure.java.io :as io] + [clojure.tools.deps.alpha :as deps] + [clojure.tools.deps.alpha.util.dir :as deps.dir] + [colorize.core :as colorize] + [hf.depstar.api :as depstar] + [metabuild-common.core :as u])) + +(defn driver-basis [driver edition] + (let [edn (c/driver-edn driver edition)] + (binding [deps.dir/*the-dir* (io/file (c/driver-project-dir driver))] + (deps/calc-basis edn)))) + +(defonce metabase-core-edn + (deps/merge-edns + ((juxt :root-edn :project-edn) + (deps/find-edn-maps (u/filename u/project-root-directory "deps.edn"))))) + +(defonce metabase-core-basis + (binding [deps.dir/*the-dir* (io/file u/project-root-directory)] + (deps/calc-basis metabase-core-edn))) + +(defonce metabase-core-provided-libs + (set (keys (:libs metabase-core-basis)))) + +(defn- driver-parents [driver edition] + (when-let [parents (not-empty (:metabase.driver/parents (c/driver-edn driver edition)))] + (u/announce "Driver has parent drivers %s" (pr-str parents)) + parents)) + +(defn- parent-provided-libs [driver edition] + (into {} (for [parent (driver-parents driver edition) + lib (keys (:libs (driver-basis parent edition)))] + [lib parent]))) + +(defn- provided-libs + "Return a map of lib -> provider, where lib is a symbol like `com.h2database/h2` and provider is either + `metabase-core` or the parent driver that provided that lib." 
+ [driver edition] + (into (parent-provided-libs driver edition) + (map (fn [lib] + [lib 'metabase-core])) + metabase-core-provided-libs)) + +(defn remove-provided-libs [basis driver edition] + (let [provided-lib->provider (into {} + (filter (fn [[lib]] + (get-in basis [:libs lib]))) + (provided-libs driver edition))] + ;; log which libs we're including and excluding. + (doseq [lib (sort (keys (:libs basis)))] + (u/announce (if-let [provider (get provided-lib->provider lib)] + (format "SKIP %%45s (provided by %s)" provider) + "INCLUDE %s") + (colorize/yellow lib))) + ;; now remove the provide libs from `:classpath`, `:classpath-roots`, and `:libs` + (let [provided-libs-set (into #{} (keys provided-lib->provider)) + provided-paths-set (into #{} (mapcat #(get-in basis [:libs % :paths])) provided-libs-set)] + (-> basis + (update :classpath-roots #(vec (remove provided-paths-set %))) + (update :libs #(into {} (remove (fn [[lib]] (provided-libs-set lib))) %)) + (update :classpath #(into {} (remove (fn [[path]] (provided-paths-set path))) %)))))) + +(defn- uberjar-basis [driver edition] + (u/step "Determine which dependencies to include" + (-> (driver-basis driver edition) + (remove-provided-libs driver edition) + ;; remove unneeded keys so Depstar doesn't try to do anything clever and resolve them + (dissoc :deps :aliases :mvn/repos)))) + +(defn create-uberjar! [driver edition] + (u/step (format "Write %s %s uberjar -> %s" driver edition (c/driver-jar-destination-path driver)) + (let [start-time-ms (System/currentTimeMillis)] + (depstar/uber + {:class-dir (c/compiled-source-target-dir driver) + :uber-file (c/driver-jar-destination-path driver) + :basis (uberjar-basis driver edition)}) + (u/announce "Created uberjar in %d ms." 
(- (System/currentTimeMillis) start-time-ms))))) diff --git a/bin/build-drivers/src/build_drivers/install_driver_locally.clj b/bin/build-drivers/src/build_drivers/install_driver_locally.clj deleted file mode 100644 index 7189c74bd913..000000000000 --- a/bin/build-drivers/src/build_drivers/install_driver_locally.clj +++ /dev/null @@ -1,49 +0,0 @@ -(ns build-drivers.install-driver-locally - "Logic related to installing a driver as a library in the local Maven repository so it can be used as a dependency - when building descandant drivers. Right now this is only used for `:google`, which is used by `:bigquery` and - `:googleanalytics`." - (:require [build-drivers.checksum :as checksum] - [build-drivers.common :as c] - [colorize.core :as colorize] - [metabuild-common.core :as u])) - -(defn- local-install-path [driver] - (u/filename c/maven-repository-path "metabase" (format "%s-driver" (name driver)))) - -(defn- local-install-checksum-filename [driver edition] - (u/filename (local-install-path driver) (str (c/edition-checksum-prefix driver edition) "checksum.md5"))) - -(defn clean! - "Delete local Maven installation of the library version of `driver`." - [driver] - (u/step (format "Deleting existing Maven installation of %s driver" driver) - (u/delete-file-if-exists! (local-install-path driver)))) - -(defn- local-install-checksum-matches? - "After installing the library version of `driver`, we save a checksum based on its sources; next time we call - `install-locally!`, we can recalculate the checksum; if the saved one matches the current one, we do not need to - reinstall." - [driver edition] - (u/step "Determine whether %s driver source files have changed since last local install" - (let [existing-checksum (checksum/checksum-from-file (local-install-checksum-filename driver edition)) - current-checksum (checksum/driver-checksum driver edition) - same? (= existing-checksum current-checksum)] - (u/announce (if same? - "Checksum is the same. 
Do not need to rebuild driver." - "Checksum is different. Need to rebuild driver.")) - same?))) - -(defn install-locally! - "Install `driver` as a library in the local Maven repository IF NEEDED so descendant drivers can use it as a - `:provided` dependency when building. E.g. before building `:bigquery` we need to install `:google` as a library - locally." - [driver edition] - {:pre [(keyword? driver)]} - (u/step (str (colorize/green "Install ") (colorize/yellow driver) (colorize/green " driver to local Maven repo if needed")) - (if (local-install-checksum-matches? driver edition) - (u/announce "Already installed locally.") - (u/step (str (colorize/green "Install ") (colorize/yellow driver) (colorize/green " driver to local Maven repo")) - (u/sh {:dir (c/driver-project-dir driver)} "lein" "clean") - (u/sh {:dir (c/driver-project-dir driver)} "lein" "install-for-building-drivers") - (u/step (format "Save checksum to %s" driver (local-install-checksum-filename driver edition)) - (spit (local-install-checksum-filename driver edition) (checksum/driver-checksum driver edition))))))) diff --git a/bin/build-drivers/src/build_drivers/metabase.clj b/bin/build-drivers/src/build_drivers/metabase.clj deleted file mode 100644 index e94b29798ef6..000000000000 --- a/bin/build-drivers/src/build_drivers/metabase.clj +++ /dev/null @@ -1,88 +0,0 @@ -(ns build-drivers.metabase - "Code for installing the main Metabase project as a library (`metabase-core`) in the local Maven repository, and for - building a Metabase uberjar. Both are needed when building drivers." 
- (:require [build-drivers - [checksum :as checksum] - [common :as c]] - [metabuild-common.core :as u])) - -(def ^String ^:private uberjar-checksum-path - (str c/metabase-uberjar-path ".md5")) - -(def ^String ^:private metabase-core-install-path - (u/filename c/maven-repository-path "metabase-core")) - -(def ^String ^:private metabase-core-checksum-path - (u/filename metabase-core-install-path "checksum.md5")) - -(defn metabase-core-checksum-matches? [] - (u/step "Determine whether Metabase source files checksum has changed since last install of metabase-core" - (let [existing-checksum (checksum/checksum-from-file metabase-core-checksum-path) - current-checksum (checksum/metabase-source-checksum) - same? (= existing-checksum current-checksum)] - (u/announce (if same? - "Checksum is the same. Do not need to reinstall metabase-core locally." - "Checksum is different. Need to reinstall metabase-core locally.")) - same?))) - -(defn- delete-metabase-core-install! [] - (u/step "Delete local installation of metabase-core" - (u/delete-file-if-exists! metabase-core-install-path))) - -(defn- install-metabase-core! [] - (u/step "Install metabase-core locally if needed" - (if (metabase-core-checksum-matches?) - (u/announce "Up-to-date metabase-core already installed to local Maven repo") - (do - (delete-metabase-core-install!) - (u/sh {:dir u/project-root-directory} "lein" "clean") - (u/sh {:dir u/project-root-directory} "lein" "install-for-building-drivers") - (u/step "Save checksum for local installation of metabase-core" - (spit metabase-core-checksum-path (checksum/metabase-source-checksum))) - (u/announce "metabase-core dep installed to local Maven repo successfully."))))) - -(defn uberjar-checksum-matches? - "After installing/building Metabase we save a MD5 hex checksum of Metabase backend source files (including - `project.clj`). The next time we run `build-metabase!`, if the checksums have changed we know we need to - rebuild/reinstall." 
- [] - (u/step "Determine whether Metabase source files checksum has changed since last build of uberjar" - (let [existing-checksum (checksum/checksum-from-file uberjar-checksum-path) - current-checksum (checksum/metabase-source-checksum) - same? (= existing-checksum current-checksum)] - (u/announce (if same? - "Checksum is the same. Do not need to rebuild Metabase uberjar." - "Checksum is different. Need to rebuild Metabase uberjar.")) - same?))) - -(defn- delete-metabase-uberjar! [] - (u/step "Delete exist metabase uberjar" - (u/delete-file-if-exists! (u/filename u/project-root-directory "target")))) - -(defn- build-metabase-uberjar! [] - (u/step "Build Metabase uberjar if needed" - (if (uberjar-checksum-matches?) - (u/announce "Update-to-date Metabase uberjar already built") - (do - (delete-metabase-uberjar!) - (u/sh {:dir u/project-root-directory} "lein" "clean") - (u/sh {:dir u/project-root-directory} "lein" "uberjar") - (u/step "Save checksum for Metabase uberar" - (spit uberjar-checksum-path (checksum/metabase-source-checksum))) - (u/announce "Metabase uberjar built successfully"))))) - -(defn clean-metabase! - "Delete local Maven repository installation of the `metabase-core` library and delete the built Metabase uberjar." - [] - (u/step "Clean local Metabase deps" - (delete-metabase-core-install!) - (delete-metabase-uberjar!))) - -(defn build-metabase! - "Install `metabase-core` as a library in the local Maven repo, and build the Metabase uberjar IF NEEDED. We need to do - both because `metabase-core` is used as a dependency for drivers, and the Metabase uberjar is checked to make sure - we don't ship duplicate classes in the driver JAR (as part of the `strip-and-compress` stage.)" - [] - (u/step "Build metabase-core and install locally" - (install-metabase-core!) 
- (build-metabase-uberjar!))) diff --git a/bin/build-drivers/src/build_drivers/plugin_manifest.clj b/bin/build-drivers/src/build_drivers/plugin_manifest.clj deleted file mode 100644 index 163c6096475d..000000000000 --- a/bin/build-drivers/src/build_drivers/plugin_manifest.clj +++ /dev/null @@ -1,55 +0,0 @@ -(ns build-drivers.plugin-manifest - "Code for reading the YAML plugin manifest for a driver. " - (:require [build-drivers.common :as c] - [metabuild-common.core :as u] - [yaml.core :as yaml])) - -(defn- plugin-manifest - "Read `driver` plugin manifest and return a map." - [driver] - {:post [(map? %)]} - (yaml/from-file (u/assert-file-exists (c/driver-plugin-manifest-filename driver)))) - -(defn- driver-declarations [manifest] - ;; driver plugin manifest can have a single `:driver`, or multiple drivers, e.g. Spark SQL which also has the - ;; `:hive-like` abstract driver - (let [{driver-declaration :driver} manifest] - (if (map? driver-declaration) - [driver-declaration] - driver-declaration))) - -(defn- declared-drivers - "Sequence of all drivers declared in a plugin `manifest`. Usually only one driver, except for Spark SQL which declares - both `:hive-like` and `:sparksql`." - [manifest] - (map (comp keyword :name) (driver-declarations manifest))) - -(def ^:private metabase-core-drivers - "Drivers that ship as part of the core Metabase project (as opposed to a plugin) and thus do not need to be built." - #{:sql - :sql-jdbc - :mysql - :h2 - :postgres}) - -(defn parent-drivers - "Get the parent drivers of a driver for purposes of building a driver. Excludes drivers that ship as part of - `metabase-core`, since we don't need to worry about building those. - - e.g. - - (parent-drivers :googleanalytics) ;-> (:google)" - [driver] - (let [manifest (plugin-manifest driver) - declared (declared-drivers manifest)] - (or (not-empty - (for [{parent-declaration :parent} (driver-declarations manifest) - :let [parents (if (string? 
parent-declaration) - [parent-declaration] - parent-declaration)] - parent parents - :let [parent (keyword parent)] - :when (and (not (contains? (set declared) parent)) - (not (contains? metabase-core-drivers parent)))] - parent)) - (u/announce "%s does not have any parents" driver)))) diff --git a/bin/build-drivers/src/build_drivers/strip_and_compress.clj b/bin/build-drivers/src/build_drivers/strip_and_compress.clj deleted file mode 100644 index b2d594ea8d5c..000000000000 --- a/bin/build-drivers/src/build_drivers/strip_and_compress.clj +++ /dev/null @@ -1,65 +0,0 @@ -(ns build-drivers.strip-and-compress - (:require [build-drivers.common :as c] - [build-drivers.plugin-manifest :as manifest] - [metabuild-common.core :as u]) - (:import java.io.FileOutputStream - [java.util.zip ZipEntry ZipFile ZipOutputStream] - org.apache.commons.io.IOUtils)) - -(def ^:private files-to-always-include - "Files to always include regardless of whether they are present in blacklist JAR." - #{"metabase-plugin.yaml"}) - -(defn- jar-contents - "Get a set of all files in a JAR that we should strip out from the driver JAR -- either the Metabase uberjar itself or - a parent driver JAR." - [^String jar-path] - (with-open [zip-file (ZipFile. jar-path)] - (set - (for [^ZipEntry zip-entry (enumeration-seq (.entries zip-file)) - :let [filename (str zip-entry)] - :when (not (files-to-always-include filename))] - filename)))) - -(defn- strip-classes! [^String driver-jar-path ^String blacklist-jar-path] - (u/step (format "Remove classes from %s that are present in %s and recompress" driver-jar-path blacklist-jar-path) - (let [jar-contents (jar-contents blacklist-jar-path) - temp-driver-jar-path "/tmp/driver.jar" - wrote (atom 0) - skipped (atom 0)] - (u/delete-file-if-exists! temp-driver-jar-path) - (with-open [source-zip (ZipFile. (u/assert-file-exists driver-jar-path)) - os (doto (ZipOutputStream. (FileOutputStream. 
temp-driver-jar-path)) - (.setMethod ZipOutputStream/DEFLATED) - (.setLevel 9))] - (doseq [^ZipEntry entry (enumeration-seq (.entries source-zip))] - (if (jar-contents (str entry)) - (swap! skipped inc) - (with-open [is (.getInputStream source-zip entry)] - (.putNextEntry os (ZipEntry. (.getName entry))) - (IOUtils/copy is os) - (.closeEntry os) - (swap! wrote inc))))) - (u/announce (format "Done. wrote: %d skipped: %d" @wrote @skipped)) - (u/safe-println (format "Original size: %s" (u/format-bytes (u/file-size driver-jar-path)))) - (u/safe-println (format "Stripped/extra-compressed size: %s" (u/format-bytes (u/file-size temp-driver-jar-path)))) - (u/step "replace the original source JAR with the stripped one" - (u/delete-file-if-exists! driver-jar-path) - (u/copy-file! temp-driver-jar-path driver-jar-path))))) - -(defn strip-and-compress-uberjar! - "Remove any classes in compiled `driver` that are also present in the Metabase uberjar or parent drivers. The classes - will be available at runtime, and we don't want to make things unpredictable by including them more than once in - different drivers. - - This is only needed because `lein uberjar` does not seem to reliably exclude classes from `:provided` Clojure - dependencies like `metabase-core` and the parent drivers." - [driver] - (u/step (str (format "Strip out any classes in %s driver JAR found in core Metabase uberjar or parent JARs" driver) - " and recompress with higher compression ratio") - (let [driver-jar-path (u/assert-file-exists (c/driver-jar-build-path driver))] - (u/step "strip out any classes also found in the core Metabase uberjar" - (strip-classes! driver-jar-path (u/assert-file-exists c/metabase-uberjar-path))) - (u/step "remove any classes also found in any of the parent JARs" - (doseq [parent (manifest/parent-drivers driver)] - (strip-classes! 
driver-jar-path (u/assert-file-exists (c/driver-jar-build-path parent)))))))) diff --git a/bin/build-drivers/src/build_drivers/verify.clj b/bin/build-drivers/src/build_drivers/verify.clj index 6386f2538afa..5f09d75b712b 100644 --- a/bin/build-drivers/src/build_drivers/verify.clj +++ b/bin/build-drivers/src/build_drivers/verify.clj @@ -19,6 +19,16 @@ (u/announce "Driver init class file found.") (throw (ex-info (format "Driver verification failed: init class file %s not found" driver-init-class-filename) {})))))) +(defn- verify-does-not-have-clojure-core [driver] + (let [jar-filename (c/driver-jar-destination-path driver)] + (u/step (format "Check %s does not contain Clojure core classes" jar-filename) + (doseq [file ["clojure/spec/alpha__init.class" + "clojure/core__init.class" + "clojure/core.clj"]] + (when (jar-contains-file? jar-filename file) + (throw (ex-info (format "Driver verification failed: driver contains compiled Clojure core file %s" file) + {:file file}))))))) + (defn- verify-has-plugin-manifest [driver] (let [jar-filename (c/driver-jar-destination-path driver)] (u/step (format "Check %s contains metabase-plugin.yaml" jar-filename) @@ -34,4 +44,5 @@ (u/assert-file-exists (c/driver-jar-destination-path driver)) (verify-has-init-class driver) (verify-has-plugin-manifest driver) + (verify-does-not-have-clojure-core driver) (u/announce (format "%s driver verification successful." 
driver)))) diff --git a/bin/build-drivers/src/verify_driver.clj b/bin/build-drivers/src/verify_driver.clj index 5964ba99c0e1..4e96397e8f87 100644 --- a/bin/build-drivers/src/verify_driver.clj +++ b/bin/build-drivers/src/verify_driver.clj @@ -7,4 +7,4 @@ (u/exit-when-finished-nonzero-on-exception (when-not (seq driver) (throw (ex-info "Usage: clojure -m verify-driver " {}))) - (verify/verify-driver (keyword driver)))) + (verify/verify-driver (u/parse-as-keyword driver)))) diff --git a/bin/build-drivers/test/build_drivers/build_driver_test.clj b/bin/build-drivers/test/build_drivers/build_driver_test.clj index 8eab16884f24..20afc365fb8e 100644 --- a/bin/build-drivers/test/build_drivers/build_driver_test.clj +++ b/bin/build-drivers/test/build_drivers/build_driver_test.clj @@ -15,19 +15,11 @@ (build-driver/build-driver! :oracle :oss) (is (.exists (java.io.File. (jar-path)))) (testing "JAR should not contain the JDBC driver classes" - (is (not (jar-contains-jdbc-classes?)))) - (testing "Wouldn't need to rebuild :oss version of the driver" - (is (#'build-driver/driver-checksum-matches? :oracle :oss))) - (testing "WOULD need to build :ee version of the driver" - (is (not (#'build-driver/driver-checksum-matches? :oracle :ee)))))) + (is (not (jar-contains-jdbc-classes?)))))) (deftest build-ee-driver-test (testing "We should be able to build an EE driver" (build-driver/build-driver! :oracle :ee) (is (.exists (java.io.File. (jar-path)))) (testing "JAR *should* contain the JDBC driver classes" - (is (jar-contains-jdbc-classes?))) - (testing "Wouldn't need to rebuild :ee version of the driver" - (is (#'build-driver/driver-checksum-matches? :oracle :ee))) - (testing "WOULD need to build :oss version of the driver" - (is (not (#'build-driver/driver-checksum-matches? 
:oracle :oss)))))) + (is (jar-contains-jdbc-classes?))))) diff --git a/bin/build-drivers/test/build_drivers/checksum_test.clj b/bin/build-drivers/test/build_drivers/checksum_test.clj deleted file mode 100644 index 5c1bf4964900..000000000000 --- a/bin/build-drivers/test/build_drivers/checksum_test.clj +++ /dev/null @@ -1,11 +0,0 @@ -(ns build-drivers.checksum-test - (:require [build-drivers.checksum :as checksum] - [clojure.test :refer :all])) - -(deftest driver-checksum-test - (testing "OSS/EE checksums should be the same for drivers that don't have different oss/ee profiles" - (is (= (checksum/driver-checksum :sqlite :oss) - (checksum/driver-checksum :sqlite :ee)))) - (testing "OSS/EE checksums should be different for drivers that have different oss/ee profiles" - (is (not= (checksum/driver-checksum :oracle :oss) - (checksum/driver-checksum :oracle :ee))))) diff --git a/bin/build-drivers/test/build_drivers/common_test.clj b/bin/build-drivers/test/build_drivers/common_test.clj deleted file mode 100644 index 8aa517fc3b4c..000000000000 --- a/bin/build-drivers/test/build_drivers/common_test.clj +++ /dev/null @@ -1,15 +0,0 @@ -(ns build-drivers.common-test - (:require [build-drivers.common :as c] - [clojure.test :refer :all])) - -(deftest has-edition-profile?-test - (testing :ee - (is (= true - (c/has-edition-profile? :oracle :ee))) - (is (= false - (c/has-edition-profile? :sqlite :ee)))) - (testing :oss - (is (= false - (c/has-edition-profile? :oracle :oss))) - (is (= false - (c/has-edition-profile? 
:sqlite :oss))))) diff --git a/bin/build-drivers/test/build_drivers/install_driver_locally_test.clj b/bin/build-drivers/test/build_drivers/install_driver_locally_test.clj deleted file mode 100644 index 3a3dd0e58e17..000000000000 --- a/bin/build-drivers/test/build_drivers/install_driver_locally_test.clj +++ /dev/null @@ -1,16 +0,0 @@ -(ns build-drivers.install-driver-locally-test - (:require [build-drivers.install-driver-locally :as install-driver-locally] - [clojure.string :as str] - [clojure.test :refer :all])) - -(deftest local-install-checksum-filename-test - (is (str/ends-with? - (#'install-driver-locally/local-install-checksum-filename :oracle :ee) - ".m2/repository/metabase/oracle-driver/ee-checksum.md5")) - (is (str/ends-with? - (#'install-driver-locally/local-install-checksum-filename :oracle :oss) - ".m2/repository/metabase/oracle-driver/checksum.md5")) - (doseq [edition [:oss :ee]] - (is (str/ends-with? - (#'install-driver-locally/local-install-checksum-filename :sqlite edition) - ".m2/repository/metabase/sqlite-driver/checksum.md5")))) diff --git a/bin/build-for-test b/bin/build-for-test index d3a1cbfc3dd4..4013d23ff480 100755 --- a/bin/build-for-test +++ b/bin/build-for-test @@ -7,7 +7,7 @@ VERSION_PROPERTY_NAME="src_hash" source-hash() { # hash all the files that might change a backend-only uberjar build (for integration tests) ( - find src project.clj resources/sample-dataset.db.mv.db -type f -print0 | xargs -0 shasum ; + find src deps.edn resources/sample-dataset.db.mv.db -type f -print0 | xargs -0 shasum ; find resources -type f \( -iname \*.clj -o -iname \*.edn -o -iname \*.yaml -o -iname \*.properties -o -iname \*.html \) -not -name "version.properties" -print0 | xargs -0 shasum ; ) | shasum | awk '{ print $1 }' } diff --git a/bin/build-mb/README.md b/bin/build-mb/README.md new file mode 100644 index 000000000000..4d9aafe3fb96 --- /dev/null +++ b/bin/build-mb/README.md @@ -0,0 +1,28 @@ +## Build Metabase Tooling + +This project is to build the 
Metabase jar. It can be called standalone and is also called from the release project when creating releases. + +## License Information + +We create license information for all of our dependencies, both frontend and backend, and package it in our jar. + +Tests run in CI to verify that we have license information for all dependencies. If you see these failing, you can get a report of the dependencies without license information by running + +```shell +build-mb % clojure -X build/list-without-license +$ "lein" "with-profile" "-dev,+ee,+include-all-drivers" "classpath" +All dependencies have licenses +``` + +If there are dependencies with missing license information, you will see output like + +```shell +build-mb % clojure -X build/list-without-license +$ "lein" "with-profile" "-dev,+ee,+include-all-drivers" "classpath" +Missing License: /Users/dan/.m2/repository/org/eclipse/jetty/jetty-webapp/9.3.19.v20170502/jetty-webapp-9.3.19.v20170502.jar +Missing License: /Users/dan/.m2/repository/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar +Missing License: /Users/dan/.m2/repository/org/opensaml/opensaml-security-impl/3.4.5/opensaml-security-impl-3.4.5.jar +Missing License: /Users/dan/.m2/repository/colorize/colorize/0.1.1/colorize-0.1.1.jar +``` + +You can check the overrides file (resources/overrides.edn) and add the license information there, or perhaps improve the license discovery mechanism in the code. 
diff --git a/bin/build-mb/resources/overrides.edn b/bin/build-mb/resources/overrides.edn index a52997818069..716a08df4966 100644 --- a/bin/build-mb/resources/overrides.edn +++ b/bin/build-mb/resources/overrides.edn @@ -5,10 +5,17 @@ "com.google.guava" {:resource "apache2_0.txt"}, "com.fasterxml.jackson.dataformat" {:resource "apache2_0.txt"}, "com.onelogin" {:resource "MIT.txt"}, + "com.vladsch.flexmark" {:resource "BSD.txt"}, "xalan" {:resource "apache2_0.txt"} "org.apache.hadoop" {:resource "apache2_0.txt"} "org.ow2.asm" {:resource "BSD.txt"} - "org.eclipse.jetty" {:resource "apache2_0.txt"}} + "org.eclipse.jetty" {:resource "apache2_0.txt"} + ;; see if we can remove these if/when https://github.com/googleapis/java-core/issues/488 is done + "com.google.cloud" {:resource "apache2_0.txt"} + "com.google.auth" {:resource "apache2_0.txt"} + "com.google.code.gson" {:resource "apache2_0.txt"} + "com.google.protobuf" {:resource "apache2_0.txt"} + "com.google.http-client" {:resource "apache2_0.txt"}} "com.google.http-client" {"google-http-client" {:resource "apache2_0.txt"} "google-http-client-jackson2" {:resource "apache2_0.txt"}} @@ -34,6 +41,7 @@ "org.slf4j" {"slf4j-api" {:resource "MIT.txt"}}, "amalloy" {"ring-gzip-middleware" {:resource "MIT.txt"}}, "jakarta.activation" {"jakarta.activation-api" {:resource "EDL.txt"}}, + "com.sun.activation" {"jakarta.activation" {:resource "EDL.txt"}} "net.jcip" {"jcip-annotations" {:resource "CC_2_5.txt"}}, "hiccup" {"hiccup" {:resource "EPL.txt"}}, "jakarta.xml.bind" {"jakarta.xml.bind-api" {:resource "EDL.txt"}}, @@ -41,7 +49,8 @@ {"java-support" {:resource "apache2_0.txt"}}, "io.dropwizard.metrics" {"metrics-core" {:resource "apache2_0.txt"}}, "stencil" {"stencil" {:resource "EPL.txt"}}, - "org.antlr" {"antlr-runtime" {:resource "BSD.txt"}}, + "org.antlr" {"antlr-runtime" {:resource "BSD.txt"} + "antlr4-runtime" {:resource "BSD.txt"}}, "de.rototor.pdfbox" {"graphics2d" {:resource "apache2_0.txt"}}, "colorize" {"colorize" 
{:resource "EPL.txt"}}, "org.liquibase" {"liquibase-core" {:resource "apache2_0.txt"}}, diff --git a/bin/build-mb/src/build.clj b/bin/build-mb/src/build.clj index e7b63c7889d9..03f1f2c974cf 100644 --- a/bin/build-mb/src/build.clj +++ b/bin/build-mb/src/build.clj @@ -44,50 +44,57 @@ "NODE_ENV" "production" "MB_EDITION" mb-edition}} "./node_modules/.bin/webpack" "--bail")) + ;; related to the above TODO -- not sure why `yarn build-static-viz` fails here + (u/step "Build static viz" + (u/sh {:dir u/project-root-directory + :env {"PATH" (env/env :path) + "HOME" (env/env :user-home) + "NODE_ENV" "production" + "MB_EDITION" mb-edition}} + "./node_modules/.bin/webpack" "--bail" "--config" "webpack.static-viz.config.js")) (u/announce "Frontend built successfully.")))) +(defn- build-licenses! + [edition] + {:pre [(#{:oss :ee} edition)]} + (u/step "Generate backend license information from jar files" + (let [[classpath] (u/sh {:dir u/project-root-directory + :quiet? true} + "clojure" (str "-A" edition) "-Spath") + output-filename (u/filename u/project-root-directory + "resources" + "license-backend-third-party.txt") + {:keys [without-license]} (license/generate {:classpath classpath + :backfill (edn/read-string + (slurp (io/resource "overrides.edn"))) + :output-filename output-filename + :report? false})] + (when (seq without-license) + (run! (comp (partial u/error "Missing License: %s") first) + without-license)) + (u/announce "License information generated at %s" output-filename))) + + (u/step "Run `yarn licenses generate-disclaimer`" + (let [license-text (str/join \newline + (u/sh {:dir u/project-root-directory + :quiet? true} + "yarn" "licenses" "generate-disclaimer"))] + (spit (u/filename u/project-root-directory + "resources" + "license-frontend-third-party.txt") license-text)))) + (def uberjar-filename (u/filename u/project-root-directory "target" "uberjar" "metabase.jar")) (defn- build-uberjar! [edition] {:pre [(#{:oss :ee} edition)]} (u/delete-file-if-exists! 
uberjar-filename) (u/step (format "Build uberjar with profile %s" edition) - (u/sh {:dir u/project-root-directory} "lein" "clean") - (u/sh {:dir u/project-root-directory} "lein" "with-profile" (str \+ (name edition)) "uberjar") + ;; TODO -- we (probably) don't need to shell out in order to do this anymore, we should be able to do all this + ;; stuff directly in Clojure land by including this other `build` namespace directly (once we dedupe the names) + (u/sh {:dir u/project-root-directory} "clojure" "-T:build" "uberjar" :edition edition) (u/assert-file-exists uberjar-filename) (u/announce "Uberjar built successfully."))) -(defn- build-backend-licenses-file! [edition] - {:pre [(#{:oss :ee} edition)]} - (let [classpath-and-logs (u/sh {:dir u/project-root-directory - :quiet? true} - "lein" - "with-profile" (str \- "dev" - (str \, \+ (name edition)) - \,"+include-all-drivers") - "classpath") - classpath (last - classpath-and-logs) - output-filename (u/filename u/project-root-directory "license-backend-third-party") - {:keys [with-license - without-license]} (license/generate {:classpath classpath - :backfill (edn/read-string - (slurp (io/resource "overrides.edn"))) - :output-filename output-filename - :report? false})] - (when (seq without-license) - (run! (comp (partial u/error "Missing License: %s") first) - without-license)) - (u/announce "License information generated at %s" output-filename))) - -(defn- build-frontend-licenses-file! - [] - (let [license-text (str/join \newline - (u/sh {:dir u/project-root-directory - :quiet? true} - "yarn" "licenses" "generate-disclaimer"))] - (spit (u/filename u/project-root-directory "license-frontend-third-party") license-text))) - (def all-steps (ordered-map/ordered-map :version (fn [{:keys [edition version]}] @@ -96,12 +103,10 @@ (i18n/create-all-artifacts!)) :frontend (fn [{:keys [edition]}] (build-frontend! edition)) + :licenses (fn [{:keys [edition]}] + (build-licenses! 
edition)) :drivers (fn [{:keys [edition]}] (build-drivers/build-drivers! edition)) - :backend-licenses (fn [{:keys [edition]}] - (build-backend-licenses-file! edition)) - :frontend-licenses (fn [{:keys []}] - (build-frontend-licenses-file!)) :uberjar (fn [{:keys [edition]}] (build-uberjar! edition)))) @@ -121,7 +126,7 @@ version (str/join ", " (map name steps))) (doseq [step-name steps - :let [step-fn (or (get all-steps (keyword step-name)) + :let [step-fn (or (get all-steps (u/parse-as-keyword step-name)) (throw (ex-info (format "Invalid step: %s" step-name) {:step step-name :valid-steps (keys all-steps)})))]] @@ -133,3 +138,20 @@ (build! (merge {:edition (edition-from-env-var)} (when-let [steps (not-empty steps)] {:steps steps}))))) + +;; useful to call from command line `cd bin/build-mb && clojure -X build/list-without-license` +(defn list-without-license [{:keys []}] + (let [[classpath] (u/sh {:dir u/project-root-directory + :quiet? true} + "clojure" "-A:ee" "-Spath") + classpath-entries (license/jar-entries classpath) + {:keys [without-license]} (license/process* + {:classpath-entries classpath-entries + :backfill (edn/read-string + (slurp (io/resource "overrides.edn")))})] + (if (seq without-license) + (run! 
(comp (partial u/error "Missing License: %s") first) + without-license) + (u/announce "All dependencies have licenses")) + (shutdown-agents) + (System/exit (if (seq without-license) 1 0)))) diff --git a/bin/build-mb/src/build/licenses.clj b/bin/build-mb/src/build/licenses.clj index e5794f34d340..7acc5699a1e8 100644 --- a/bin/build-mb/src/build/licenses.clj +++ b/bin/build-mb/src/build/licenses.clj @@ -185,10 +185,17 @@ {:with-license (categorized true) :without-license (categorized false)})) +(defn jar-entries + "Returns a seq of jar entries on the classpath" + [classpath] + (->> (str/split classpath (re-pattern classpath-separator)) + (filter jar-file?))) + (defn generate - "Process a classpath, creating a file of all license information, writing to `:output-filename`. Backfill is a clojure - data structure or a filename of an edn file of a clojure datastructure providing for backfilling license information - if it is not discernable from the jar. Should be of the form (note keys are strings not symbols) + "Process a classpath, creating a file of all license information, writing to `:output-filename`. `classpath-entries` + should be a seq of classpath roots. Split a classpath on the classpath separator. Backfill is a clojure data + structure or a filename of an edn file of a clojure datastructure providing for backfilling license information if + it is not discernable from the jar. Should be of the form (note keys are strings not symbols) {\"group\" {\"artifact\" \"license text\"} \"group\" {\"artifact\" {:resource \"filename-of-license\"}} @@ -210,25 +217,22 @@ :without-license [ [jar-filename {:coords {:group :artifact :version} :error }] ... ]}" [{:keys [classpath backfill output-filename report?] :or {report? true}}] (let [backfill (if (string? 
backfill) - (edn/read-string (slurp backfill)) + (edn/read-string (slurp (io/resource backfill))) (or backfill {})) - entries (->> (str/split classpath (re-pattern classpath-separator)) - (filter jar-file?))] - (let [{:keys [with-license without-license] :as license-info} - (process* {:classpath-entries entries - :backfill backfill})] + entries (jar-entries classpath) + {:keys [with-license without-license] :as license-info} + (process* {:classpath-entries entries + :backfill backfill})] + (when (seq with-license) + (with-open [os (io/writer output-filename)] + (run! #(write-license os %) with-license))) + (when report? + (when (seq without-license) + (run! #(report-missing *err* %) without-license)) (when (seq with-license) - (with-open [os (io/writer output-filename)] - (run! #(write-license os %) with-license))) - (when report? - (when (seq without-license) - (run! #(report-missing *err* %) without-license)) - (when (seq with-license) - (println "License information for" (count with-license) "libraries written to " - output-filename) - ;; we call this from the build script. if we switch to the shell we can reenable this and figure out the - ;; best defaults. Want to make sure we never kill our build script - #_(System/exit (if (seq without-license) 1 0)))) - license-info))) - -;; clj -X build.licenses/generate :classpath \"$(cd ../.. && lein with-profile -dev,+ee,+include-all-drivers classpath | tail -n1)\" :backfill "\"resources/overrides.edn\"" :output-filename "\"backend-licenses-ee.txt\"" + (println "License information for" (count with-license) "libraries written to " + output-filename) + ;; we call this from the build script. if we switch to the shell we can reenable this and figure out the + ;; best defaults. 
Want to make sure we never kill our build script + #_(System/exit (if (seq without-license) 1 0)))) + license-info)) diff --git a/bin/build-mb/test/build/licenses_test.clj b/bin/build-mb/test/build/licenses_test.clj index c38f8b12fe56..6d4465586338 100644 --- a/bin/build-mb/test/build/licenses_test.clj +++ b/bin/build-mb/test/build/licenses_test.clj @@ -207,26 +207,26 @@ (deftest all-deps-have-licenses (testing "All deps on the classpath have licenses" - (loop-until-success #(u/sh {:dir u/project-root-directory} "lein" "with-profile" "+include-all-drivers,+oss,+ee" "deps") 3 "download deps") - (doseq [edition [:oss :ee]] - (let [classpath (u/sh {:dir u/project-root-directory - :quiet? true} - "lein" - "with-profile" (str \- "dev" - (str \, \+ (name edition)) - \,"+include-all-drivers") - "classpath") - classpath-entries (->> (str/split (last classpath) (re-pattern lic/classpath-separator)) - (filter lic/jar-file?))] - (let [results (lic/process* {:classpath-entries classpath-entries - :backfill (edn/read-string - (slurp (io/resource "overrides.edn")))})] - (is (nil? (:without-license results)) "Some deps don't have identifiable licenses") - (is (= (set classpath-entries) - (into #{} (->> results :with-license (map first)))))) - (is (some? (:without-license - (lic/process* {:classpath-entries classpath-entries - :backfill {}})))))))) + (loop-until-success #(u/sh {:dir u/project-root-directory} "clojure" "-A:ee" "-P") 3 "download deps") + (let [edition :ee + classpath (u/sh {:dir u/project-root-directory + :quiet? true} + "clojure" + "-A:ee" + "-Spath") + classpath-entries (->> (str/split (last classpath) (re-pattern lic/classpath-separator)) + (filter lic/jar-file?))] + (let [results (lic/process* {:classpath-entries classpath-entries + :backfill (edn/read-string + (slurp (io/resource "overrides.edn")))})] + (is (nil? 
(:without-license results)) + (str "Deps without license information:\n" + (str/join "\n" (map first (:without-license results))))) + (is (= (set classpath-entries) + (into #{} (->> results :with-license (map first)))))) + (is (some? (:without-license + (lic/process* {:classpath-entries classpath-entries + :backfill {}}))))))) (comment (run-tests) (binding [clojure.test/*test-out* *out*] (run-tests)) diff --git a/bin/check-clojure-cli.sh b/bin/check-clojure-cli.sh index 0fdb8cd5df44..d78b774713f6 100755 --- a/bin/check-clojure-cli.sh +++ b/bin/check-clojure-cli.sh @@ -1,9 +1,7 @@ #! /usr/bin/env bash -set -eou pipefail - you_need_to_upgrade() { - echo "Clojure CLI must be at least version 1.10.1.708. Your version is $version." + echo "Clojure CLI must be at least version 1.10.3.905. Your version is $version." echo "See https://www.clojure.org/guides/getting_started for upgrade instructions." exit -3 } @@ -24,8 +22,8 @@ check_clojure_cli() { elif [ "$minor_version" -eq "10" ]; then if [ "$patch_version" -lt "1" ]; then you_need_to_upgrade - elif [ "$patch_version" -eq "1" ]; then - if [ "$build_version" -lt "708" ]; then + elif [ "$patch_version" -eq "3" ]; then + if [ "$build_version" -lt "905" ]; then you_need_to_upgrade fi fi diff --git a/bin/clear-outdated-cpcaches.sh b/bin/clear-outdated-cpcaches.sh new file mode 100755 index 000000000000..89c2e43cd3d9 --- /dev/null +++ b/bin/clear-outdated-cpcaches.sh @@ -0,0 +1,51 @@ +#! /usr/bin/env bash + +set -euo pipefail + +script_directory=`dirname "${BASH_SOURCE[0]}"` + +# This function will clear all the .cpcache directories if any deps.edn file is newer than any of them. +clear_outdated_cpcaches() { + echo "Clearing outdated .cpcache directories if needed..." + + # switch to project root directory if we're not already there + cd "$script_directory/.." 
+ project_root=`pwd` + + cpcaches=`find bin java modules -type d -name .cpcache` + if [ -d .cpcache ]; then + cpcaches=".cpcache $cpcaches" + fi + if [ -z "$cpcaches" ]; then + echo "No .cpcache directories found; nothing to do" + return 0 + fi + + deps_edns="deps.edn $(find bin java modules -type f -name deps.edn)" + + # find the OLDEST cpcache and NEWEST deps.edn files. + oldest_cpcache="" + for cpcache in $cpcaches; do + if [ -z "$oldest_cpcache" ] || [ "$cpcache" -ot "$oldest_cpcache" ]; then + oldest_cpcache="$cpcache" + fi + done + + newest_deps_edn="" + for deps_edn in $deps_edns; do + if [ -z "$newest_deps_edn" ] || [ "$deps_edn" -nt "$newest_deps_edn" ]; then + newest_deps_edn="$deps_edn" + fi + done + + # if the newest deps.edn is newer than the *ANY* of the cpcaches, clear all the cpcaches. + if [ "$newest_deps_edn" -nt "$oldest_cpcache" ]; then + echo "$newest_deps_edn is newer than $oldest_cpcache; deleting all .cpcache directories" + for cpcache in $cpcaches; do + echo "rm -rf $cpcache" + rm -rf "$cpcache" + done + else + echo ".cpcache directories are up to date." + fi +} diff --git a/bin/common/src/metabuild_common/core.clj b/bin/common/src/metabuild_common/core.clj index 08b592c67b93..770d95efa219 100644 --- a/bin/common/src/metabuild_common/core.clj +++ b/bin/common/src/metabuild_common/core.clj @@ -35,6 +35,7 @@ env-or-throw] [files + absolute? assert-file-exists copy-file! create-directory-unless-exists! 
@@ -56,12 +57,12 @@ yes-or-no-prompt] [misc + parse-as-keyword varargs] [output announce error - format-bytes pretty-print-exception safe-println] diff --git a/bin/common/src/metabuild_common/files.clj b/bin/common/src/metabuild_common/files.clj index cff9f9407d90..318e43d6f049 100644 --- a/bin/common/src/metabuild_common/files.clj +++ b/bin/common/src/metabuild_common/files.clj @@ -1,6 +1,6 @@ (ns metabuild-common.files - (:require [clojure.string :as str] - [environ.core :as env] + (:require [clojure.java.io :as io] + [clojure.string :as str] [metabuild-common.misc :as misc] [metabuild-common.output :as out] [metabuild-common.shell :as sh] @@ -97,20 +97,16 @@ (str/join File/separatorChar path-components)) (def ^String project-root-directory - "Root directory of the Metabase repo, e.g. `/users/cam/metabase`. Determined by finding the directory that has - `project.clj` in it." - (loop [^File dir (File. ^String (env/env :user-dir))] - (cond - (file-exists? (filename (.getAbsolutePath dir) "project.clj")) - (.getAbsolutePath dir) - - (.getParentFile dir) - (recur (.getParentFile dir)) - - :else - (throw (ex-info (format "Can't find project root directory: no parent directory of %s has a project.clj file" - (env/env :user-dir)) - {:dir (env/env :user-dir)}))))) + "Root directory of the Metabase repo, e.g. `/users/cam/metabase`. Determined based on its location relative to this + source file." + (.. (Paths/get (.toURI (io/resource "metabuild_common/files.clj"))) + toFile + getParentFile ; /home/cam/metabase/bin/common/src/metabuild_common + getParentFile ; /home/cam/metabase/bin/common/src/ + getParentFile ; /home/cam/metabase/bin/common/ + getParentFile ; /home/cam/metabase/bin/ + getParentFile ; /home/cam/metabase/ + getCanonicalPath)) (defn download-file! "Download a file from `url` to `dest-path` using `wget`." @@ -139,3 +135,8 @@ (when delete-on-exit? (.deleteOnExit file)) file))) + +(defn absolute? + "Whether `file` is an absolute path." 
+ [file] + (.isAbsolute (io/file file))) diff --git a/bin/common/src/metabuild_common/misc.clj b/bin/common/src/metabuild_common/misc.clj index 02c1779c5fba..a71a1143817c 100644 --- a/bin/common/src/metabuild_common/misc.clj +++ b/bin/common/src/metabuild_common/misc.clj @@ -1,10 +1,23 @@ -(ns metabuild-common.misc) +(ns metabuild-common.misc + (:require [clojure.string :as str])) (defmacro varargs "Utility macro for passing varargs of a certain `klass` to a Java method. (Files/createTempFile \"driver\" \".jar\" (varargs FileAttribute))" - {:style/indent 1, :arglists '([klass] [klass xs])} + {:arglists '([klass] [klass xs])} [klass & [objects]] (vary-meta `(into-array ~klass ~objects) assoc :tag (format "[L%s;" (.getCanonicalName ^Class (ns-resolve *ns* klass))))) + +(defn parse-as-keyword + "Like [[clojure.core/keyword]], but with a couple of tweaks to make it better for parsing command-line args: + + * empty strings get parsed to `nil` instead of an empty keyword `:` + * strings starting with `:` e.g. `\":driver\"` get parsed to normal keywords e.g. `:driver` instead of `::driver` + (which is super confusing, because it's an _unnamespaced_ keyword whose _name_ is `:driver`)" + [s] + (cond + (keyword? s) s + (not (str/blank? s)) (keyword (cond-> s + (str/starts-with? s ":") (.substring 1))))) diff --git a/bin/common/src/metabuild_common/output.clj b/bin/common/src/metabuild_common/output.clj index 3e70771d0343..2e49b694be06 100644 --- a/bin/common/src/metabuild_common/output.clj +++ b/bin/common/src/metabuild_common/output.clj @@ -40,12 +40,3 @@ (println (colorize/red (str "Step failed: " (.getMessage e)))) (binding [pprint/*print-right-margin* 120] (pprint/pprint e-map)))) - -(defn format-bytes - "Nicely format `num-bytes` in a human-readable way (e.g. 
KB/MB/etc.)" - [num-bytes] - (loop [n num-bytes [suffix & more] ["B" "KB" "MB" "GB"]] - (if (and (seq more) - (>= n 1024)) - (recur (/ n 1024.0) more) - (format "%.1f %s" n suffix)))) diff --git a/bin/common/test/metabuild_common/misc_test.clj b/bin/common/test/metabuild_common/misc_test.clj new file mode 100644 index 000000000000..9f3d004ebb4c --- /dev/null +++ b/bin/common/test/metabuild_common/misc_test.clj @@ -0,0 +1,12 @@ +(ns metabuild-common.misc-test + (:require [clojure.test :refer :all] + [metabuild-common.misc :as misc])) + +(deftest parse-as-keyword-test + (are [input expected] (= expected (misc/parse-as-keyword input)) + "abc" :abc + ":abc" :abc + "" nil + " " nil + :abc :abc + nil nil)) diff --git a/bin/compare-screenshots b/bin/compare-screenshots deleted file mode 100755 index 4f7e2535a482..000000000000 --- a/bin/compare-screenshots +++ /dev/null @@ -1,145 +0,0 @@ -#!/usr/bin/env babel-node - -import fetch from "isomorphic-fetch"; -import path from "path"; -import fs from "fs-promise" -import imageDiff_ from "image-diff"; -import https from "https"; -import os from "os"; - -import { WebClient } from "@slack/client"; - -const CIRCLECI_TOKEN = process.env["CIRCLECI_TOKEN"]; -const SLACK_TOKEN = process.env["SLACK_TOKEN"]; -const SLACK_CHANNEL = "#ci-screenshots"; - -const CIRCLE_PROJECT = "github/metabase/metabase"; -const CIRCLE_BRANCH = "master"; - -const CIRCLE_SCREENSHOT_PATH = "/home/ubuntu/metabase/screenshots/"; - -const slack = new WebClient(SLACK_TOKEN); - -async function circleci(path) { - const response = await fetch( - `https://circleci.com/api/v1.1/${path}?circle-token=${encodeURIComponent(CIRCLECI_TOKEN)}` - ); - return response.json(); -} - -function imageDiff(options) { - return new Promise((resolve, reject) => { - imageDiff_.getFullResult(options, (err, result) => - err ? 
reject(err) : resolve(result) - ); - }); -} - -function download(url, path) { - return new Promise((resolve, reject) => { - https.get(url, response => { - response.pipe(fs.createWriteStream(path)).on("finish", resolve); - }).on('error', reject); - }); -} - -async function getCircleArtifactScreenshots(buildPath) { - let artifacts = await circleci(`project/${buildPath}/artifacts`); - let results = {}; - for (const artifact of artifacts) { - if (artifact.pretty_path.startsWith(CIRCLE_SCREENSHOT_PATH)) { - const downloadPath = path.join(os.tmpdir(), path.basename(artifact.pretty_path)); - console.log("Downloading ", artifact.url, "to", downloadPath); - await download(artifact.url, downloadPath); - results[artifact.pretty_path.slice(CIRCLE_SCREENSHOT_PATH.length)] = downloadPath; - } - } - return results; -} - -async function getLocalScreenshots(directory) { - const filenames = await fs.readdir(directory); - let results = {}; - for (const filename of filenames) { - results[filename] = path.resolve(directory, filename); - } - return results; -} - -async function getScreenshots(target) { - if (target.circleProject && target.circleBranch) { - let builds = await circleci(`project/${target.circleProject}/tree/${target.circleBranch}`); - let ok = builds.filter(build => build.status === "success" || build.status === "fixed"); - let screenshots = await getCircleArtifactScreenshots(`${target.circleProject}/${ok[0].build_num}`); - return screenshots; - } else if (target.localDirectory) { - return await getLocalScreenshots(target.localDirectory); - } else { - throw "unknown target type"; - } -} - -async function run(expectedTarget, actualTarget) { - try { - const expectedScreenshots = await getScreenshots(expectedTarget); - console.log("Expected", Object.keys(expectedScreenshots)); - const actualScreenshots = await getScreenshots(actualTarget); - console.log("Actual", Object.keys(expectedScreenshots)); - let images = Object.keys({ ...expectedScreenshots, ...actualScreenshots }); - 
- for (const image of images) { - const expectedImage = expectedScreenshots[image]; - const actualImage = actualScreenshots[image]; - const diffImage = path.join(os.tmpdir(), "diff-"+image); - if (!actualImage) { - console.log("Added", image); - await slack.files.upload(image, { - title: "Added " + image, - channels: [SLACK_CHANNEL], - file: fs.createReadStream(actualImage) - }); - } else if (!expectedImage) { - console.log("Removed", image); - await slack.files.upload(image, { - title: "Removed " + image, - channels: [SLACK_CHANNEL], - file: fs.createReadStream(expectedImage) - }); - } else { - const result = await imageDiff({ - expectedImage, - actualImage, - diffImage, - shadow: true - }) - if (result.percentage === 0.0) { - console.log("No difference", image); - } else { - console.log("Changed", result.percentage.toFixed(2), image); - await slack.files.upload(image, { - title: "Diff (" + result.percentage.toFixed(2) + ") " + image, - channels: [SLACK_CHANNEL], - file: fs.createReadStream(diffImage) - }); - await slack.files.upload(image, { - title: "Expected " + image, - channels: [SLACK_CHANNEL], - file: fs.createReadStream(expectedImage) - }); - await slack.files.upload(image, { - title: "Actual " + image, - channels: [SLACK_CHANNEL], - file: fs.createReadStream(actualImage) - }); - } - } - } - } catch (e) { - console.error(e); - } -} - -run( - { circleProject: CIRCLE_PROJECT, circleBranch: CIRCLE_BRANCH }, - { localDirectory: "screenshots" } -); diff --git a/bin/i18n/src/i18n/common.clj b/bin/i18n/src/i18n/common.clj index 379df14445ae..0ac7f5f2cacc 100644 --- a/bin/i18n/src/i18n/common.clj +++ b/bin/i18n/src/i18n/common.clj @@ -36,7 +36,7 @@ {:id (.getMsgid message) :id-plural (.getMsgidPlural message) :str (.getMsgstr message) - :str-plural (seq (remove str/blank? (.getMsgstrPlural message))) + :str-plural (seq (.getMsgstrPlural message)) :fuzzy? (.isFuzzy message) :plural? (.isPlural message) :source-references (seq (remove str/blank? 
(.getSourceReferences message))) diff --git a/bin/i18n/src/i18n/create_artifacts.clj b/bin/i18n/src/i18n/create_artifacts.clj index 23cf7de8ad3b..26187f0aafba 100644 --- a/bin/i18n/src/i18n/create_artifacts.clj +++ b/bin/i18n/src/i18n/create_artifacts.clj @@ -25,6 +25,9 @@ (u/announce "Artifacts for locale %s created successfully." (pr-str locale)))) (defn- create-artifacts-for-all-locales! [] + ;; Empty directory in case some locales were removed + (u/delete-file-if-exists! backend/target-directory) + (u/delete-file-if-exists! frontend/target-directory) (doseq [locale (i18n/locales)] (create-artifacts-for-locale! locale))) diff --git a/bin/i18n/src/i18n/create_artifacts/frontend.clj b/bin/i18n/src/i18n/create_artifacts/frontend.clj index dd94a35e7a8f..3e326b754c09 100644 --- a/bin/i18n/src/i18n/create_artifacts/frontend.clj +++ b/bin/i18n/src/i18n/create_artifacts/frontend.clj @@ -43,7 +43,7 @@ (defn- i18n-map [locale] (->i18n-map (i18n/po-contents locale))) -(def ^:private target-directory +(def target-directory (u/filename u/project-root-directory "resources" "frontend_client" "app" "locales")) (defn- target-filename [locale] diff --git a/bin/i18n/test/i18n/create_artifacts/frontend_test.clj b/bin/i18n/test/i18n/create_artifacts/frontend_test.clj index a074159cfd62..5d2594dc3232 100644 --- a/bin/i18n/test/i18n/create_artifacts/frontend_test.clj +++ b/bin/i18n/test/i18n/create_artifacts/frontend_test.clj @@ -25,5 +25,9 @@ "${ 0 } Queryable Table" {:msgid_plural "{0} Queryable Tables" - :msgstr ["${ 0 } Tabla Consultable" "${ 0 } Tablas consultables"]}}}} + :msgstr ["${ 0 } Tabla Consultable" "${ 0 } Tablas consultables"]} + + "${ 0 } metric" + {:msgid_plural "{0} metrics" + :msgstr ["${ 0 } metrik" ""]}}}} (#'frontend/->i18n-map test-common/po-contents)))) diff --git a/bin/i18n/test/i18n/create_artifacts/test_common.clj b/bin/i18n/test/i18n/create_artifacts/test_common.clj index 01ade63706f8..58e22786c980 100644 --- 
a/bin/i18n/test/i18n/create_artifacts/test_common.clj +++ b/bin/i18n/test/i18n/create_artifacts/test_common.clj @@ -50,12 +50,23 @@ :source-references ["frontend/src/metabase/admin/datamodel/components/database/MetadataTableList.jsx:77"] :comment nil}) +(def plural-message-frontend-with-empty + {:id "{0} metric" + :id-plural "{0} metrics" + :str nil + :str-plural ["{0} metrik" ""] + :fuzzy? false + :plural? true + :source-references ["frontend/src/metabase/query_builder/components/view/QuestionDescription.jsx:20"] + :comment nil}) + (def messages [singular-message-frontend singular-message-backend singular-template-message-frontend singular-template-message-backend - plural-message-frontend]) + plural-message-frontend + plural-message-frontend-with-empty]) (def po-contents {:headers {"MIME-Version" "1.0", diff --git a/bin/i18n/update-translation-template b/bin/i18n/update-translation-template index 4a4b979dbe4a..e327cda37213 100755 --- a/bin/i18n/update-translation-template +++ b/bin/i18n/update-translation-template @@ -1,6 +1,6 @@ -#!/bin/sh +#! /usr/bin/env bash -set -eu +set -euo pipefail # gettext installed via homebrew is "keg-only", add it to the PATH if [ -d "/usr/local/opt/gettext/bin" ]; then @@ -13,6 +13,16 @@ if ! command -v xgettext > /dev/null; then exit 1 fi +# switch to project root directory if we're not already there +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/../.." 
+ +source "./bin/check-clojure-cli.sh" +check_clojure_cli + +source "./bin/prep.sh" +prep_deps + POT_NAME="locales/metabase.pot" POT_BACKEND_NAME="locales/metabase-backend.pot" # NOTE: hardcoded in .babelrc @@ -27,7 +37,7 @@ mkdir -p "locales" ####################### # NOTE: about twice as fast to call babel directly rather than a full webpack build -BABEL_ENV=extract ./node_modules/.bin/babel -q -x .js,.jsx -o /dev/null {enterprise/,}frontend/src +BABEL_ENV=extract ./node_modules/.bin/babel --quiet -x .js,.jsx -o /dev/null {enterprise/,}frontend/src # BABEL_ENV=extract BABEL_DISABLE_CACHE=1 yarn run build # NOTE: replace ttag's "${ 0 }" style references with xgettext "{0}" style references for consistency @@ -68,7 +78,7 @@ rm "$POT_BACKEND_NAME.bak" # update auto dash pot # ######################## -lein generate-automagic-dashboards-pot +clojure -M:generate-automagic-dashboards-pot ################## # merge all pots # diff --git a/bin/lint-migrations-file.sh b/bin/lint-migrations-file.sh index 419ea7540d28..e649dcdf9af1 100755 --- a/bin/lint-migrations-file.sh +++ b/bin/lint-migrations-file.sh @@ -9,5 +9,11 @@ cd "$script_directory/.." source "./bin/check-clojure-cli.sh" check_clojure_cli +source "./bin/clear-outdated-cpcaches.sh" +clear_outdated_cpcaches + +source "./bin/prep.sh" +prep_deps + cd bin/lint-migrations-file clojure -M -m lint-migrations-file $@ diff --git a/bin/lint-migrations-file/deps.edn b/bin/lint-migrations-file/deps.edn index eadce4b742bb..02d58a2d9182 100644 --- a/bin/lint-migrations-file/deps.edn +++ b/bin/lint-migrations-file/deps.edn @@ -1,7 +1,7 @@ {:paths ["src"] :deps - {io.forward/yaml {:mvn/version "1.0.9"} ; don't upgrade to 1.0.10 -- doesn't work on Java 8 (!) 
+ {io.forward/yaml {:mvn/version "1.0.9"} ; Don't upgrade yet, new version doesn't support Java 8 (see https://github.com/owainlewis/yaml/issues/37) org.flatland/ordered {:mvn/version "1.5.9"}} ; used by io.forward/yaml -- need the newer version :aliases diff --git a/bin/prep.sh b/bin/prep.sh new file mode 100755 index 000000000000..0962c281d8d0 --- /dev/null +++ b/bin/prep.sh @@ -0,0 +1,60 @@ +#! /usr/bin/env bash + +# functions for running prep steps to compile Java and AOT source files, needed before running other stuff. + +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/.." +project_root=`pwd` + +clear_cpcaches() { + cd "$project_root" + for file in `find . -type d -name .cpcache`; do + rm -rf "$file" + done +} + +compile_java_sources() { + cd "$project_root" + + echo "Compile Java source files in $project_root/java if needed..." + if [ ! -d "$project_root/java/target/classes" ]; then + echo 'Compile Java source files' + cd "$project_root" + clojure -Sforce -X:deps prep + else + echo 'Java source files are already compiled' + fi +} + +compile_spark_sql_aot_sources() { + cd "$project_root" + + echo "Compile Spark SQL AOT source files in $project_root/modules/drivers/sparksql if needed..." + if [ ! -d "$project_root/modules/drivers/sparksql/target/classes" ]; then + echo 'Compile Spark SQL AOT source files' + cd "$project_root/modules/drivers" + clojure -Sforce -X:deps prep + else + echo 'Spark SQL AOT source files are already compiled' + fi +} + +prep_deps() { + if compile_java_sources; then + echo "Java sources => OK" + else + echo 'Compilation failed (WHY?!); clearing classpath caches and trying again...' + clear_cpcaches + compile_java_sources + fi + + if compile_spark_sql_aot_sources; then + echo "Spark SQL AOT sources => OK" + else + echo 'Compilation failed (WHY?!); clearing classpath caches and trying again...' 
+ clear_cpcaches + compile_spark_sql_aot_sources + fi + + cd "$project_root" +} diff --git a/bin/reflection-linter b/bin/reflection-linter deleted file mode 100755 index 559267ebb08d..000000000000 --- a/bin/reflection-linter +++ /dev/null @@ -1,13 +0,0 @@ -#! /usr/bin/env bash - -printf "\e[1;34mChecking for reflection warnings. This may take a few minutes, so sit tight...\e[0m\n" - -warnings=`lein with-profile +ci,+ee check-reflection-warnings 2>&1 | grep Reflection | grep metabase | sort | uniq` - -if [ ! -z "$warnings" ]; then - printf "\e[1;31mYour code has introduced some reflection warnings.\e[0m 😞\n" - echo "$warnings"; - exit -1; -fi - -printf "\e[1;32mNo reflection warnings! Success.\e[0m\n" diff --git a/bin/release.sh b/bin/release.sh index 19dd88914e6f..b8fc3c68bb3c 100755 --- a/bin/release.sh +++ b/bin/release.sh @@ -2,8 +2,18 @@ set -euo pipefail +# switch to project root directory if we're not already there +script_directory=`dirname "${BASH_SOURCE[0]}"` +cd "$script_directory/.." 
+ source "./bin/check-clojure-cli.sh" check_clojure_cli +source "./bin/clear-outdated-cpcaches.sh" +clear_outdated_cpcaches + +source "./bin/prep.sh" +prep_deps + cd bin/release clojure -M -m release $@ diff --git a/bin/release/deps.edn b/bin/release/deps.edn index b9e0e5d2365f..cffa79de3fec 100644 --- a/bin/release/deps.edn +++ b/bin/release/deps.edn @@ -14,8 +14,4 @@ :extra-deps {com.cognitect/test-runner {:git/url "https://github.com/cognitect-labs/test-runner.git" :sha "209b64504cb3bd3b99ecfec7937b358a879f55c1"} org.clojure/data.json {:mvn/version "2.0.2"}} - :main-opts ["-m" "cognitect.test-runner"]} - :nREPL {:extra-paths ["test"] - :extra-deps {nrepl/nrepl {:mvn/version "0.8.3"} - org.clojure/data.json {:mvn/version "2.0.2"}} - :main-opts ["-m" "nrepl.cmdline" "-i"]}}} + :main-opts ["-m" "cognitect.test-runner"]}}} diff --git a/bin/release/src/release.clj b/bin/release/src/release.clj index 96ea1327a4fb..06acdd36afb6 100644 --- a/bin/release/src/release.clj +++ b/bin/release/src/release.clj @@ -53,6 +53,6 @@ (u/exit-when-finished-nonzero-on-exception (check-prereqs/check-prereqs) (set-build-options/prompt-and-set-build-options!) - (let [steps (or (seq (map keyword steps)) + (let [steps (or (seq (map u/parse-as-keyword steps)) (keys steps*))] (do-steps! 
steps)))) diff --git a/bin/release/src/release/elastic_beanstalk.clj b/bin/release/src/release/elastic_beanstalk.clj index a64bb737c17a..83276b49912b 100644 --- a/bin/release/src/release/elastic_beanstalk.clj +++ b/bin/release/src/release/elastic_beanstalk.clj @@ -26,6 +26,10 @@ "Source location of the .ebextensions directory" (u/assert-file-exists (u/filename c/root-directory "bin" "release" "src" "release" "elastic_beanstalk" ".ebextensions"))) +(def ^:private eb-platform-source + "Source location of the .platform directory" + (u/assert-file-exists (u/filename c/root-directory "bin" "release" "src" "release" "elastic_beanstalk" ".platform"))) + (def ^:private archive-temp-dir "Path where we'll put the contents of the ZIP file before we create it." "/tmp/metabase-aws-eb") @@ -75,6 +79,8 @@ (json/generate-string (dockerrun-json-content) {:pretty true}))) (u/step "Copy .ebextensions" (u/copy-file! eb-extensions-source (u/filename archive-temp-dir ".ebextensions"))) + (u/step "Copy .platform" + (u/copy-file! eb-platform-source (u/filename archive-temp-dir ".platform"))) (u/step "Create metabase-aws-eb.zip" (u/delete-file-if-exists! 
archive-path) (u/sh {:dir archive-temp-dir} "zip" "--recurse-paths" archive-path ".") diff --git a/bin/release/src/release/elastic_beanstalk/.ebextensions/01_metabase.config b/bin/release/src/release/elastic_beanstalk/.ebextensions/01_metabase.config index f8544d69ff49..36b63f5bda57 100644 --- a/bin/release/src/release/elastic_beanstalk/.ebextensions/01_metabase.config +++ b/bin/release/src/release/elastic_beanstalk/.ebextensions/01_metabase.config @@ -4,59 +4,34 @@ # Comment out a variable to disable a feature ##### -files: - "/tmp/install" : - mode: "000755" - owner: root - group: root - source: https://inspector-agent.amazonaws.com/linux/latest/install - container_commands: - # do server_https first to avoid overwriting other config changes - 01_server_https: - command: ".ebextensions/metabase_config/metabase-setup.sh server_https" - ignoreErrors: true - - 02_log_x_real_ip: - command: ".ebextensions/metabase_config/metabase-setup.sh log_x_real_ip" - ignoreErrors: true + 01-persist_env_vars: + command: ".ebextensions/metabase_config/metabase-setup.sh set_up_env_vars" + ignoreErrors: false - 03_install_papertrail: + 02_install_papertrail: command: ".ebextensions/metabase_config/metabase-setup.sh install_papertrail" test: test $PAPERTRAIL_HOST ignoreErrors: true - 04_try_papertrail: + 03_try_papertrail: command: "/sbin/service remote_syslog restart" test: test -e /etc/log_files.yml ignoreErrors: true - - 05_try_nginx: - command: "/sbin/service nginx restart" - test: nginx -t - ignoreErrors: false - 06-install_inspector: - command: bash -x install -u false - cwd: /tmp/ + 04_install_inspector: + command: yum -q list installed AwsAgent &>/dev/null && echo "Inspector installed, nothing to do here" || curl -O https://inspector-agent.amazonaws.com/linux/latest/install | sudo bash + ignoreErrors: true - 07-install_cloudwatch_agent: - command: sudo yum install amazon-cloudwatch-agent -y + 05-install_cloudwatch_agent: + command: sudo yum update && sudo yum upgrade -y && yum 
-q list installed amazon-cloudwatch-agent &>/dev/null && echo "Cloudwatch agent installed, nothing to do here" || sudo yum -y install https://s3.amazonaws.com/amazoncloudwatch-agent/amazon_linux/amd64/latest/amazon-cloudwatch-agent.rpm + ignoreErrors: true - 08-run_cloudwatch_agent: + 06-run_cloudwatch_agent: command: sudo /opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-ctl -a fetch-config -m ec2 -c file:.ebextensions/metabase_config/cloudwatch/config.json -s - - 09-persist_env_vars: - command: /opt/elasticbeanstalk/bin/get-config environment | jq -r 'to_entries | .[] | "export \(.key)=\"\(.value)\""' > /etc/profile.d/sh.local - - 10_set_up_env_vars: - command: ".ebextensions/metabase_config/metabase-setup.sh set_up_env_vars" ignoreErrors: true +# NGINX will be disabled in future releases, this will make the trick #option_settings: -# aws:elasticbeanstalk:application:environment -# NGINX_FORCE_SSL=1 -# PAPERTRAIL_HOSTNAME=$HOSTNAME -# PAPERTRAIL_HOST=foobar.papertrailapp.com -# PAPERTRAIL_PORT=12345 -# PAPERTRAIL_FILES=/var/log/nginx/access.log /var/log/nginx/error.log \ No newline at end of file +# aws:elasticbeanstalk:environment:proxy: +# ProxyServer: "none" \ No newline at end of file diff --git a/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/cloudwatch/config.json b/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/cloudwatch/config.json index a71cc0fbd496..a6ecec67597c 100644 --- a/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/cloudwatch/config.json +++ b/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/cloudwatch/config.json @@ -7,39 +7,39 @@ "metrics_collected": { "cpu": { "measurement": [ - "time_active", - "time_guest", - "time_guest_nice", - "time_idle", - "time_iowait", - "time_irq", - "time_nice", - "time_softirq", - "time_steal", - "time_system", - "time_user", - "usage_active", - "usage_guest", - "usage_guest_nice", - "usage_idle", - 
"usage_iowait", - "usage_irq", - "usage_nice", - "usage_softirq", - "usage_steal", - "usage_system", + "time_active", + "time_guest", + "time_guest_nice", + "time_idle", + "time_iowait", + "time_irq", + "time_nice", + "time_softirq", + "time_steal", + "time_system", + "time_user", + "usage_active", + "usage_guest", + "usage_guest_nice", + "usage_idle", + "usage_iowait", + "usage_irq", + "usage_nice", + "usage_softirq", + "usage_steal", + "usage_system", "usage_user" ], "totalcpu": false }, "disk": { "measurement": [ - "free", - "total", - "used", - "used_percent", - "inodes_free", - "inodes_used", + "free", + "total", + "used", + "used_percent", + "inodes_free", + "inodes_used", "inodes_total" ], "ignore_file_system_types": [ @@ -49,76 +49,76 @@ "measurement": [ "reads", "writes", - "read_bytes", - "write_bytes", - "read_time", - "write_time", - "io_time", + "read_bytes", + "write_bytes", + "read_time", + "write_time", + "io_time", "iops_in_progress" ] }, "swap": { "measurement": [ - "free", - "used", + "free", + "used", "used_percent" ] }, "mem": { "measurement": [ - "active", - "available", - "available_percent", - "buffered", - "cached", - "free", - "inactive", - "total", - "used", + "active", + "available", + "available_percent", + "buffered", + "cached", + "free", + "inactive", + "total", + "used", "used_percent" ] }, "net": { "measurement": [ - "bytes_sent", - "bytes_recv", - "drop_in", - "drop_out", - "err_in", - "err_out", - "packets_sent", + "bytes_sent", + "bytes_recv", + "drop_in", + "drop_out", + "err_in", + "err_out", + "packets_sent", "packets_recv" ] }, "netstat": { "measurement": [ - "tcp_close", - "tcp_close_wait", - "tcp_closing", - "tcp_established", - "tcp_fin_wait1", - "tcp_fin_wait2", - "tcp_last_ack", - "tcp_listen", - "tcp_none", - "tcp_syn_sent", - "tcp_syn_recv", - "tcp_time_wait", + "tcp_close", + "tcp_close_wait", + "tcp_closing", + "tcp_established", + "tcp_fin_wait1", + "tcp_fin_wait2", + "tcp_last_ack", + "tcp_listen", + "tcp_none", + 
"tcp_syn_sent", + "tcp_syn_recv", + "tcp_time_wait", "udp_socket" ] }, "processes": { "measurement": [ - "blocked", - "dead", - "idle", - "paging", - "running", - "sleeping", - "stopped", - "total", - "total_threads", - "wait", + "blocked", + "dead", + "idle", + "paging", + "running", + "sleeping", + "stopped", + "total", + "total_threads", + "wait", "zombies" ] } @@ -131,4 +131,4 @@ }, "aggregation_dimensions" : [["InstanceId"], ["InstanceType"], ["InstanceId","InstanceType"]] } -} \ No newline at end of file +} diff --git a/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/metabase-setup.sh b/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/metabase-setup.sh index 213993fa3de6..71f610d37380 100755 --- a/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/metabase-setup.sh +++ b/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/metabase-setup.sh @@ -5,6 +5,52 @@ # Unset a variable to disable a feature #### +set_up_env_vars () { + # /opt/elasticbeanstalk/bin/get-config environment | jq -r 'to_entries | .[] | "export \(.key)=\"\(.value)\""' > /etc/profile.d/sh.local + if grep -q "Amazon Linux 2" /etc/os-release; then + if [ ! 
-z "$RDS_HOSTNAME" ]; then + # sed -i 's/RDS_HOSTNAME/MB_DB_HOST/' /etc/profile.d/sh.local + sed -i 's/RDS_HOSTNAME/MB_DB_HOST/' /opt/elasticbeanstalk/deployment/env.list + # sed -i 's/RDS_USERNAME/MB_DB_USER/' /etc/profile.d/sh.local + sed -i 's/RDS_USERNAME/MB_DB_USER/' /opt/elasticbeanstalk/deployment/env.list + # sed -i 's/RDS_PASSWORD/MB_DB_PASS/' /etc/profile.d/sh.local + sed -i 's/RDS_PASSWORD/MB_DB_PASS/' /opt/elasticbeanstalk/deployment/env.list + # sed -i 's/RDS_PORT/MB_DB_PORT/' /etc/profile.d/sh.local + sed -i 's/RDS_PORT/MB_DB_PORT/' /opt/elasticbeanstalk/deployment/env.list + # sed -i 's/RDS_DB_NAME/MB_DB_DBNAME/' /etc/profile.d/sh.local + sed -i 's/RDS_DB_NAME/MB_DB_DBNAME/' /opt/elasticbeanstalk/deployment/env.list + if [ "$RDS_PORT" == "3306" ]; then + # echo 'export MB_DB_TYPE="mysql"' >> /etc/profile.d/sh.local + echo 'MB_DB_TYPE=mysql' >> /opt/elasticbeanstalk/deployment/env.list + else + # echo 'export MB_DB_TYPE="postgres"' >> /etc/profile.d/sh.local + echo 'MB_DB_TYPE=postgres' >> /opt/elasticbeanstalk/deployment/env.list + fi + fi + else + if [ ! 
-z "$RDS_HOSTNAME" ]; then + # sed -i 's/RDS_HOSTNAME/MB_DB_HOST/' /etc/profile.d/sh.local + sed -i 's/RDS_HOSTNAME/MB_DB_HOST/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration + # sed -i 's/RDS_USERNAME/MB_DB_USER/' /etc/profile.d/sh.local + sed -i 's/RDS_USERNAME/MB_DB_USER/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration + # sed -i 's/RDS_PASSWORD/MB_DB_PASS/' /etc/profile.d/sh.local + sed -i 's/RDS_PASSWORD/MB_DB_PASS/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration + # sed -i 's/RDS_PORT/MB_DB_PORT/' /etc/profile.d/sh.local + sed -i 's/RDS_PORT/MB_DB_PORT/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration + # sed -i 's/RDS_DB_NAME/MB_DB_DBNAME/' /etc/profile.d/sh.local + sed -i 's/RDS_DB_NAME/MB_DB_DBNAME/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration + if [ "$RDS_PORT" == "3306" ]; then + # echo 'export MB_DB_TYPE="mysql"' >> /etc/profile.d/sh.local + sed -i 's/}}}}/,"MB_DB_TYPE":"mysql"}}}}/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration + else + # echo 'export MB_DB_TYPE="postgres"' >> /etc/profile.d/sh.local + sed -i 's/}}}}/,"MB_DB_TYPE":"postgres"}}}}/' /opt/elasticbeanstalk/deploy/configuration/containerconfiguration + fi + fi + fi + +} + # add files to papertrail pt_files () { sed -i '/ - .*/d' /etc/log_files.yml @@ -31,118 +77,12 @@ pt_local_host () { sed -i "s/.*hostname:.*/hostname: $PAPERTRAIL_HOSTNAME/" /etc/log_files.yml } -# enable https redirect -server_https () { - cd /etc/nginx/sites-available/ - if [[ "x$NGINX_FORCE_SSL" == "x1" ]] # && ! 
grep -q https elasticbeanstalk-nginx-docker-proxy.conf ; - then - cat << 'EOF' > elasticbeanstalk-nginx-docker-proxy.conf -map $http_upgrade $connection_upgrade { - default "upgrade"; - "" ""; -} - -server { - listen 80; - - gzip on; - gzip_comp_level 4; - gzip_types text/html text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript; - - if ($time_iso8601 ~ "^(\d{4})-(\d{2})-(\d{2})T(\d{2})") { - set $year $1; - set $month $2; - set $day $3; - set $hour $4; - } - - access_log /var/log/nginx/access.log; - - location /api/health { - proxy_pass http://docker; - proxy_http_version 1.1; - - proxy_set_header Connection $connection_upgrade; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_connect_timeout 600; - proxy_send_timeout 600; - proxy_read_timeout 600; - send_timeout 600; - - } - - - location / { - if ($http_x_forwarded_proto != "https") { - rewrite ^ https://$host$request_uri? 
permanent; - } - - proxy_pass http://docker; - proxy_http_version 1.1; - - proxy_set_header Connection $connection_upgrade; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_connect_timeout 600; - proxy_send_timeout 600; - proxy_read_timeout 600; - send_timeout 600; - } -} -EOF - else - cat << 'EOF' > elasticbeanstalk-nginx-docker-proxy.conf -map $http_upgrade $connection_upgrade { - default "upgrade"; - "" ""; -} - -server { - listen 80; - - gzip on; - gzip_comp_level 4; - gzip_types text/html text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript; - - if ($time_iso8601 ~ "^(\d{4})-(\d{2})-(\d{2})T(\d{2})") { - set $year $1; - set $month $2; - set $day $3; - set $hour $4; - } - - access_log /var/log/nginx/access.log; - - location / { - proxy_pass http://docker; - proxy_http_version 1.1; - - proxy_set_header Connection $connection_upgrade; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_connect_timeout 600; - proxy_send_timeout 600; - proxy_read_timeout 600; - send_timeout 600; - } -} -EOF - fi -} - # download, install and configure papertrail install_papertrail () { cp .ebextensions/metabase_config/papertrail/log_files.yml /etc/log_files.yml && chmod 644 /etc/log_files.yml cp .ebextensions/metabase_config/papertrail/remote_syslog /etc/init.d/remote_syslog && chmod 555 /etc/init.d/remote_syslog cd /tmp/ - wget -q "https://github.com/papertrail/remote_syslog2/releases/download/v0.14/remote_syslog_linux_amd64.tar.gz" && + wget -q "https://github.com/papertrail/remote_syslog2/releases/download/v0.20/remote_syslog_linux_amd64.tar.gz" && tar xzf remote_syslog_linux_amd64.tar.gz /sbin/service remote_syslog stop mv 
/tmp/remote_syslog/remote_syslog /usr/local/bin/ @@ -154,43 +94,11 @@ install_papertrail () { [[ "$PAPERTRAIL_HOSTNAME" ]] && pt_local_host } -# update nginx logging to include x_real_ip -log_x_real_ip () { - cp .ebextensions/metabase_config/nginx/log_x_real_ip.conf /etc/nginx/conf.d/log_x_real_ip.conf - cd /etc/nginx/sites-available - if ! grep -q access_log *-proxy.conf ; then - sed -i 's|location \/ {|location \/ {\n\n access_log \/var\/log\/nginx\/access.log log_x_real_ip;\n|' *-proxy.conf - fi -} - -set_up_env_vars () { - if [ ! -z "$RDS_HOSTNAME" ]; then - # EEK: this is a bit fragile. if user picks a non-standard port for their db we are screwed :( - if [ "$RDS_PORT" == "3306" ]; then - export MB_DB_TYPE=mysql - else - export MB_DB_TYPE=postgres - fi - - export MB_DB_DBNAME=$RDS_DB_NAME - export MB_DB_USER=$RDS_USERNAME - export MB_DB_PASS=$RDS_PASSWORD - export MB_DB_HOST=$RDS_HOSTNAME - export MB_DB_PORT=$RDS_PORT - fi -} - case $1 in set_up_env_vars) set_up_env_vars ;; -server_https) - server_https - ;; install_papertrail) install_papertrail ;; -log_x_real_ip) - log_x_real_ip - ;; -esac \ No newline at end of file +esac diff --git a/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/nginx/log_x_real_ip.conf b/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/nginx/log_x_real_ip.conf deleted file mode 100644 index a6cbe070a52e..000000000000 --- a/bin/release/src/release/elastic_beanstalk/.ebextensions/metabase_config/nginx/log_x_real_ip.conf +++ /dev/null @@ -1,4 +0,0 @@ -# /etc/nginx/conf.d/log_x_real_ip.conf -log_format log_x_real_ip '$proxy_add_x_forwarded_for - [$time_local] ' - '$request $status $body_bytes_sent ' - '$http_referer $http_user_agent'; diff --git a/bin/release/src/release/elastic_beanstalk/.platform/confighooks/postdeploy/config_nginx.sh b/bin/release/src/release/elastic_beanstalk/.platform/confighooks/postdeploy/config_nginx.sh new file mode 100755 index 000000000000..aa9bfe4cd3f2 --- /dev/null 
+++ b/bin/release/src/release/elastic_beanstalk/.platform/confighooks/postdeploy/config_nginx.sh @@ -0,0 +1,6 @@ +#!/bin/bash +if [[ "x$NGINX_FORCE_SSL" == "x1" ]]; then + cp .platform/nginx/nginx-ssl.conf /etc/nginx/nginx.conf && nginx -t && /sbin/service nginx restart +else + cp .platform/nginx/nginx.conf /etc/nginx/nginx.conf && nginx -t && /sbin/service nginx restart +fi diff --git a/bin/release/src/release/elastic_beanstalk/.platform/hooks/postdeploy/config_nginx.sh b/bin/release/src/release/elastic_beanstalk/.platform/hooks/postdeploy/config_nginx.sh new file mode 100755 index 000000000000..aa9bfe4cd3f2 --- /dev/null +++ b/bin/release/src/release/elastic_beanstalk/.platform/hooks/postdeploy/config_nginx.sh @@ -0,0 +1,6 @@ +#!/bin/bash +if [[ "x$NGINX_FORCE_SSL" == "x1" ]]; then + cp .platform/nginx/nginx-ssl.conf /etc/nginx/nginx.conf && nginx -t && /sbin/service nginx restart +else + cp .platform/nginx/nginx.conf /etc/nginx/nginx.conf && nginx -t && /sbin/service nginx restart +fi diff --git a/bin/release/src/release/elastic_beanstalk/.platform/nginx/nginx-ssl.conf b/bin/release/src/release/elastic_beanstalk/.platform/nginx/nginx-ssl.conf new file mode 100644 index 000000000000..309f03ff3f65 --- /dev/null +++ b/bin/release/src/release/elastic_beanstalk/.platform/nginx/nginx-ssl.conf @@ -0,0 +1,58 @@ +user nginx; +worker_processes auto; +error_log /var/log/nginx/error.log; +pid /var/run/nginx.pid; +worker_rlimit_nofile 65936; + +events { + worker_connections 1024; +} + +http { + include /etc/nginx/mime.types; + default_type application/octet-stream; + + access_log /var/log/nginx/access.log; + + + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_referer" ' + '"$http_user_agent" "$http_x_forwarded_for"'; + + include conf.d/*.conf; + + map $http_upgrade $connection_upgrade { + default "upgrade"; + } + + server { + listen 80 default_server; + gzip on; + gzip_comp_level 4; + gzip_types text/plain text/css 
application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript; + + access_log /var/log/nginx/access.log main; + + location / { + if ($http_x_forwarded_proto != "https") { + rewrite ^ https://$host$request_uri? permanent; + } + + proxy_pass http://docker; + proxy_http_version 1.1; + + proxy_set_header Connection $connection_upgrade; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_connect_timeout 600; + proxy_send_timeout 600; + proxy_read_timeout 600; + send_timeout 600; + } + + # Include the Elastic Beanstalk generated locations + include conf.d/elasticbeanstalk/*.conf; + } +} \ No newline at end of file diff --git a/bin/release/src/release/elastic_beanstalk/.platform/nginx/nginx.conf b/bin/release/src/release/elastic_beanstalk/.platform/nginx/nginx.conf new file mode 100644 index 000000000000..0c0cc7f600c7 --- /dev/null +++ b/bin/release/src/release/elastic_beanstalk/.platform/nginx/nginx.conf @@ -0,0 +1,54 @@ +user nginx; +worker_processes auto; +error_log /var/log/nginx/error.log; +pid /var/run/nginx.pid; +worker_rlimit_nofile 65936; + +events { + worker_connections 1024; +} + +http { + include /etc/nginx/mime.types; + default_type application/octet-stream; + + access_log /var/log/nginx/access.log; + + + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_referer" ' + '"$http_user_agent" "$http_x_forwarded_for"'; + + include conf.d/*.conf; + + map $http_upgrade $connection_upgrade { + default "upgrade"; + } + + server { + listen 80 default_server; + gzip on; + gzip_comp_level 4; + gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript; + + access_log /var/log/nginx/access.log main; + + location / { + proxy_pass http://docker; + proxy_http_version 1.1; + 
+ proxy_set_header Connection $connection_upgrade; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_connect_timeout 600; + proxy_send_timeout 600; + proxy_read_timeout 600; + send_timeout 600; + } + + # Include the Elastic Beanstalk generated locations + include conf.d/elasticbeanstalk/*.conf; + } +} \ No newline at end of file diff --git a/bin/release/test/release/version_info_test.clj b/bin/release/test/release/version_info_test.clj index 8e8c6b783568..73ea4e0febc1 100644 --- a/bin/release/test/release/version_info_test.clj +++ b/bin/release/test/release/version_info_test.clj @@ -44,5 +44,3 @@ (json/read-json true)) expected (make-version-info edition test-versions)] (is (= expected actual))))))) - - diff --git a/build.clj b/build.clj new file mode 100644 index 000000000000..2b52bdf298ea --- /dev/null +++ b/build.clj @@ -0,0 +1,160 @@ +(ns build + (:require [clojure.java.io :as io] + [clojure.string :as str] + [clojure.tools.build.api :as b] + [clojure.tools.build.util.zip :as b.zip] + [clojure.tools.namespace.dependency :as ns.deps] + [clojure.tools.namespace.find :as ns.find] + [clojure.tools.namespace.parse :as ns.parse] + [hf.depstar.api :as d] + [metabuild-common.core :as c]) + (:import java.io.OutputStream + java.net.URI + [java.nio.file Files FileSystems OpenOption StandardOpenOption] + java.util.Collections + java.util.jar.Manifest)) + +(def class-dir "target/classes") +(def uberjar-filename "target/uberjar/metabase.jar") + +(defn do-with-duration-ms [thunk f] + (let [start-time-ms (System/currentTimeMillis) + result (thunk) + duration (- (System/currentTimeMillis) start-time-ms)] + (f duration) + result)) + +(defmacro with-duration-ms [[duration-ms-binding] & body] + (let [[butlast-forms last-form] ((juxt butlast last) body)] + `(do-with-duration-ms + (fn [] ~@butlast-forms) + (fn [~duration-ms-binding] + 
~last-form)))) + +(defn create-basis [edition] + {:pre [(#{:ee :oss} edition)]} + (b/create-basis {:project "deps.edn", :aliases #{edition}})) + +(defn all-paths [basis] + (concat (:paths basis) + (get-in basis [:classpath-args :extra-paths]))) + +(defn clean! [] + (c/step "Clean" + (c/step (format "Delete %s" class-dir) + (b/delete {:path class-dir})) + (c/step (format "Delete %s" uberjar-filename) + (b/delete {:path uberjar-filename})))) + +;; this topo sort order stuff is required for stuff to work correctly... I copied it from my Cloverage PR +;; https://github.com/cloverage/cloverage/pull/303 +(defn- dependencies-graph + "Return a `clojure.tools.namespace` dependency graph of namespaces named by `ns-symbol`." + [ns-decls] + (reduce + (fn [graph ns-decl] + (let [ns-symbol (ns.parse/name-from-ns-decl ns-decl)] + (reduce + (fn [graph dep] + (ns.deps/depend graph ns-symbol dep)) + graph + (ns.parse/deps-from-ns-decl ns-decl)))) + (ns.deps/graph) + ns-decls)) + +(defn metabase-namespaces-in-topo-order [basis] + (let [ns-decls (mapcat + (comp ns.find/find-ns-decls-in-dir io/file) + (all-paths basis)) + ns-symbols (set (map ns.parse/name-from-ns-decl ns-decls))] + (->> (dependencies-graph ns-decls) + ns.deps/topo-sort + (filter ns-symbols)))) + +(defn compile-sources! [basis] + (c/step "Compile Clojure source files" + (let [paths (all-paths basis) + _ (c/announce "Compiling Clojure files in %s" (pr-str paths)) + ns-decls (c/step "Determine compilation order for Metabase files" + (metabase-namespaces-in-topo-order basis))] + (with-duration-ms [duration-ms] + (b/compile-clj {:basis basis + :src-dirs paths + :class-dir class-dir + :ns-compile ns-decls}) + (c/announce "Finished compilation in %.1f seconds." (/ duration-ms 1000.0)))))) + +(defn copy-resources! [edition basis] + (c/step "Copy resources" + ;; technically we don't NEED to copy the Clojure source files but it doesn't really hurt anything IMO. 
+ (doseq [path (all-paths basis)] + (c/step (format "Copy %s" path) + (b/copy-dir {:target-dir class-dir, :src-dirs [path]}))))) + +(defn create-uberjar! [basis] + (c/step "Create uberjar" + (with-duration-ms [duration-ms] + (d/uber {:class-dir class-dir + :uber-file uberjar-filename + :basis basis}) + (c/announce "Created uberjar in %.1f seconds." (/ duration-ms 1000.0))))) + +(def manifest-entries + {"Manifest-Version" "1.0" + "Created-By" "Metabase build.clj" + "Build-Jdk-Spec" (System/getProperty "java.specification.version") + "Main-Class" "metabase.core" + "Liquibase-Package" (str/join "," + ["liquibase.change" + "liquibase.changelog" + "liquibase.database" + "liquibase.datatype" + "liquibase.diff" + "liquibase.executor" + "liquibase.ext" + "liquibase.lockservice" + "liquibase.logging" + "liquibase.parser" + "liquibase.precondition" + "liquibase.sdk" + "liquibase.serializer" + "liquibase.snapshot" + "liquibase.sqlgenerator" + "liquibase.structure" + "liquibase.structurecompare"])}) + +(defn manifest ^Manifest [] + (doto (Manifest.) + (b.zip/fill-manifest! manifest-entries))) + +(defn write-manifest! [^OutputStream os] + (.write (manifest) os) + (.flush os)) + +;; the customizations we need to make are not currently supported by tools.build -- see +;; https://ask.clojure.org/index.php/10827/ability-customize-manifest-created-clojure-tools-build-uber -- so we need +;; to do it by hand for the time being. +(defn update-manifest! [] + (c/step "Update META-INF/MANIFEST.MF" + (with-open [fs (FileSystems/newFileSystem (URI. (str "jar:file:" (.getAbsolutePath (io/file "target/uberjar/metabase.jar")))) + Collections/EMPTY_MAP)] + (let [manifest-path (.getPath fs "META-INF" (into-array String ["MANIFEST.MF"]))] + (with-open [os (Files/newOutputStream manifest-path (into-array OpenOption [StandardOpenOption/WRITE + StandardOpenOption/TRUNCATE_EXISTING]))] + (write-manifest! 
os)))))) + +;; clojure -T:build uberjar :edition +(defn uberjar [{:keys [edition], :or {edition :oss}}] + (c/step (format "Build %s uberjar" edition) + (with-duration-ms [duration-ms] + (clean!) + (let [basis (create-basis edition)] + (compile-sources! basis) + (copy-resources! edition basis) + (create-uberjar! basis) + (update-manifest!)) + (c/announce "Built target/uberjar/metabase.jar in %.1f seconds." + (/ duration-ms 1000.0))))) + +;; TODO -- add `jar` and `install` commands to install Metabase to the local Maven repo (?) could make it easier to +;; build 3rd-party drivers the old way diff --git a/codecov.yml b/codecov.yml index 82aaeaac126e..aa4def51848f 100644 --- a/codecov.yml +++ b/codecov.yml @@ -5,12 +5,32 @@ codecov: coverage: status: project: - default: + back-end: # Project must always have at least 78% coverage (by line) target: 78% # Whole-project test coverage is allowed to drop up to 5%. (For situtations where we delete code with full coverage) threshold: 5% - patch: - default: - # Changes must have at least 75% test coverage (by line) - target: 75% + flags: + - back-end + + front-end: + target: 35% + threshold: 5% + flags: + - front-end + + patch: off + +flags: + back-end: + paths: + - enterprise/backend + - shared/src + - src/metabase + carryforward: true + + front-end: + paths: + - enterprise/frontend + - frontend + carryforward: true diff --git a/deps.edn b/deps.edn new file mode 100644 index 000000000000..9c0fbad4dd10 --- /dev/null +++ b/deps.edn @@ -0,0 +1,430 @@ +;; -*- comment-column: 80; -*- +{:deps + ;; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + ;; !! PLEASE KEEP THESE ORGANIZED ALPHABETICALLY !! + ;; !! AND ADD A COMMENT EXPLAINING THEIR PURPOSE !! + ;; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ {aleph/aleph {:mvn/version "0.4.6" ; Async HTTP library; WebSockets
+ :exclusions [org.clojure/tools.logging]}
+ amalloy/ring-buffer {:mvn/version "1.3.1" ; fixed length queue implementation, used in log buffering
+ :exclusions [org.clojure/clojure
+ org.clojure/clojurescript]}
+ amalloy/ring-gzip-middleware {:mvn/version "0.1.4"} ; Ring middleware to GZIP responses if client can handle it
+ bigml/histogram {:mvn/version "4.1.3"} ; Histogram data structure
+ buddy/buddy-core {:mvn/version "1.10.1" ; various cryptographic functions
+ :exclusions [commons-codec/commons-codec
+ org.bouncycastle/bcpkix-jdk15on
+ org.bouncycastle/bcprov-jdk15on]}
+ buddy/buddy-sign {:mvn/version "3.4.1"} ; JSON Web Tokens; High-Level message signing library
+ cheshire/cheshire {:mvn/version "5.10.1"} ; fast JSON encoding (used by Ring JSON middleware)
+ clj-http/clj-http {:mvn/version "3.12.3" ; HTTP client
+ :exclusions [commons-codec/commons-codec
+ commons-io/commons-io
+ slingshot/slingshot]}
+ clojurewerkz/quartzite {:mvn/version "2.1.0" ; scheduling library
+ :exclusions [c3p0/c3p0]}
+ colorize/colorize {:mvn/version "0.1.1" ; string output with ANSI color codes (for logging)
+ :exclusions [org.clojure/clojure]}
+ com.cemerick/friend {:mvn/version "0.2.3" ; auth library
+ :exclusions [commons-codec/commons-codec
+ net.sourceforge.nekohtml/nekohtml
+ org.apache.httpcomponents/httpclient
+ ring/ring-core
+ slingshot/slingshot]}
+ com.clearspring.analytics/stream {:mvn/version "2.9.8" ; Various sketching algorithms
+ :exclusions [it.unimi.dsi/fastutil
+ org.slf4j/slf4j-api]}
+ com.draines/postal {:mvn/version "2.0.4"} ; SMTP library
+ com.google.guava/guava {:mvn/version "30.1.1-jre"} ; dep for BigQuery, Spark, and GA.
Require here rather than letting different dep versions stomp on each other — see comments on #9697 + com.h2database/h2 {:mvn/version "1.4.197"} ; embedded SQL database + com.taoensso/nippy {:mvn/version "3.1.1"} ; Fast serialization (i.e., GZIP) library for Clojure + com.vladsch.flexmark/flexmark {:mvn/version "0.62.2"} ; Markdown parsing + commons-codec/commons-codec {:mvn/version "1.15"} ; Apache Commons -- useful codec util fns + commons-io/commons-io {:mvn/version "2.11.0"} ; Apache Commons -- useful IO util fns + commons-validator/commons-validator {:mvn/version "1.7" ; Apache Commons -- useful validation util fns + :exclusions [commons-beanutils/commons-beanutils + commons-digester/commons-digester + commons-logging/commons-logging]} + compojure/compojure {:mvn/version "1.6.2" ; HTTP Routing library built on Ring + :exclusions [ring/ring-codec]} + dk.ative/docjure {:mvn/version "1.16.0" ; excel export + crypto-random/crypto-random {:mvn/version "1.2.1"} ; library for generating cryptographically secure random bytes and strings + :exclusions [org.apache.poi/poi + org.apache.poi/poi-ooxml]} + environ/environ {:mvn/version "1.2.0"} ; env vars/Java properties abstraction + hiccup/hiccup {:mvn/version "1.0.5"} ; HTML templating + honeysql/honeysql {:mvn/version "1.0.461" ; Transform Clojure data structures to SQL + :exclusions [org.clojure/clojurescript]} + instaparse/instaparse {:mvn/version "1.4.10"} ; Make your own parser + io.forward/yaml {:mvn/version "1.0.9" ; Clojure wrapper for YAML library SnakeYAML. 
Don't upgrade yet, new version doesn't support Java 8 (see https://github.com/owainlewis/yaml/issues/37) + :exclusions [org.clojure/clojure + org.flatland/ordered + org.yaml/snakeyaml]} + javax.xml.bind/jaxb-api {:mvn/version "2.4.0-b180830.0359"} ; add the `javax.xml.bind` classes which we're still using but were removed in Java 11 + joda-time/joda-time {:mvn/version "2.10.10"} + kixi/stats {:mvn/version "0.4.4" ; Various statistic measures implemented as transducers + :exclusions [org.clojure/data.avl]} + me.raynes/fs {:mvn/version "1.4.6" ; Filesystem tools + :exclusions [org.apache.commons/commons-compress]} + medley/medley {:mvn/version "1.3.0"} ; lightweight lib of useful functions + metabase/connection-pool {:mvn/version "1.1.1"} ; simple wrapper around C3P0. JDBC connection pools + metabase/saml20-clj {:mvn/version "2.0.0"} ; EE SAML integration + metabase/throttle {:mvn/version "1.0.2"} ; Tools for throttling access to API endpoints and other code pathways + net.cgrand/macrovich {:mvn/version "0.2.1"} ; utils for writing macros for both Clojure & ClojureScript + net.redhogs.cronparser/cron-parser-core {:mvn/version "3.5" ; describe Cron schedule in human-readable language + :exclusions [joda-time/joda-time ; exclude joda time 2.3 which has outdated timezone information + org.slf4j/slf4j-api]} + net.sf.cssbox/cssbox {:mvn/version "5.0.0" ; HTML / CSS rendering + :exclusions [org.slf4j/slf4j-api]} + org.apache.commons/commons-compress {:mvn/version "1.21"} ; compression utils + org.apache.commons/commons-lang3 {:mvn/version "3.12.0"} ; helper methods for working with java.lang stuff + org.apache.logging.log4j/log4j-1.2-api {:mvn/version "2.14.1"} ; apache logging framework + org.apache.logging.log4j/log4j-api {:mvn/version "2.14.1"} ; add compatibility with log4j 1.2 + org.apache.logging.log4j/log4j-core {:mvn/version "2.14.1"} ; apache logging framework + org.apache.logging.log4j/log4j-jcl {:mvn/version "2.14.1"} ; allows the commons-logging API to work 
with log4j 2 + org.apache.logging.log4j/log4j-liquibase {:mvn/version "2.14.1"} ; liquibase logging via log4j 2 + org.apache.logging.log4j/log4j-slf4j-impl {:mvn/version "2.14.1"} ; allows the slf4j API to work with log4j 2 + org.apache.poi/poi {:mvn/version "5.0.0"} ; Work with Office documents (e.g. Excel spreadsheets) -- newer version than one specified by Docjure + org.apache.poi/poi-ooxml {:mvn/version "5.0.0" + :exclusions [org.bouncycastle/bcpkix-jdk15on + org.bouncycastle/bcprov-jdk15on]} + org.apache.sshd/sshd-core {:mvn/version "2.7.0"} ; ssh tunneling and test server + org.apache.xmlgraphics/batik-all {:mvn/version "1.14"} ; SVG -> image + org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"} ; LDAP client + org.bouncycastle/bcpkix-jdk15on {:mvn/version "1.69"} ; Bouncy Castle crypto library -- explicit version of BC specified to resolve illegal reflective access errors + org.bouncycastle/bcprov-jdk15on {:mvn/version "1.69"} + org.clojure/clojure {:mvn/version "1.10.3"} + org.clojure/core.async {:mvn/version "1.3.618" + :exclusions [org.clojure/tools.reader]} + org.clojure/core.logic {:mvn/version "1.0.0"} ; optimized pattern matching library for Clojure + org.clojure/core.match {:mvn/version "1.0.0"} + org.clojure/core.memoize {:mvn/version "1.0.250"} ; useful FIFO, LRU, etc. 
caching mechanisms + org.clojure/data.csv {:mvn/version "1.0.0"} ; CSV parsing / generation + org.clojure/java.classpath {:mvn/version "1.0.0"} ; examine the Java classpath from Clojure programs + org.clojure/java.jdbc {:mvn/version "0.7.12"} ; basic JDBC access from Clojure + org.clojure/java.jmx {:mvn/version "1.0.0"} ; JMX bean library, for exporting diagnostic info + org.clojure/math.combinatorics {:mvn/version "0.1.6"} ; combinatorics functions + org.clojure/math.numeric-tower {:mvn/version "0.0.4"} ; math functions like `ceil` + org.clojure/tools.logging {:mvn/version "1.1.0"} ; logging framework + org.clojure/tools.namespace {:mvn/version "1.1.0"} + org.clojure/tools.reader {:mvn/version "1.3.6"} + org.clojure/tools.trace {:mvn/version "0.7.11"} ; function tracing + org.eclipse.jetty/jetty-server {:mvn/version "9.4.43.v20210629"} ; web server + org.flatland/ordered {:mvn/version "1.5.9"} ; ordered maps & sets + org.graalvm.js/js {:mvn/version "21.2.0"} ; JavaScript engine + org.liquibase/liquibase-core {:mvn/version "3.6.3" ; migration management (Java lib) + :exclusions [ch.qos.logback/logback-classic]} + org.mariadb.jdbc/mariadb-java-client {:mvn/version "2.6.2"} ; MySQL/MariaDB driver + org.postgresql/postgresql {:mvn/version "42.2.23"} ; Postgres driver + org.slf4j/slf4j-api {:mvn/version "1.7.32"} ; abstraction for logging frameworks -- allows end user to plug in desired logging framework at deployment time + org.tcrawley/dynapath {:mvn/version "1.1.0"} ; Dynamically add Jars (e.g. 
Oracle or Vertica) to classpath + org.threeten/threeten-extra {:mvn/version "1.7.0"} ; extra Java 8 java.time classes like DayOfMonth and Quarter + org.yaml/snakeyaml {:mvn/version "1.29"} ; YAML parser (required by liquibase) + potemkin/potemkin {:mvn/version "0.4.5" ; utility macros & fns + :exclusions [riddley/riddley]} + pretty/pretty {:mvn/version "1.0.5"} ; protocol for defining how custom types should be pretty printed + prismatic/schema {:mvn/version "1.1.12"} ; Data schema declaration and validation library + redux/redux {:mvn/version "0.1.4"} ; Utility functions for building and composing transducers + riddley/riddley {:mvn/version "0.2.0"} ; code walking lib -- used interally by Potemkin, manifold, etc. + ring/ring-core {:mvn/version "1.9.4"} ; web server (Jetty wrapper) + ring/ring-jetty-adapter {:mvn/version "1.9.4"} ; Ring adapter using Jetty webserver + ring/ring-json {:mvn/version "0.5.1"} ; Ring middleware for reading/writing JSON automatically + robdaemon/clojure.java-time {:mvn/version "0.3.3-SNAPSHOT"} ; Java 8 java.time wrapper. Fork to address #13102 - see upstream PR: https://github.com/dm3/clojure.java-time/pull/60 + slingshot/slingshot {:mvn/version "0.12.2"} ; enhanced throw/catch, used by other deps + stencil/stencil {:mvn/version "0.5.0"} ; Mustache templates for Clojure + toucan/toucan {:mvn/version "1.15.4" ; Model layer, hydration, and DB utilities + :exclusions [honeysql/honeysql + org.clojure/java.jdbc + org.clojure/tools.logging + org.clojure/tools.namespace]} + user-agent/user-agent {:mvn/version "0.1.0"} ; User-Agent string parser, for Login History page & elsewhere + weavejester/dependency {:mvn/version "0.2.1"} ; Dependency graphs and topological sorting + + ;; dummy dependency for the Java source file(s) + metabase/java-deps {:local/root "java"}} + ;; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + ;; !! 
PLEASE KEEP NEW DEPENDENCIES ABOVE ALPHABETICALLY ORGANIZED AND ADD COMMENTS EXPLAINING THEM. !! + ;; !! *PLEASE DO NOT* ADD NEW ONES TO THE BOTTOM OF THE LIST. !! + ;; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + + :paths + ["src" "shared/src" "resources" "java/target/classes"] + + :aliases + { +;;; Local Dev & test profiles + + ;; for local development: start a REPL with + ;; + ;; clojure -A:dev (basic dev REPL that includes test namespaces) + ;; clojure -A:dev:drivers:drivers-dev (dev REPL w/ drivers + tests) + ;; clojure -A:dev:ee:ee-dev (dev REPL w/ EE code including tests) + ;; + ;; You can start a web server from this REPL with + ;; + ;; (require 'dev) + ;; (dev/start!) + :dev + {:extra-deps + {clj-http-fake/clj-http-fake {:mvn/version "1.0.3" + :exclusions [slingshot/slingshot]} + cloverage/cloverage {:mvn/version "1.2.2"} + eftest/eftest {:mvn/version "0.5.9"} + jonase/eastwood {:mvn/version "0.9.6"} + lein-check-namespace-decls/lein-check-namespace-decls {:mvn/version "1.0.4"} ; misnomer since this works on Clojure CLI now too + pjstadig/humane-test-output {:mvn/version "0.11.0"} + reifyhealth/specmonstah {:mvn/version "2.0.0"} + ring/ring-mock {:mvn/version "0.4.0"} + talltale/talltale {:mvn/version "0.5.4"}} + + :extra-paths ["dev/src" "local/src" "test" "shared/test" "test_resources"] + :jvm-opts ["-Dmb.run.mode=dev" + "-Dmb.field.filter.operators.enabled=true" + "-Dmb.test.env.setting=ABCDEFG" + "-Duser.timezone=UTC" + "-Dfile.encoding=UTF-8" + "-Duser.language=en" + ;; Exceptions that get thrown repeatedly are created without stacktraces as a performance + ;; optimization in newer Java versions. This makes debugging pretty hard when working on stuff + ;; locally -- prefer debuggability over performance for local dev work. 
+ "-XX:-OmitStackTraceInFastThrow" + ;; prevent Java icon from randomly popping up in macOS dock + "-Djava.awt.headless=true"]} + + ;; includes test code as source paths. Run tests with clojure -X:dev:test + :test + {:extra-paths ["test_config"] + :exec-fn metabase.test-runner/run-tests + :jvm-opts ["-Dmb.run.mode=test" + "-Dmb.db.in.memory=true" + "-Dmb.jetty.join=false" + "-Dmb.field.filter.operators.enabled=true" + "-Dmb.api.key=test-api-key" + ;; Different port from normal `:dev` so you can run tests on a different server. + ;; TODO -- figure out how to do a random port like in the old project.clj? + "-Dmb.jetty.port=3001"]} + + ;; run the dev server with + ;; clojure -M:run + ;; clojure -M:run:drivers (include all drivers) + ;; clojure -M:run:ee (include EE code) + :run + {:main-opts ["-m" "metabase.core"] + :jvm-opts ["-Dmb.run.mode=dev" + "-Djava.awt.headless=true" ; prevent Java icon from randomly popping up in macOS dock + "-Dmb.jetty.port=3000"]} + + ;; alias for CI-specific options. + :ci + {:jvm-opts ["-Xmx2g" + ;; normally CircleCI sets `CI` as an env var, so this is mostly to replicate that locally. + "-Dci=TRUE"]} + + ;; include EE source code. + :ee + {:extra-paths ["enterprise/backend/src"]} + + ;; Include EE tests. + ;; for ee dev: :dev:ee:ee-dev + ;; for ee tests: clojure -X:dev:ee:ee-dev:test + :ee-dev + {:extra-paths ["enterprise/backend/test"]} + + ;; these aliases exist for symmetry with the ee aliases. Empty for now. + :oss + {} + + :oss-dev + {} + + ;; for local dev -- include the drivers locally with :dev:drivers + :drivers + {:extra-deps + {metabase/driver-modules {:local/root "modules/drivers"}}} + + ;; for local dev: include drivers as well as their tests. 
+ ;; + ;; clojure -X:dev:drivers:drivers-dev:test + ;; + ;; or + ;; + ;; clojure -X:dev:ee:ee-dev:drivers:drivers-dev:test (for EE) + :drivers-dev + {:extra-paths + ["modules/drivers/bigquery/test" + "modules/drivers/bigquery-cloud-sdk/test" + "modules/drivers/druid/test" + "modules/drivers/google/test" + "modules/drivers/googleanalytics/test" + "modules/drivers/mongo/test" + "modules/drivers/oracle/test" + "modules/drivers/presto/test" + "modules/drivers/presto-common/test" + "modules/drivers/presto-jdbc/test" + "modules/drivers/redshift/test" + "modules/drivers/snowflake/test" + "modules/drivers/sparksql/test" + "modules/drivers/sqlite/test" + "modules/drivers/sqlserver/test" + "modules/drivers/vertica/test"]} + +;;; Linters + + ;; clojure -X:dev:ee:ee-dev:drivers:drivers-dev:namespace-checker + :namespace-checker + {:exec-fn metabase.linters.namespace-checker/check-namespace-decls + :exec-args {:prefix-rewriting false}} + + ;; clojure -M:dev:ee:ee-dev:drivers:drivers-dev:check + :check + {:extra-deps {athos/clj-check {:git/url "https://github.com/athos/clj-check.git" + :sha "518d5a1cbfcd7c952f548e6dbfcb9a4a5faf9062"}} + :main-opts ["-m" "clj-check.check"]} + + ;; clojure -X:dev:ee:ee-dev:drivers:drivers-dev:eastwood + :eastwood + {:exec-fn metabase.linters.eastwood/eastwood + :exec-args {;; manually specify the source paths for the time being (exclude test paths) until we fix Eastwood + ;; errors in the test paths (once PR #17193 is merged) + :source-paths ["src" + "shared/src" + "enterprise/backend/src" + "modules/drivers/bigquery/src" + "modules/drivers/bigquery-cloud-sdk/src" + "modules/drivers/druid/src" + "modules/drivers/google/src" + "modules/drivers/googleanalytics/src" + "modules/drivers/mongo/src" + "modules/drivers/oracle/src" + "modules/drivers/presto/src" + "modules/drivers/presto-common/src" + "modules/drivers/presto-jdbc/src" + "modules/drivers/redshift/src" + "modules/drivers/snowflake/src" + "modules/drivers/sparksql/src" + 
"modules/drivers/sqlite/src" + "modules/drivers/sqlserver/src" + "modules/drivers/vertica/src"] + :add-linters [:unused-private-vars + ;; These linters are pretty useful but give a few false + ;; positives and can't be selectively disabled (yet) + ;; + ;; For example see https://github.com/jonase/eastwood/issues/193 + ;; + ;; It's still useful to re-enable them and run them every once + ;; in a while because they catch a lot of actual errors too. + ;; Keep an eye on the issue above and re-enable them if we can + ;; get them to work + #_:unused-fn-args + #_:unused-locals] + :exclude-linters [ ;; Turn this off temporarily until we finish removing + ;; self-deprecated functions & macros + :deprecations + ;; this has a fit in libs that use Potemkin `import-vars` such + ;; as `java-time` + :implicit-dependencies + ;; too many false positives for now + :unused-ret-vals]}} + + ;; clojure -T:whitespace-linter + :whitespace-linter + {:deps {com.github.camsaul/whitespace-linter {:sha "b25716745f5875194bc38364e498d0ddda51f4b0"}} + :ns-default whitespace-linter + :exec-args {:paths ["./.dir-locals.el" + "./deps.edn" + "./package.json" + "./shadow-cljs.edn" + ".circleci" + ".github" + "bin" + "enterprise" + "frontend" + "resources" + "shared" + "src" + "test"] + :include-patterns ["\\.clj.?$" + "\\.edn$" + "\\.el$" + "\\.html$" + "\\.json$" + "\\.jsx?$" + "\\.sh$" + "\\.yaml$" + "\\.yml$"] + :exclude-patterns ["resources/i18n/.*\\.edn$" + "resources/frontend_client" + "resources/frontend_shared" + "resources/html-entities.edn" + "frontend/src/cljs" + "frontend/test/metabase/lib/urls\\.unit\\.spec\\.js$" + "frontend/test/metabase/lib/formatting\\.unit\\.spec\\.js$" + "shared/src/metabase/shared/util/currency\\.cljc$"]}} + + ;; clojure -X:dev:ee:ee-dev:test:cloverage + :cloverage + {:exec-fn metabase.cloverage-runner/run-project + :exec-args {:fail-threshold 69 + :codecov? 
true
+ ;; don't instrument logging forms, since they won't get executed as part of tests anyway
+ ;; log calls expand to these
+ :exclude-call
+ [clojure.tools.logging/logf
+ clojure.tools.logging/logp]
+
+ :src-ns-path
+ ["src" "enterprise/backend/src" "shared/src"]
+
+ :test-ns-path
+ ["test" "enterprise/backend/test" "shared/test"]
+
+ :ns-regex
+ ["^metabase.*" "^metabase-enterprise.*"]
+
+ ;; don't instrument Postgres/MySQL driver namespaces, because we don't currently run tests for them
+ ;; as part of recording test coverage, which means they can give us false positives.
+ ;;
+ ;; regex literals aren't allowed in EDN. We parse them in `./test/cloverage.clj`
+ :ns-exclude-regex
+ ["metabase\\.driver\\.mysql" "metabase\\.driver\\.postgres"]}
+ ;; different port from `:test` so you can run it at the same time as `:test`.
+ :jvm-opts ["-Dmb.jetty.port=3002"]}
+
+;;; building Uberjar
+
+ ;; clojure -T:build uberjar
+ ;; clojure -T:build uberjar :edition :ee
+ :build
+ {:deps {io.github.clojure/tools.build {:git/tag "v0.1.6", :git/sha "5636e61"}
+ com.github.seancorfield/depstar {:mvn/version "2.1.278"}
+ metabase/build.common {:local/root "bin/common"}
+ metabase/buid-mb {:local/root "bin/build-mb"}}
+ :ns-default build}
+
+;;; Other misc convenience aliases
+
+ ;; Profile Metabase start time with clojure -M:profile
+ :profile
+ {:main-opts ["-m" "metabase.core" "profile"]
+ :jvm-opts ["-XX:+CITime" ; print time spent in JIT compiler
+ "-XX:+PrintGC"]}
+
+ ;; get the H2 shell with clojure -M:h2
+ :h2
+ {:main-opts ["-m" "org.h2.tools.Shell"]}
+
+ ;; clojure -M:generate-automagic-dashboards-pot
+ :generate-automagic-dashboards-pot
+ {:main-opts ["-m" "metabase.automagic-dashboards.rules"]}
+
+ ;; Start a Network REPL (nrepl) that you can connect your editor to.
+ ;;
+ ;; clojure -M:dev:nrepl (etc.)
+ :nrepl + {:extra-deps {nrepl/nrepl {:mvn/version "0.8.3"}} + :main-opts ["-m" "nrepl.cmdline"]} + + ;; TODO -- consider creating an alias that includes the `./bin` build-drivers & release code as well so we can work + ;; on them all from a single REPL process. + }} diff --git a/dev/src/dev/readme.md b/dev/src/dev/readme.md new file mode 100644 index 000000000000..ba90a1408e6d --- /dev/null +++ b/dev/src/dev/readme.md @@ -0,0 +1,54 @@ +## Render png + +Has some helper functions to help out with rendering debugging. + +In this namespace, you can run `(help)` to get a bit of help. Its principal usage right now is to render and debug the svg images. + +### NOTE! + +You must build the js bundle used to create the svgs with visx. Run `yarn build-static-viz` to ensure that this bundle is created and the file `resources/frontend_client/app/dist/lib-static-viz.bundle.js` exists + +Example usage below: + +```clojure +dev=> (require 'dev.render-png) +nil +dev=> (in-ns 'dev.render-png) +#object[clojure.lang.Namespace 0x14fef810 "dev.render-png"] +dev.render-png=> (help) + +To render some html, call the function `preview-html`. This takes one argument, a map. +The keys in the map are `:chart` and either `:html-file` or `:html-inline`. +(preview-html {:chart :donut :html-inline some-html-to-render}) +or +(preview-html {:chart :donut :html-file some-file-with-html}) + +This function will render the html and open an image. +Valid charts are `:donut`, `:line`, and `:bar`. + +You can use {{chart}} in your html to indicate where the image of the chart should be embedded. +It will be + +nil +dev.render-png=> (preview-html {:chart :donut :html-file "chart.html"}) +nil +dev.render-png=> +``` + +The steps were +1. require the namespace, so the code is loaded +2. `in-ns` to go "in" the namespace so we can easily call the functions +3. call the function we care about. It will open up an image preview. + +An example chart.html is + +```html
+

behold the donut

+ {{chart}} +
+

the donut has been beholden

+
+``` + +This file should be saved at the root of the repository for the call to `preview-html` to find it. diff --git a/dev/src/dev/render_png.clj b/dev/src/dev/render_png.clj index 5a8142ca6b96..fe291a0bf3b3 100644 --- a/dev/src/dev/render_png.clj +++ b/dev/src/dev/render_png.clj @@ -1,14 +1,20 @@ (ns dev.render-png - "Improve feedback loop for dealing with png rendering code" + "Improve feedback loop for dealing with png rendering code. Will create images using the rendering that underpins + pulses and subscriptions and open those images without needing to send them to slack or email." (:require [clojure.java.io :as io] [clojure.java.shell :as sh] + [clojure.string :as str] + [hiccup.core :as h] [metabase.models.card :as card] [metabase.models.user :as user] [metabase.pulse :as pulse] [metabase.pulse.render :as pulse-render] + [metabase.pulse.render.js-svg :as js-svg] + [metabase.pulse.render.png :as png] [metabase.query-processor :as qp] [metabase.query-processor.middleware.permissions :as qp.perms] - [toucan.db :as tdb])) + [toucan.db :as tdb]) + (:import org.fit.cssbox.misc.Base64Coder)) ;; taken from https://github.com/aysylu/loom/blob/master/src/loom/io.clj (defn- os @@ -46,15 +52,102 @@ user (tdb/select-one user/User) query-results (binding [qp.perms/*card-id* nil] (qp/process-query-and-save-execution! - (assoc dataset_query :async? false) + (-> dataset_query + (assoc :async? false) + (assoc-in [:middleware :process-viz-settings?] true)) {:executed-by (:id user) :context :pulse :card-id card-id})) png-bytes (pulse-render/render-pulse-card-to-png (pulse/defaulted-timezone card) card - query-results) + query-results + 1000) tmp-file (java.io.File/createTempFile "card-png" ".png")] (with-open [w (java.io.FileOutputStream. tmp-file)] (.write w ^bytes png-bytes)) (.deleteOnExit tmp-file) (open tmp-file))) + +(defn open-png-bytes [bytes] + (let [tmp-file (java.io.File/createTempFile "card-png" ".png")] + (with-open [w (java.io.FileOutputStream. 
tmp-file)] + (.write w ^bytes bytes)) + (.deleteOnExit tmp-file) + (open tmp-file))) + +(defn render-img-data-uri + "Takes a PNG byte array and returns a Base64 encoded URI" + [img-bytes] + (str "data:image/png;base64," (String. (Base64Coder/encode img-bytes)))) + +(defn svg-image [kind] + (let [line|bar-data [["2015-02-01T00:00:00-08:00" 443] + ["2015-03-01T00:00:00-08:00" 875] + ["2015-04-01T00:00:00-07:00" 483] + ["2015-05-01T00:00:00-07:00" 421]] + donut-data [["alpha" 32] + ["beta" 49] + ["gamma" 23] + ["delta" 67]] + donut-colors {"alpha" "red" + "beta" "green" + "gamma" "blue" + "delta" "yellow"}] + (case kind + :line (js-svg/timelineseries-line line|bar-data) + :bar (js-svg/timelineseries-bar line|bar-data) + :donut (js-svg/categorical-donut donut-data donut-colors) + (throw (ex-info (str "Invalid chart type: " kind "\n Valid choices are :line, :bar, :donut") + {}))))) + +(defn preview-html + "Chart type is one of :line, :bar, :donut. Html is a string with a placeholder {{chart}} which will be replaced with + the [:img {:src chart-placeholder}] and the resulting html will be opened." + [{:keys [chart html-file html-inline]}] + (let [chart-image (render-img-data-uri (svg-image chart)) + chart-html (h/html [:img {:src chart-image :style "display: block; width: 100%"}]) + html (cond html-file + (slurp html-file) + html-inline + (str "" + html-inline + "")) + html (h/html (str/replace html #"\{\{chart\}\}" chart-html))] + (with-open [os (java.io.ByteArrayOutputStream.)] + (let [image-bytes (do (#'png/render-to-png! html os 1000) + (.toByteArray os))] + (open-png-bytes image-bytes))))) + +(defn help [] + (println + " +To render some html, call the function `preview-html`. This takes one argument, a map. +The keys in the map are `:chart` and either `:html-file` or `:html-inline`. 
+(preview-html {:chart :donut :html-inline some-html-to-render}) +or +(preview-html {:chart :donut :html-file some-file-with-html}) + +This function will render the html and open an image. +Valid charts are `:donut`, `:line`, and `:bar`. + +You can use {{chart}} in your html to indicate where the image of the chart should be embedded. +It will be + +For instance +(preview-html {:chart :donut + :html-inline \"

behold the donut

{{chart}}
\"}) +")) + +(comment + (preview-html {:chart :donut :html-inline " +
+ {{chart}} + + + + + + + +
500600
MarchApril
+
"})) diff --git a/docs/README.md b/docs/README.md index 4a7bb2a30360..9908dd93cf03 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,37 +1,103 @@ -## Getting Help +# Metabase documentation and resources -#### [FAQs](faq/start.md) +## Tutorials + +### [Learn Metabase][learn] + +Learn how to set Metabase up, build out your analytics, organize things and set permissions, and how to do data ops right. + +## Getting help + +### [Troubleshooting Guide][troubleshooting] + +Problems, their causes, how to detect them, and how to fix them. + +### [Metabase forum][forum] + +A place to get help on installation, setting up as well as sharing tips and tricks. + +### [FAQs][faq] Frequently asked questions about Metabase. -#### [Troubleshooting Guide](troubleshooting-guide/index.md) -Have a problem and need help? Start with our troubleshooting guide. +## Metabase reference guides + +Documentation guides for the Metabase application. + +### [Users Guide][users-guide] + +How to ask questions, how to visualize answers, as well as how to share questions and create dashboards. + +### [Admin Guide][admin-guide] + +How to set up Metabase, configure common settings, manage accounts and permissions, and add databases. + +### [Operations Guide][operations-guide] + +Learn how to install Metabase for production use. The guide covers topics like SSL termination, deploying via Docker Containers vs. JAR files, as well as the tradeoffs involved. -## In-depth Guides +### [Enterprise Guide][enterprise] -#### [Users Guide](users-guide/start.md) -This is the go-to guide on asking questions and sharing answers using Metabase. You'll learn in depth about how questions are expressed, how to chart answers, as well as how to share questions and create dashboards. +Here’s where to go for help using the features included in [Metabase Enterprise Edition][enterprise-landing]. 
-#### [Admin Guide](administration-guide/start.md) -This guide is for advanced users and those who will be setting up and maintaining a Metabase installation. You'll learn how to set the instance up, how to configure common settings, how to manage user accounts, and how to add databases. +## Metabase for developers -#### [Operations Guide](operations-guide/start.md) -This guide contains more detailed information about how to install Metabase for production use. It covers topics like SSL termination, deploying via Docker Containers vs. Jars, as well as the tradeoffs involved. +### [Developers Guide][developers] -## Enterprise Edition +Learn how to contribute to the Metabase open source project. -#### [Enterprise Features Guide](./enterprise-guide/start.md) -Here’s where to go for help using the features included in the Enterprise Edition of Metabase. +### [Driver Development][drivers] -## For Developers +This guide lists existing community drivers, and shows how to get started with driver development. -#### [Developers Guide](developers-guide.md) -This guide covers how to contribute back to the Metabase open source project. It includes setting up a development environment, running tests, and the contribution and product process Metabase follows. +### [Embedding reference apps][embedding-ref-apps] -#### [Driver Development](developers-guide-drivers.md) -Want to write your own database driver? This guide lists existing community drivers and shows how to get started with driver development. +Code examples for embedding Metabase in applications. + +## Metabase community + +Connect with others using Metabase and catch up on the latest news. + +### [Metabase forum][forum] + +A place to get help on installation, setting up as well as sharing tips and tricks. + +### [Data stories][data-stories] + +Real stories about teams working and learning with data. You can also share your own stories. 
+ +### [Case studies][case-studies] + +See how other organizations, big and small, have leveled up using Metabase. + +### [Blog][blog] + +Stay up to date on the latest from Metabase. + +### [Source code repository on GitHub][source-code] + +Metabase is open source: come on over and check out the code. ## Reference -#### [Anonymous Information Collection Reference](information-collection.md) -This describes the anonymous usage information we collect (if you opt-in) as well as why we collect it and the ways we use it. +### [Anonymous Information Collection Reference][info-collection] + +This page describes the anonymous usage information we collect (only if you opt-in), why we collect it, and how we use it to improve Metabase. + +[admin-guide]: administration-guide/start.md +[blog]: /blog +[case-studies]: https://www.metabase.com/case_studies/ +[embedding-ref-apps]: https://github.com/metabase/embedding-reference-apps +[enterprise]: enterprise-guide/start.md +[enterprise-landing]: /enterprise +[data-stories]: /community +[developers]: developers-guide/start.md +[drivers]: developers-guide-drivers.md +[faq]: faq/start.md +[forum]: https://discourse.metabase.com/ +[info-collection]: information-collection.md +[learn]: /learn +[operations-guide]: operations-guide/start.md +[source-code]: https://github.com/metabase/metabase +[troubleshooting]: troubleshooting-guide/index.md +[users-guide]: users-guide/start.md diff --git a/docs/administration-guide/03-metadata-editing.md b/docs/administration-guide/03-metadata-editing.md index 8095285e7eb2..7ef43c7b2190 100644 --- a/docs/administration-guide/03-metadata-editing.md +++ b/docs/administration-guide/03-metadata-editing.md @@ -110,17 +110,17 @@ You can also designate a column as the table's **primary key** or **foreign key* - Category - Comment - Description - - Common - - Number - Title - - Common - **Location** - City - Country + - Latitude - Longitude - State - Zip Code - **Financial** + - Cost + - Currency - Discount - 
Gross margin - Income @@ -143,7 +143,12 @@ You can also designate a column as the table's **primary key** or **foreign key* - Creation date - Creation time - Creation timestamp + - Deletion date + - Deletion time - Deletion timestamp + - Updated date + - Updated time + - Updated timestamp - Join date - Join time - Join timestamp @@ -159,6 +164,7 @@ You can also designate a column as the table's **primary key** or **foreign key* - URL - **Other** - Field containing JSON + - No semantic type ### Casting to a specific data type diff --git a/docs/administration-guide/04-managing-users.md b/docs/administration-guide/04-managing-users.md index 65f001ad1347..b727a604d164 100644 --- a/docs/administration-guide/04-managing-users.md +++ b/docs/administration-guide/04-managing-users.md @@ -36,6 +36,12 @@ Right now, the only special role someone can have is Admin. The only difference To make someone an admin, click on the Groups dropdown and click the check mark next to the Administrators group. +### Unsubscribe from all subscriptions / alerts + +This action will delete any dashboard subscriptions or alerts the person has created, and remove them as a recipient from any other subscriptions or alerts. + +This action doesn't affect email distribution lists that are managed outside of Metabase. + ### Adding people to Groups Adding people to groups allows you to assign [data access](05-setting-permissions.md) and [collection permissions](06-collections.md) to them. To add someone to one or more groups, just click the Groups dropdown and click the checkboxes next to the group(s) you want to add the person to. 
diff --git a/docs/administration-guide/05-setting-permissions.md b/docs/administration-guide/05-setting-permissions.md index 0951d2e1ebef..d4dccd99f505 100644 --- a/docs/administration-guide/05-setting-permissions.md +++ b/docs/administration-guide/05-setting-permissions.md @@ -1,89 +1,80 @@ -## Setting Data Access Permissions +# Permissions overview There are always going to be sensitive bits of information in your databases and tables, and thankfully Metabase provides a simple way to ensure that people on your team only see the data they’re supposed to. -### How permissions work in Metabase +## How permissions work in Metabase -Metabase uses a group-based approach to set permissions and restrictions on your databases and tables. At a high level, to set up permissions in your Metabase instance you’ll need to create one or more groups, add members to those groups, and then choose what level of database and SQL access those groups should have. +Metabase uses a group-based approach to set permissions. At a high-level, you can set permissions on two things: data and collections. **Data permissions** are about defining what raw data groups are allowed to use when creating new questions (i.e., self-service analytics). **Collection permissions** determine what existing dashboards and questions groups can see. On some plans, you can also sandbox data, which "filters" what data people can see when they view a particular questions, such as limiting the rows or columns they can see. -A user can be a member of multiple groups, and if one of the groups they’re in has access to a particular database or table, but another group they’re a member of does not, then they **will** have access to that database. +You can set permissions on: -In addition to setting permissions on your databases and tables, you can also [set access permissions on the collections](06-collections.md) where your dashboards, questions, and pulses are saved. 
Collection permissions can be set and edited from the collection itself, or the Admin Panel. +- [Databases connected to Metabase][data-permissions] +- [Tables and schemas in those databases][table-permissions] +- [Rows and columns of a table][data-sandboxing] (only on some plans) +- [Collections of questions and dashboards][collections] -### Groups +For plans that include [SQL Snippet Folders][sql-snippet-folders], you can also set permissions on those folders. -To view and manage your groups, go to the Admin Panel, click on the People section, and then click on Groups from the side menu. +To determine who has access to what, you’ll need to create one or more groups, choose which level of access that group has to different databases, collections, and so on, then add people to that group. -![Groups](images/groups.png) - -#### Special default groups - -You’ll notice that you already have two default groups: Administrators and All Users. These are special groups that can’t be removed. +### Key points regarding permissions -You’ll also see that you’re a member of the **Administrators** group — that’s why you were able to go to the Admin Panel in the first place. So, to make someone an admin of Metabase you just need to add them to this group. Metabase admins can log into the Admin Panel and make changes there, and they always have unrestricted access to all data that you have in your Metabase instance. So be careful who you add to the Administrator group! - -The **All Users** group is another special one. Every Metabase user is always a member of this group, though they can also be a member of as many other groups as you want. We recommend using the All Users group as a way to set default access levels for new Metabase users. If you have [Google single sign-on](10-single-sign-on.md) enabled, new users who join that way will be automatically added to the All Users group. 
+Some key things to keep in mind when thinking about permissions in Metabase: -#### An important note on the All Users group +- Permissions are granted to groups, not people. +- People can be in more than one group. +- If a person is in multiple groups, they will have the most permissive access granted to them across all of their groups. For example, if they are part of three groups, and two of those groups don't have permissions to a database, but the third group they're in can query that database, then that person will have access to that database. -As we mentioned above, a user is given the _most permissive_ setting she has for a given database/schema/table across _all_ groups she is in. Because of that, it is important that your All Users group should never have _greater_ access for an item than a group for which you're trying to restrict access — otherwise the more permissive setting will win out. This goes for both data access as well as [collection permission](06-collections.md) settings. +## Groups -If you’ve set up the [Slack integration](09-setting-up-slack.md) and enabled [Metabot](../users-guide/11-metabot.md), you’ll also see a special **Metabot** group, which will allow you to restrict which questions your users will be able to access in Slack via Metabot. - -#### Managing groups - -From the Groups section, click the `Add a group` button to create a new group. We recommend creating groups that correspond to the teams your company or organization has, such as Human Resources, Engineering, Finance, etc. Click the X icon to the right of a group in the list to remove it (remember, you can’t remove the special default groups). By default, newly created groups don’t have access to anything. - -Click into a group and then click `Add members` to add users to that group. Click on the X on the right side of a group member to remove them from that group. You can also add and remove users from groups from the People list using the dropdown in the Groups column. 
+To view and manage your groups, go to the __Admin Panel__ > __People__, and then click on __Groups__ from the side menu. -### Permissions view - -Now that you have some groups, you’ll want to control their data access by going to the `Permissions` section of the Admin Panel. You’ll see an interactive table that displays all of your databases and all of your groups, and the level of access your groups have for each database. - -![Permissions view](images/permissions.png) - -You can click on any cell in the table to change a group’s access level. When you’re done making your changes, just click the `save changes` button in the top-right, and you’ll see a confirmation dialog summarizing the changes. - -![Changing access level](images/change-access.png) +![Groups](images/groups.png) -At the database level, there are two different kinds of access you can set: data access, and SQL (or native query) access. +### Special default groups -#### Data access +Every Metabase has two default groups: Administrators and All Users. These are special groups that can’t be removed. -- **Unrestricted access:** can access data from all tables (within all namespaces/schemas, if your database uses those), including any tables that might get added to this database in the future. -- **Limited access:** can only access the tables that you explicitly select within namespaces/schemas you explicitly select. If a new table gets added to this database in the future, access to it will not automatically be given. Saved questions based on tables the user doesn’t have access to won’t show up in the list of saved questions, dashboard cards based on those questions won’t appear, and they won’t be able to ask new questions about those tables. If every card on a dashboard is hidden for a user, then that dashboard won’t be shown to them in the dashboard list. -- **No access:** can’t see anything based on data contained in this database. 
Won’t see saved questions based on tables contained in this database, and won’t be able to ask new questions about those tables. +#### Administrators -#### SQL (or native query) access +You’re a member of the **Administrators** group — that’s why you were able to go to the Admin Panel in the first place. To make someone an admin of Metabase, you just need to add them to this group. Metabase admins can log into the Admin Panel and make changes there, and they always have unrestricted access to all data that you have in your Metabase instance. So be careful who you add to the Administrator group! -- **Write raw queries:** can write new SQL/native queries using the SQL editor. This access level requires the group to additionally have Unrestricted data access for the database in question, since SQL queries can circumvent table-level permissions. -- **No access**: can't view, write, or edit SQL/native queries. Users will still be able to view the results of questions created from SQL/native queries, but not the code itself. They also won't see the "View the SQL" button when composing custom questions in the notebook editor. +#### All users -If you select `Limit access` for one of your databases, your view will change to show the contents of that database. If the database utilizes namespaces or schemas, you’ll see a list of all the schemas in the database, and the level of data access each group has for them. Similarly, if you select `Limit access` on one of your schemas, you’ll drill down a level and see all the tables within it. From these views, you can navigate back by using the breadcrumb links in the top-left, and you can always drill down into a database or schema using the link under its name in the left column. +The **All Users** group is another special one. Every Metabase user is always a member of this group, though they can also be a member of as many other groups as you want. 
We recommend using the All Users group as a way to set default access levels for new Metabase users. If you have [Google single sign-on](10-single-sign-on.md) enabled, new users who join that way will be automatically added to the All Users group. -![Table permissions](images/table-permissions.png) +As we mentioned above, a person is given the _most permissive_ setting she has for a given database/schema/table across _all_ groups she's in. Because of that, it's important that your All Users group should never have _greater_ access for an item than a group for which you're trying to restrict access — otherwise the more permissive setting will win out. This goes for both data access as well as [collection permission](06-collections.md) settings. -Data access levels for schemas follows the same pattern as for databases: +### Managing groups -- **Unrestricted access:** can access all tables in this schema, including any tables that might get added in the future. -- **Limited access:** can only access the tables that you explicitly select. -- **No access:** can’t access any tables in this schema. +#### Creating a group and adding people to it -Lastly, data access levels for tables are almost exactly the same as well: +From the Admin > Groups tab, click the **Add a group** button to create a new group. We recommend creating groups that correspond to the teams your company or organization has, such as Human Resources, Engineering, Finance, and so on. By default, newly created groups don’t have access to anything. -- **Unrestricted access:** can ask questions about this table and see saved questions and dashboard cards using this table. -- **No access:** can’t ask questions about this table or see saved questions or dashboard cards using this table. +Click into a group and then click `Add members` to add users to that group. Click on the X on the right side of a group member to remove them from that group. 
You can also add and remove users from groups from the People list using the dropdown in the Groups column. -_Note: you’ll notice that tables don’t have the option for limited access. If you need to set column-level or row-level data permissions, check out the [data sandboxing](https://www.metabase.com/docs/latest/enterprise-guide/data-sandboxes.html) feature of the [Enterprise Edition](https://www.metabase.com/enterprise/)._ +#### Removing a group -For more, check out our [Guide to data permissions](https://www.metabase.com/learn/organization/organization/data-permissions.html). +To remove a group, click the X icon to the right of a group in the list to remove it (remember, you can’t remove the special default groups). -### A note about Pulses +## Further reading -Pulses act a bit differently with regard to permissions. When a user creates a new Pulse, they will only have the option to include saved questions that they have permission to view. Note, however, that they are not prevented from emailing that Pulse to anyone, or posting that Pulse to a Slack channel (if you have Slack integration set up), regardless of the recipients’ permissions. Unlike dashboards, where individual cards are blocked based on a user’s permissions, a Pulse will always render all of its cards. +Check out our track on [Permissions][permissions] in Learn Metabase. --- -## Next: collections - -Metabase lets you create and set permissions on collections of dashboards and questions. [Learn how](06-collections.md). +## Next: Data permissions + +Metabase lets you [set permissions on databases and their tables][data-permissions].
+ +[collections]: 06-collections.md +[dashboard-subscriptions]: ../users-guide/dashboard-subscriptions.md +[data-permissions]: data-permissions.md +[pulses]: ../users-guide/10-pulses.md +[data-sandboxing]: ../enterprise-guide/data-sandboxes.md +[permissions]: /learn/permissions/ +[sandbox-columns]: /learn/permissions/data-sandboxing-column-permissions.html +[sandbox-rows]: /learn/permissions/data-sandboxing-row-permissions.html +[slack-integration]: 09-setting-up-slack.md +[sql-snippet-folders]: ../enterprise-guide/sql-snippets.md +[table-permissions]: data-permissions.md#table-permissions \ No newline at end of file diff --git a/docs/administration-guide/06-collections.md b/docs/administration-guide/06-collections.md index 6ef496f359a3..7f62d12eaf32 100644 --- a/docs/administration-guide/06-collections.md +++ b/docs/administration-guide/06-collections.md @@ -1,14 +1,15 @@ -## Creating Collections for Your Saved Questions +# Collection permissions ![Collection detail](images/collections/collection-detail.png) Collections are a great way to organize your dashboards, saved questions, and pulses, and to decide who gets to see and edit things. Collections could be things like, "Important Metrics," "Product Team," "Marketing KPIs," or "Questions about users." Collections can even contain other collections, allowing you to create an organizational structure that fits your team. You can also choose which user groups should have what level of access to your collections (more on that below). -Metabase starts out with a default top-level collection which is called "Our analytics," which every other collection is saved inside of. +Metabase starts out with a default top-level collection which is called __Our analytics__, which every other collection is saved inside of. This page will teach you how to create and manage your collections. 
For more information on organizing saved questions and using collections, [check out this section of the User's Guide](../users-guide/06-sharing-answers.md). ### Creating and editing collections + If a user has Curate access for a collection, they can create new sub-collections inside it and edit the contents of the collection. From the detail view of any collection, click on the `Create a collection` button to make a new one. Give your collection a name, choose where it should live, and give it a description if you'd like. ![Create collection](images/collections/create-collection.png) @@ -16,6 +17,7 @@ If a user has Curate access for a collection, they can create new sub-collection By default, new collections will have the same permissions settings as the collection it was created in (its "parent" collection), but you can change those settings from the Edit menu. ### Pinning things in collections + ![Pins](images/collections/pinned-items.png) One great feature in Metabase is that you can pin the most important couple of items in each of your collections to the top. Pinning an item in a collection turns it into a big, eye-catching card that will help make sure that folks who are browsing your Metabase instance will always know what's most important. @@ -23,10 +25,11 @@ One great feature in Metabase is that you can pin the most important couple of i Any user with curate permissions for a collection can pin items in it, making it easy to delegate curation responsibilities to other members of your team. To pin something, you can either click and drag it to the top of the page, or click on its menu and choose the pin action. (Note that collections themselves can't be pinned.) ### Setting permissions for collections + Collection permissions are similar to [data access permissions](05-setting-permissions.md). 
Rather than going to the Admin Panel, you set permissions on collections by clicking on the lock icon in the top-right of the screen while viewing the collection and clicking on `Edit permissions`. Only Administrators can edit collection permissions. Each [user group](05-setting-permissions.md) can have either View, Curate, or No access to a collection: - **Curate access:** the user can edit, move, and archive items saved in this collection, and can save or move new items into it. They can also create new sub-collections within this collection. In order to archive a sub-collection within this collection, they'll need to have Curate access for it and any and all collections within it. -- **View access:** the user can see all the questions, dashboards, and pulses in the collection. If the user does not have permission to view some or all of the questions included in a given dashboard or pulse then those questions will not be visible to them; but any questions that are saved in this collection *will* be visible to them, *even if the user doesn't have access to the underlying data used to in the question.* +- **View access:** the user can see all the questions, dashboards, and pulses in the collection. If the user does not have permission to view some or all of the questions included in a given dashboard or pulse then those questions will not be visible to them; but any questions that are saved in this collection _will_ be visible to them, _even if the user doesn't have access to the underlying data used to in the question._ - **No access:** the user won't see this collection listed, and doesn't have access to any of the items saved within it. 
![Permissions](images/collections/collection-permissions.png) @@ -35,12 +38,13 @@ If you want to see the bigger picture of what permissions your user groups have ![Full permissions grid](images/collections/permission-grid.png) -Just like with data access permissions, collection permissions are *additive*, meaning that if a user belongs to more than one group, if one of their groups has a more restrictive setting for a collection than another one of their groups, they'll be given the *more permissive* setting. This is especially important to remember when dealing with the All Users group: since all users are members of this group, if you give the All Users group Curate access to a collection, then *all* users will be given Curate access for that collection, even if they also belong to a group with *less* access than that. +Just like with data access permissions, collection permissions are _additive_, meaning that if a user belongs to more than one group, if one of their groups has a more restrictive setting for a collection than another one of their groups, they'll be given the _more permissive_ setting. This is especially important to remember when dealing with the All Users group: since all users are members of this group, if you give the All Users group Curate access to a collection, then _all_ users will be given Curate access for that collection, even if they also belong to a group with _less_ access than that. ### Permissions and sub-collections -One nuance with how collections permissions work has to do with sub-collections. A user group can be given access to a collection located somewhere within one or more sub-collections *without* having to have access to every collection "above" it. E.g., if a user group had access to the "Super Secret Collection" that's saved several layers deep within a "Marketing" collection that the group does *not* have access to, the "Super Secret Collection" would show up at the top-most level that the group *does* have access to. 
-To learn more, check out our Learn article on [working with collection permissions](https://www.metabase.com/learn/organization/organization/collection-permissions.html). +One nuance with how collections permissions work has to do with sub-collections. A user group can be given access to a collection located somewhere within one or more sub-collections _without_ having to have access to every collection "above" it. E.g., if a user group had access to the "Super Secret Collection" that's saved several layers deep within a "Marketing" collection that the group does _not_ have access to, the "Super Secret Collection" would show up at the top-most level that the group _does_ have access to. + +To learn more, check out our Learn article on [working with collection permissions][working-with-collection-permissions]. ### Personal collections @@ -49,13 +53,38 @@ Each user has a personal collection where they're always allowed to save things, A personal collection works just like any other collection except that its permissions can't be changed. If a sub-collection within a personal collection is moved to a different collection, it will inherit the permissions of that collection. ### Archiving collections + Users with curate permission for a collection can archive collections. Click the edit icon in the top-right of the collection screen and select `Archive this collection` to archive it. This will also archive all questions, dashboards, pulses, and all other sub-collections and their contents. Importantly, this will also remove any archived questions from all dashboards and Pulses that use them. **Note:** the "Our analytics" collection and personal collections can't be archived. -You can always *unarchive* things by clicking on the More menu from a collection and selecting `View the archive`, then clicking the un-archive button next to an archived item. 
Questions within archived collections are not individually listed in the archive, so if you want to unarchive a specific question from an archived collection, you have to unarchive that whole collection. +You can always _unarchive_ items. In the Collections list sidebar, at the bottom, click on __View archive__. Search for the item you'd like to unarchive (you'll either need to scroll down the page, or use the browser's find in page functionality, as archived items won't appear in Metabase's search results). Select the open box with an up arrow icon to "Unarchive this". + +## Dashboard subscriptions + +You don't explicitly set permissions on [dashboards subscriptions][dashboard-subscriptions], as the subscriptions are a feature of a dashboard. And access to dashboards falls under Collection permissions. + +Here's what you can do with dashboard subscriptions based on Collection permissions for the collection the dashboard is in: + +- **Curate access**: You can view and edit all subscriptions for the dashboard, including subscriptions created by other people. +- **View access**: You can view all subscriptions for that dashboard. You can also create subscriptions and edit ones that you’ve created, but you can’t edit ones that other people created. You can also unsubscribe from a subscription that somebody else created. +- **No access**: You can’t view any of the dashboard's subscriptions, including, for example, subscriptions you created before an administrator revoked your access to the collection. + +### Metabot group + +If you’ve set up the [Slack integration][slack-integration] and enabled Metabot, you’ll also see a special Metabot group when assigning permissions to collections, which will allow you to restrict which questions your users will be able to access in Slack via Metabot. + +## A note about Pulses + +If you're using [Pulses][pulses], we recommend switching to [dashboard subscriptions][dashboard-subscriptions]. 
+ +Pulses act a bit differently with regard to permissions. When a user creates a new Pulse, they will only have the option to include saved questions that they have permission to view. Note, however, that they are not prevented from emailing that Pulse to anyone, or posting that Pulse to a Slack channel (if you have Slack integration set up), regardless of the recipients’ permissions. Unlike dashboards, where individual cards are blocked based on a user’s permissions, a Pulse will always render all of its cards. --- ## Next: sharing and embedding with public links + Want to share certain dashboards or questions with the world? You can do that with [public links](12-public-links.md). + + +[working-with-collection-permissisons]: /learn/permissions/collection-permissions.html \ No newline at end of file diff --git a/docs/administration-guide/12-public-links.md b/docs/administration-guide/12-public-links.md index 8ad0c3908606..50aba49bd8ef 100644 --- a/docs/administration-guide/12-public-links.md +++ b/docs/administration-guide/12-public-links.md @@ -10,7 +10,10 @@ First things first, you'll need to go to the Admin Panel and enable public shari ### Enable sharing on your dashboard or saved question ![Enable sharing](images/public-links/enable-links.png) -Next, exit the Admin Panel and go to the dashboard or question that you want to share, then click on the `Sharing and Embedding` icon in the top-right of the screen (it looks like a box with an arrow pointing up). Then click on the toggle to enable public sharing for this dashboard or question. + +Next, exit the Admin Panel and go to question that you want to share, then click on the `Sharing and Embedding` icon in the bottom-right of the screen (it looks like an arrow pointing up and to the right). Then click on the toggle to enable public sharing for this question. + +In the case of a dashboard, the button is located on the top right of the page. ### Copy, paste, and share! 
diff --git a/docs/administration-guide/data-permissions.md b/docs/administration-guide/data-permissions.md new file mode 100644 index 000000000000..0201791a5d37 --- /dev/null +++ b/docs/administration-guide/data-permissions.md @@ -0,0 +1,92 @@ +# Data permissions + +This page covers permissions for databases and tables. If you haven't already, check out our [Permissions overview][permissons-overview]. + +## Permissions view + +Now that you have some groups, you’ll want to control their data access by going to the **Permissions** section of the Admin Panel. You’ll see an interactive table that displays all of your databases and all of your groups, and the level of access your groups have for each database. + +![Permissions view](images/permissions.png) + +You can click on any cell in the table to change a group’s access level. When you’re done making your changes, just click the `save changes` button in the top-right, and you’ll see a confirmation dialog summarizing the changes. + +### Unrestricted access + +Members of the group can access data from all tables (within all namespaces/schemas, if your database uses those), including any tables that might get added to this database in the future. + +### Granular access + +__Granular access__ allows administrators to explicitly set access to tables or schemas within a database. In practice, this means that: + +- Admins can set the groups access to individual tables to either __Unrestricted__, __No self-service__, or __Sandboxed__ access. +- If a new table gets added to this database in the future, the group won't get access to that new table. An administrator would need to explicitly grant access to that table. + +### No self-service access + +__No self-service__ prevents people in a group from creating new ad hoc queries or questions based on this data, or from seeing this data in the Browse Data screen. 
Groups with this level of access can still see saved questions and charts based on this data in Collections they have access to. + +### Block + +{% include plans-blockquote.html %} + +__Block__ ensures people can’t ever see the data from this database, regardless of their permissions at the Collection level. So if they want to see a question in a collection that have access to, but that question uses data from a database that's been blocked for that person's group, then they won't be able to see that question. + +Keep in mind people can be in multiple groups. If a person belongs to _another_ group that _does_ have access to that database, that more privileged access will take precedence (overruling the block), and they'll be able to view that question. + +### Native query editing + +Members of a group with native query editing set to Yes can write new SQL/native queries using the native query editor. This access level requires the group to additionally have Unrestricted data access for the database in question, since SQL queries can circumvent table-level permissions. +Members in groups without native query editing access can't view, write, or edit SQL/native queries. People who are not in groups with native query editing permissions will still be able to view the results of questions created from SQL/native queries, but not the code itself. They also won't see the "View the SQL" button when composing custom questions in the notebook editor. + +## Table permissions + +When you select [Granular access](#granular-access) for a database, you'll be prompted to set permissions on the tables (or schemas) within that database. Here you'll have two or three options, depending on your Metabase plan. + +### Unrestricted access to the table + +Groups with unrestricted access can ask questions about this table and see saved questions and dashboard cards that use the table. 
+ +### No self-service access to the table + +Groups with no self-service access to a table can’t access the table at all. They can, however, view questions that use data from that table, provided the group has access to the question's collection. + +### Sandboxed access to the table + +Only available in paid plans, Sandboxed access to a table can restrict access to columns and rows of a table. Check out [data sandboxing][data-sandboxing]. + +## Permissions and dashboard subscriptions + +You don't explicitly set permissions on [dashboards subscriptions][dashboard-subscriptions], as the subscriptions are a feature of a dashboard. Which means that What you can do j + +If a person is in a group that has __Curate access__ to the collection containing the dashboard, they can view and edit all subscriptions for the dashboard, including subscriptions created by other people. +If a group has read-only access to a dashboard (based on its collection permissions), they can view all subscriptions for that dashboard. They can also create subscriptions and edit ones that they’ve created, but they can’t edit ones that other users created. (That last point is enforced by the BE only, the FE still needs to be updated to show the subscriptions as read-only.) +If a group has no access to a dashboard, they can’t view any of its subscriptions, including ones that they may have created in the past, prior to having access revoked. + +If you have read-only access to a dashboard, you can also unsubscribe yourself from a subscription that somebody else created via the new page in account settings. + +## A note about Pulses + +If you're using [Pulses][pulses], we recommend switching to [dashboard subscriptions][dashboard-subscriptions]. + +Pulses act a bit differently with regard to permissions. When someone creates a new Pulse, they will only have the option to include saved questions that they have permission to view. 
Note, however, that they are not prevented from emailing that Pulse to anyone, or posting that Pulse to a Slack channel (if you have Slack integration set up), regardless of the recipients’ permissions. Unlike dashboards, where individual cards are blocked based on a person's permissions, a Pulse will always render all of its cards. + +## Further reading + +- [Guide to data permissions](https://www.metabase.com/learn/organization/organization/data-permissions.html). +- [Data sandboxing: setting row-level permissions][sandbox-rows] +- [Advanced data sandboxing: limiting access to columns][sandbox-columns] + +--- + +## Next: Collection permissions + +Metabase lets you create and set permissions on collections of dashboards and questions. [Learn how][collections]. + +[collections]: 06-collections.md +[dashboard-subscriptions]: ../users-guide/dashboard-subscriptions.md +[data-sandboxing]: ../enterprise-guide/data-sandboxes.md +[permissions-overview]: 05-setting-permissions.md +[pulses]: ../users-guide/10-pulses.md +[sandbox-columns]: /learn/permissions/data-sandboxing-column-permissions.html +[sandbox-rows]: /learn/permissions/data-sandboxing-row-permissions.html +[sql-snippet-folders]: ../enterprise-guide/sql-snippets.md diff --git a/docs/administration-guide/databases/snowflake.md b/docs/administration-guide/databases/snowflake.md index 8f6417ea2b19..335ae1b6dfb6 100644 --- a/docs/administration-guide/databases/snowflake.md +++ b/docs/administration-guide/databases/snowflake.md @@ -2,7 +2,7 @@ Here are some gotchas to look out for when connecting to Snowflake: -- **Account vs. Region fields**. The `Account` field requires _only_ the alphanumeric account ID. Enter the suffixes indicating region and cloud provider in the `Region ID` field below. For example, if the Snowflake account URL is `https://az12345.ca-central-1.snowflakecomputing.com` then the `Account` would be `az12345` and the `Region ID` would be `ca-central-1.aws`. +- **Account**. 
The `Account` field requires the alphanumeric account ID _with_ the region that your Snowflake cluster is running on. For example, if you're running Snowflake on AWS and your account URL is `https://az12345.ca-central-1.snowflakecomputing.com`, then the `Account` would be `az12345.ca-central-1.aws` (note the `.aws` suffix). - **The `Role` and `Schema` fields are optional**. Specifying a role will override the database user's default role. For example, if the database user is `REPORTER` with default role `REPORTER`, but the user also has access to role `REPORTERPRODUCT`, then filling in `REPORTERPRODUCT` in the `Role` field will ensure that the `REPORTERPRODUCT` role is used instead of the user's default `REPORTER` role. If no schema is passed, then all schema available to that user and role will be listed as folders in the Metabase UI. diff --git a/docs/administration-guide/images/change-access.png b/docs/administration-guide/images/change-access.png deleted file mode 100644 index 11f766fc7ea9..000000000000 Binary files a/docs/administration-guide/images/change-access.png and /dev/null differ diff --git a/docs/administration-guide/images/permissions.png b/docs/administration-guide/images/permissions.png index 83ad3dcd39d2..344b3fc13764 100644 Binary files a/docs/administration-guide/images/permissions.png and b/docs/administration-guide/images/permissions.png differ diff --git a/docs/administration-guide/images/table-permissions.png b/docs/administration-guide/images/table-permissions.png deleted file mode 100644 index a96d8750217d..000000000000 Binary files a/docs/administration-guide/images/table-permissions.png and /dev/null differ diff --git a/docs/api-documentation.md b/docs/api-documentation.md index 566e2e9b61a5..10225167e417 100644 --- a/docs/api-documentation.md +++ b/docs/api-documentation.md @@ -1,47 +1,73 @@ # API Documentation for Metabase -_This file was generated from source comments by `lein run api-documentation`_. 
+_This file was generated from source comments by `clojure -M:run api-documentation`_. -Check out an introduction to the [Metabase API](https://www.metabase.com/learn/developing-applications/advanced-metabase/metabase-api.html). +Check out an introduction to the [Metabase API](https://www.metabase.com/learn/administration/metabase-api.html). -## `GET /api/activity/` -Get recent activity. +## Activity + + - [GET /api/activity/](#get-apiactivity) + - [GET /api/activity/recent_views](#get-apiactivityrecent_views) + +### `GET /api/activity/` +Get recent activity. -## `GET /api/activity/recent_views` +### `GET /api/activity/recent_views` Get the list of 10 things the current user has been viewing most recently. -## `DELETE /api/alert/:id` +## Alert + +/api/alert endpoints. + + - [DELETE /api/alert/:id](#delete-apialertid) + - [GET /api/alert/](#get-apialert) + - [GET /api/alert/:id](#get-apialertid) + - [GET /api/alert/question/:id](#get-apialertquestionid) + - [POST /api/alert/](#post-apialert) + - [PUT /api/alert/:id](#put-apialertid) + - [PUT /api/alert/:id/unsubscribe](#put-apialertidunsubscribe) -Delete an Alert. (DEPRECATED -- don't delete a Alert anymore -- archive it instead.) +### `DELETE /api/alert/:id` + +Delete an Alert. (DEPRECATED -- don't delete a Alert anymore -- archive it instead.). ##### PARAMS: * **`id`** +### `GET /api/alert/` -## `GET /api/alert/` - -Fetch all alerts +Fetch all alerts. ##### PARAMS: * **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). +* **`user_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `GET /api/alert/question/:id` +### `GET /api/alert/:id` -Fetch all questions for the given question (`Card`) id +Fetch an alert by ID. ##### PARAMS: * **`id`** +### `GET /api/alert/question/:id` + +Fetch all questions for the given question (`Card`) id. 
-## `POST /api/alert/` +##### PARAMS: + +* **`id`** value may be nil, or if non-nil, value must be an integer greater than zero. + +* **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). + +### `POST /api/alert/` Create a new Alert. @@ -59,8 +85,7 @@ Create a new Alert. * **`new-alert-request-body`** - -## `PUT /api/alert/:id` +### `PUT /api/alert/:id` Update a `Alert` with ID. @@ -82,17 +107,28 @@ Update a `Alert` with ID. * **`alert-updates`** +### `PUT /api/alert/:id/unsubscribe` -## `PUT /api/alert/:id/unsubscribe` - -Unsubscribes a user from the given alert +Unsubscribes a user from the given alert. ##### PARAMS: * **`id`** -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query` +## Automagic dashboards + + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query](#get-apiautomagic-dashboardsentityentity-id-or-query) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query](#get-apiautomagic-dashboardsentityentity-id-or-querycellcell-query) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/compare/:comparison-entity/:comparison-entity-id-or-query](#get-apiautomagic-dashboardsentityentity-id-or-querycellcell-querycomparecomparison-entitycomparison-entity-id-or-query) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule](#get-apiautomagic-dashboardsentityentity-id-or-querycellcell-queryruleprefixrule) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query](#get-apiautomagic-dashboardsentityentity-id-or-querycellcell-queryruleprefixrulecomparecomparison-entitycomparison-entity-id-or-query) + - [GET 
/api/automagic-dashboards/:entity/:entity-id-or-query/compare/:comparison-entity/:comparison-entity-id-or-query](#get-apiautomagic-dashboardsentityentity-id-or-querycomparecomparison-entitycomparison-entity-id-or-query) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule](#get-apiautomagic-dashboardsentityentity-id-or-queryruleprefixrule) + - [GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query](#get-apiautomagic-dashboardsentityentity-id-or-queryruleprefixrulecomparecomparison-entitycomparison-entity-id-or-query) + - [GET /api/automagic-dashboards/database/:id/candidates](#get-apiautomagic-dashboardsdatabaseidcandidates) + +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query` Return an automagic dashboard for entity `entity` with id `ìd`. @@ -104,8 +140,7 @@ Return an automagic dashboard for entity `entity` with id `ìd`. * **`show`** invalid show value - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query` Return an automagic dashboard analyzing cell in automagic dashboard for entity `entity` defined by @@ -121,8 +156,7 @@ Return an automagic dashboard analyzing cell in automagic dashboard for entity * **`show`** invalid show value - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/compare/:comparison-entity/:comparison-entity-id-or-query` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/compare/:comparison-entity/:comparison-entity-id-or-query` Return an automagic comparison dashboard for cell in automagic dashboard for entity `entity` with id `ìd` defined by query `cell-querry`; compared with entity `comparison-entity` with id @@ -142,8 +176,7 @@ Return an automagic comparison dashboard for cell in automagic dashboard for ent * **`comparison-entity-id-or-query`** - -## 
`GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule` Return an automagic dashboard analyzing cell in question with id `id` defined by query `cell-querry` using rule `rule`. @@ -162,8 +195,7 @@ Return an automagic dashboard analyzing cell in question with id `id` defined b * **`show`** invalid show value - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query` Return an automagic comparison dashboard for cell in automagic dashboard for entity `entity` with id `ìd` defined by query `cell-querry` using rule `rule`; compared with entity @@ -187,11 +219,10 @@ Return an automagic comparison dashboard for cell in automagic dashboard for ent * **`comparison-entity-id-or-query`** - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/compare/:comparison-entity/:comparison-entity-id-or-query` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/compare/:comparison-entity/:comparison-entity-id-or-query` Return an automagic comparison dashboard for entity `entity` with id `ìd` compared with entity - `comparison-entity` with id `comparison-entity-id-or-query.` + `comparison-entity` with id `comparison-entity-id-or-query.`. ##### PARAMS: @@ -205,8 +236,7 @@ Return an automagic comparison dashboard for entity `entity` with id `ìd` compa * **`comparison-entity-id-or-query`** - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule` Return an automagic dashboard for entity `entity` with id `ìd` using rule `rule`. 
@@ -222,8 +252,7 @@ Return an automagic dashboard for entity `entity` with id `ìd` using rule `rule * **`show`** invalid show value - -## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query` +### `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query` Return an automagic comparison dashboard for entity `entity` with id `ìd` using rule `rule`; compared with entity `comparison-entity` with id `comparison-entity-id-or-query.`. @@ -244,8 +273,7 @@ Return an automagic comparison dashboard for entity `entity` with id `ìd` using * **`comparison-entity-id-or-query`** - -## `GET /api/automagic-dashboards/database/:id/candidates` +### `GET /api/automagic-dashboards/database/:id/candidates` Return a list of candidates for automagic dashboards orderd by interestingness. @@ -254,7 +282,29 @@ Return a list of candidates for automagic dashboards orderd by interestingness. * **`id`** -## `DELETE /api/card/:card-id/favorite` +## Card + +/api/card endpoints. 
+ + - [DELETE /api/card/:card-id/favorite](#delete-apicardcard-idfavorite) + - [DELETE /api/card/:card-id/public_link](#delete-apicardcard-idpublic_link) + - [DELETE /api/card/:id](#delete-apicardid) + - [GET /api/card/](#get-apicard) + - [GET /api/card/:id](#get-apicardid) + - [GET /api/card/:id/related](#get-apicardidrelated) + - [GET /api/card/embeddable](#get-apicardembeddable) + - [GET /api/card/public](#get-apicardpublic) + - [POST /api/card/](#post-apicard) + - [POST /api/card/:card-id/favorite](#post-apicardcard-idfavorite) + - [POST /api/card/:card-id/public_link](#post-apicardcard-idpublic_link) + - [POST /api/card/:card-id/query](#post-apicardcard-idquery) + - [POST /api/card/:card-id/query/:export-format](#post-apicardcard-idqueryexport-format) + - [POST /api/card/collections](#post-apicardcollections) + - [POST /api/card/pivot/:card-id/query](#post-apicardpivotcard-idquery) + - [POST /api/card/related](#post-apicardrelated) + - [PUT /api/card/:id](#put-apicardid) + +### `DELETE /api/card/:card-id/favorite` Unfavorite a Card. @@ -262,8 +312,7 @@ Unfavorite a Card. * **`card-id`** - -## `DELETE /api/card/:card-id/public_link` +### `DELETE /api/card/:card-id/public_link` Delete the publicly-accessible link to this Card. @@ -273,21 +322,19 @@ You must be a superuser to do this. * **`card-id`** +### `DELETE /api/card/:id` -## `DELETE /api/card/:id` - -Delete a Card. (DEPRECATED -- don't delete a Card anymore -- archive it instead.) +Delete a Card. (DEPRECATED -- don't delete a Card anymore -- archive it instead.). ##### PARAMS: * **`id`** - -## `GET /api/card/` +### `GET /api/card/` Get all the Cards. Option filter param `f` can be used to change the set of Cards that are returned; default is `all`, but other options include `mine`, `fav`, `database`, `table`, `recent`, `popular`, and `archived`. See - corresponding implementation functions above for the specific behavior of each filter option. 
:card_index: + corresponding implementation functions above for the specific behavior of each filter option. :card_index:. ##### PARAMS: @@ -295,8 +342,7 @@ Get all the Cards. Option filter param `f` can be used to change the set of Card * **`model_id`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `GET /api/card/:id` +### `GET /api/card/:id` Get `Card` with ID. @@ -304,8 +350,7 @@ Get `Card` with ID. * **`id`** - -## `GET /api/card/:id/related` +### `GET /api/card/:id/related` Return related entities. @@ -313,23 +358,20 @@ Return related entities. * **`id`** - -## `GET /api/card/embeddable` +### `GET /api/card/embeddable` Fetch a list of Cards where `enable_embedding` is `true`. The cards can be embedded using the embedding endpoints and a signed JWT. You must be a superuser to do this. - -## `GET /api/card/public` +### `GET /api/card/public` Fetch a list of Cards with public UUIDs. These cards are publicly-accessible *if* public sharing is enabled. You must be a superuser to do this. - -## `POST /api/card/` +### `POST /api/card/` Create a new `Card`. @@ -349,12 +391,13 @@ Create a new `Card`. * **`name`** value must be a non-blank string. +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. + * **`dataset_query`** * **`display`** value must be a non-blank string. - -## `POST /api/card/:card-id/favorite` +### `POST /api/card/:card-id/favorite` Favorite a Card. @@ -362,8 +405,7 @@ Favorite a Card. * **`card-id`** - -## `POST /api/card/:card-id/public_link` +### `POST /api/card/:card-id/public_link` Generate publicly-accessible links for this Card. Returns UUID to be used in public links. (If this Card has already been shared, it will return the existing public link rather than creating a new one.) Public sharing must @@ -375,8 +417,7 @@ You must be a superuser to do this. 
* **`card-id`** - -## `POST /api/card/:card-id/query` +### `POST /api/card/:card-id/query` Run the query associated with a Card. @@ -388,11 +429,12 @@ Run the query associated with a Card. * **`ignore_cache`** value may be nil, or if non-nil, value must be a boolean. +* **`dashboard_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `POST /api/card/:card-id/query/:export-format` +### `POST /api/card/:card-id/query/:export-format` Run the query associated with a Card, and return its results as a file in the specified format. Note that this - expects the parameters as serialized JSON in the 'parameters' parameter + expects the parameters as serialized JSON in the 'parameters' parameter. ##### PARAMS: @@ -402,8 +444,7 @@ Run the query associated with a Card, and return its results as a file in the sp * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. - -## `POST /api/card/collections` +### `POST /api/card/collections` Bulk update endpoint for Card Collections. Move a set of `Cards` with CARD_IDS into a `Collection` with COLLECTION_ID, or remove them from any Collections by passing a `null` COLLECTION_ID. @@ -414,8 +455,7 @@ Bulk update endpoint for Card Collections. Move a set of `Cards` with CARD_IDS i * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `POST /api/card/pivot/:card-id/query` +### `POST /api/card/pivot/:card-id/query` Run the query associated with a Card. @@ -427,8 +467,7 @@ Run the query associated with a Card. * **`ignore_cache`** value may be nil, or if non-nil, value must be a boolean. - -## `POST /api/card/related` +### `POST /api/card/related` Return related entities for an ad-hoc query. @@ -436,8 +475,7 @@ Return related entities for an ad-hoc query. * **`query`** - -## `PUT /api/card/:id` +### `PUT /api/card/:id` Update a `Card`. @@ -465,6 +503,8 @@ Update a `Card`. 
* **`embedding_params`** value may be nil, or if non-nil, value must be a valid embedding params map. +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. + * **`dataset_query`** value may be nil, or if non-nil, value must be a map. * **`id`** @@ -472,7 +512,26 @@ Update a `Card`. * **`display`** value may be nil, or if non-nil, value must be a non-blank string. -## `GET /api/collection/` +## Collection + +`/api/collection` endpoints. By default, these endpoints operate on Collections in the 'default' namespace, which is + the one that has things like Dashboards and Cards. Other namespaces of Collections exist as well, such as the + `:snippet` namespace, (called 'Snippet folders' in the UI). These namespaces are completely independent hierarchies. + To use these endpoints for other Collections namespaces, you can pass the `?namespace=` parameter (e.g. + `?namespace=snippet`). + + - [GET /api/collection/](#get-apicollection) + - [GET /api/collection/:id](#get-apicollectionid) + - [GET /api/collection/:id/items](#get-apicollectioniditems) + - [GET /api/collection/graph](#get-apicollectiongraph) + - [GET /api/collection/root](#get-apicollectionroot) + - [GET /api/collection/root/items](#get-apicollectionrootitems) + - [GET /api/collection/tree](#get-apicollectiontree) + - [POST /api/collection/](#post-apicollection) + - [PUT /api/collection/:id](#put-apicollectionid) + - [PUT /api/collection/graph](#put-apicollectiongraph) + +### `GET /api/collection/` Fetch a list of all Collections that the current user has read permissions for (`:can_write` is returned as an additional property of each Collection so you can tell which of these you have write permissions for.) @@ -486,33 +545,39 @@ Fetch a list of all Collections that the current user has read permissions for ( * **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. 
+### `GET /api/collection/:id` -## `GET /api/collection/:id` - -Fetch a specific Collection with standard details added +Fetch a specific Collection with standard details added. ##### PARAMS: * **`id`** - -## `GET /api/collection/:id/items` +### `GET /api/collection/:id/items` Fetch a specific Collection's items with the following options: - * `model` - only include objects of a specific `model`. If unspecified, returns objects of all models + * `models` - only include objects of a specific set of `models`. If unspecified, returns objects of all models * `archived` - when `true`, return archived objects *instead* of unarchived ones. Defaults to `false`. + * `pinned_state` - when `is_pinned`, return pinned objects only. + when `is_not_pinned`, return non pinned objects only. + when `all`, return everything. By default returns everything. ##### PARAMS: * **`id`** -* **`model`** value may be nil, or if non-nil, value must be one of: `card`, `collection`, `dashboard`, `pulse`, `snippet`. +* **`models`** value may be nil, or if non-nil, value must satisfy one of the following requirements: 1) value must be an array. Each value must be one of: `card`, `collection`, `dashboard`, `no_models`, `pulse`, `snippet`. 2) value must be one of: `card`, `collection`, `dashboard`, `no_models`, `pulse`, `snippet`. * **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). +* **`pinned_state`** value may be nil, or if non-nil, value must be one of: `all`, `is_not_pinned`, `is_pinned`. + +* **`sort_column`** value may be nil, or if non-nil, value must be one of: `last_edited_at`, `last_edited_by`, `model`, `name`. -## `GET /api/collection/graph` +* **`sort_direction`** value may be nil, or if non-nil, value must be one of: `asc`, `desc`. + +### `GET /api/collection/graph` Fetch a graph of all Collection Permissions. @@ -522,17 +587,15 @@ You must be a superuser to do this. 
* **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. +### `GET /api/collection/root` -## `GET /api/collection/root` - -Return the 'Root' Collection object with standard details added +Return the 'Root' Collection object with standard details added. ##### PARAMS: * **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. - -## `GET /api/collection/root/items` +### `GET /api/collection/root/items` Fetch objects that the current user should see at their root level. As mentioned elsewhere, the 'Root' Collection doesn't actually exist as a row in the application DB: it's simply a virtual Collection where things with no @@ -550,14 +613,19 @@ Fetch objects that the current user should see at their root level. As mentioned ##### PARAMS: -* **`model`** value may be nil, or if non-nil, value must be one of: `card`, `collection`, `dashboard`, `pulse`, `snippet`. +* **`models`** value may be nil, or if non-nil, value must satisfy one of the following requirements: 1) value must be an array. Each value must be one of: `card`, `collection`, `dashboard`, `no_models`, `pulse`, `snippet`. 2) value must be one of: `card`, `collection`, `dashboard`, `no_models`, `pulse`, `snippet`. * **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). * **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. +* **`pinned_state`** value may be nil, or if non-nil, value must be one of: `all`, `is_not_pinned`, `is_pinned`. -## `GET /api/collection/tree` +* **`sort_column`** value may be nil, or if non-nil, value must be one of: `last_edited_at`, `last_edited_by`, `model`, `name`. + +* **`sort_direction`** value may be nil, or if non-nil, value must be one of: `asc`, `desc`. + +### `GET /api/collection/tree` Similar to `GET /`, but returns Collections in a tree structure, e.g. @@ -568,10 +636,13 @@ Similar to `GET /`, but returns Collections in a tree structure, e.g. 
:children [{:name "E"}]} {:name "F" :children [{:name "G"}]}]}]} - {:name "H"}] + {:name "H"}]. + +##### PARAMS: +* **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. -## `POST /api/collection/` +### `POST /api/collection/` Create a new Collection. @@ -587,29 +658,33 @@ Create a new Collection. * **`namespace`** value may be nil, or if non-nil, value must be a non-blank string. +* **`authority_level`** value may be nil, or if non-nil, value must be one of: `official`. -## `PUT /api/collection/:id` +### `PUT /api/collection/:id` Modify an existing Collection, including archiving or unarchiving it, or moving it. ##### PARAMS: -* **`id`** - -* **`name`** value may be nil, or if non-nil, value must be a non-blank string. - -* **`color`** value may be nil, or if non-nil, value must be a string that matches the regex `^#[0-9A-Fa-f]{6}$`. +* **`authority_level`** value may be nil, or if non-nil, value must be one of: `official`. * **`description`** value may be nil, or if non-nil, value must be a non-blank string. * **`archived`** value may be nil, or if non-nil, value must be a boolean. +* **`collection-updates`** + +* **`color`** value may be nil, or if non-nil, value must be a string that matches the regex `^#[0-9A-Fa-f]{6}$`. + +* **`name`** value may be nil, or if non-nil, value must be a non-blank string. + * **`parent_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -* **`collection-updates`** +* **`id`** +* **`update_collection_tree_authority_level`** value may be nil, or if non-nil, value must be a boolean. -## `PUT /api/collection/graph` +### `PUT /api/collection/graph` Do a batch update of Collections Permissions by passing in a modified graph. @@ -622,7 +697,35 @@ You must be a superuser to do this. * **`body`** value must be a map. -## `DELETE /api/dashboard/:dashboard-id/public_link` +## Dashboard + +/api/dashboard endpoints. 
+ + - [DELETE /api/dashboard/:dashboard-id/public_link](#delete-apidashboarddashboard-idpublic_link) + - [DELETE /api/dashboard/:id](#delete-apidashboardid) + - [DELETE /api/dashboard/:id/cards](#delete-apidashboardidcards) + - [DELETE /api/dashboard/:id/favorite](#delete-apidashboardidfavorite) + - [GET /api/dashboard/](#get-apidashboard) + - [GET /api/dashboard/:id](#get-apidashboardid) + - [GET /api/dashboard/:id/params/:param-key/search/:query](#get-apidashboardidparamsparam-keysearchquery) + - [GET /api/dashboard/:id/params/:param-key/values](#get-apidashboardidparamsparam-keyvalues) + - [GET /api/dashboard/:id/related](#get-apidashboardidrelated) + - [GET /api/dashboard/:id/revisions](#get-apidashboardidrevisions) + - [GET /api/dashboard/embeddable](#get-apidashboardembeddable) + - [GET /api/dashboard/params/valid-filter-fields](#get-apidashboardparamsvalid-filter-fields) + - [GET /api/dashboard/public](#get-apidashboardpublic) + - [POST /api/dashboard/](#post-apidashboard) + - [POST /api/dashboard/:dashboard-id/public_link](#post-apidashboarddashboard-idpublic_link) + - [POST /api/dashboard/:from-dashboard-id/copy](#post-apidashboardfrom-dashboard-idcopy) + - [POST /api/dashboard/:id/cards](#post-apidashboardidcards) + - [POST /api/dashboard/:id/favorite](#post-apidashboardidfavorite) + - [POST /api/dashboard/:id/revert](#post-apidashboardidrevert) + - [POST /api/dashboard/save](#post-apidashboardsave) + - [POST /api/dashboard/save/collection/:parent-collection-id](#post-apidashboardsavecollectionparent-collection-id) + - [PUT /api/dashboard/:id](#put-apidashboardid) + - [PUT /api/dashboard/:id/cards](#put-apidashboardidcards) + +### `DELETE /api/dashboard/:dashboard-id/public_link` Delete the publicly-accessible link to this Dashboard. @@ -632,8 +735,7 @@ You must be a superuser to do this. * **`dashboard-id`** - -## `DELETE /api/dashboard/:id` +### `DELETE /api/dashboard/:id` Delete a Dashboard. @@ -641,8 +743,7 @@ Delete a Dashboard. 
* **`id`** - -## `DELETE /api/dashboard/:id/cards` +### `DELETE /api/dashboard/:id/cards` Remove a `DashboardCard` from a Dashboard. @@ -652,8 +753,7 @@ Remove a `DashboardCard` from a Dashboard. * **`dashcardId`** value must be a valid integer greater than zero. - -## `DELETE /api/dashboard/:id/favorite` +### `DELETE /api/dashboard/:id/favorite` Unfavorite a Dashboard. @@ -661,21 +761,19 @@ Unfavorite a Dashboard. * **`id`** - -## `GET /api/dashboard/` +### `GET /api/dashboard/` Get `Dashboards`. With filter option `f` (default `all`), restrict results as follows: * `all` - Return all Dashboards. * `mine` - Return Dashboards created by the current user. - * `archived` - Return Dashboards that have been archived. (By default, these are *excluded*.) + * `archived` - Return Dashboards that have been archived. (By default, these are *excluded*.). ##### PARAMS: * **`f`** value may be nil, or if non-nil, value must be one of: `all`, `archived`, `mine`. - -## `GET /api/dashboard/:id` +### `GET /api/dashboard/:id` Get Dashboard with ID. @@ -683,8 +781,7 @@ Get Dashboard with ID. * **`id`** - -## `GET /api/dashboard/:id/params/:param-key/search/:query` +### `GET /api/dashboard/:id/params/:param-key/search/:query` Fetch possible values of the parameter whose ID is `:param-key` that contain `:query`. Optionally restrict these values by passing query parameters like `other-parameter=value` e.g. @@ -693,7 +790,7 @@ Fetch possible values of the parameter whose ID is `:param-key` that contain `:q ;; to 100 GET /api/dashboard/1/params/abc/search/Cam?def=100 - Currently limited to first 100 results + Currently limited to first 1000 results. ##### PARAMS: @@ -705,14 +802,13 @@ Fetch possible values of the parameter whose ID is `:param-key` that contain `:q * **`query-params`** - -## `GET /api/dashboard/:id/params/:param-key/values` +### `GET /api/dashboard/:id/params/:param-key/values` Fetch possible values of the parameter whose ID is `:param-key`. 
Optionally restrict these values by passing query parameters like `other-parameter=value` e.g. ;; fetch values for Dashboard 1 parameter 'abc' that are possible when parameter 'def' is set to 100 - GET /api/dashboard/1/params/abc/values?def=100 + GET /api/dashboard/1/params/abc/values?def=100. ##### PARAMS: @@ -722,8 +818,7 @@ Fetch possible values of the parameter whose ID is `:param-key`. Optionally rest * **`query-params`** - -## `GET /api/dashboard/:id/related` +### `GET /api/dashboard/:id/related` Return related entities. @@ -731,8 +826,7 @@ Return related entities. * **`id`** - -## `GET /api/dashboard/:id/revisions` +### `GET /api/dashboard/:id/revisions` Fetch `Revisions` for Dashboard with ID. @@ -740,16 +834,14 @@ Fetch `Revisions` for Dashboard with ID. * **`id`** - -## `GET /api/dashboard/embeddable` +### `GET /api/dashboard/embeddable` Fetch a list of Dashboards where `enable_embedding` is `true`. The dashboards can be embedded using the embedding endpoints and a signed JWT. You must be a superuser to do this. - -## `GET /api/dashboard/params/valid-filter-fields` +### `GET /api/dashboard/params/valid-filter-fields` Utility endpoint for powering Dashboard UI. Given some set of `filtered` Field IDs (presumably Fields used in parameters) and a set of `filtering` Field IDs that will be used to restrict values of `filtered` Fields, for each @@ -770,7 +862,7 @@ Utility endpoint for powering Dashboard UI. Given some set of `filtered` Field I Results are returned as a map of - `filtered` Field ID -> subset of `filtering` Field IDs that would be used in chain filter query + `filtered` Field ID -> subset of `filtering` Field IDs that would be used in chain filter query. ##### PARAMS: @@ -778,16 +870,14 @@ Utility endpoint for powering Dashboard UI. Given some set of `filtered` Field I * **`filtering`** value may be nil, or if non-nil, value must satisfy one of the following requirements: 1) value must be a valid integer greater than zero. 
2) value must be an array. Each value must be a valid integer greater than zero. The array cannot be empty. - -## `GET /api/dashboard/public` +### `GET /api/dashboard/public` Fetch a list of Dashboards with public UUIDs. These dashboards are publicly-accessible *if* public sharing is enabled. You must be a superuser to do this. - -## `POST /api/dashboard/` +### `POST /api/dashboard/` Create a new Dashboard. @@ -799,14 +889,15 @@ Create a new Dashboard. * **`parameters`** value must be an array. Each value must be a map. +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. + * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. * **`collection_position`** value may be nil, or if non-nil, value must be an integer greater than zero. * **`dashboard`** - -## `POST /api/dashboard/:dashboard-id/public_link` +### `POST /api/dashboard/:dashboard-id/public_link` Generate publicly-accessible links for this Dashboard. Returns UUID to be used in public links. (If this Dashboard has already been shared, it will return the existing public link rather than creating a new one.) Public @@ -818,8 +909,7 @@ You must be a superuser to do this. * **`dashboard-id`** - -## `POST /api/dashboard/:from-dashboard-id/copy` +### `POST /api/dashboard/:from-dashboard-id/copy` Copy a Dashboard. @@ -837,8 +927,7 @@ Copy a Dashboard. * **`dashboard`** - -## `POST /api/dashboard/:id/cards` +### `POST /api/dashboard/:id/cards` Add a `Card` to a Dashboard. @@ -854,8 +943,7 @@ Add a `Card` to a Dashboard. * **`dashboard-card`** - -## `POST /api/dashboard/:id/favorite` +### `POST /api/dashboard/:id/favorite` Favorite a Dashboard. @@ -863,8 +951,7 @@ Favorite a Dashboard. * **`id`** - -## `POST /api/dashboard/:id/revert` +### `POST /api/dashboard/:id/revert` Revert a Dashboard to a prior `Revision`. @@ -874,8 +961,7 @@ Revert a Dashboard to a prior `Revision`. 
* **`revision_id`** value must be an integer greater than zero. - -## `POST /api/dashboard/save` +### `POST /api/dashboard/save` Save a denormalized description of dashboard. @@ -883,8 +969,7 @@ Save a denormalized description of dashboard. * **`dashboard`** - -## `POST /api/dashboard/save/collection/:parent-collection-id` +### `POST /api/dashboard/save/collection/:parent-collection-id` Save a denormalized description of dashboard into collection with ID `:parent-collection-id`. @@ -894,8 +979,7 @@ Save a denormalized description of dashboard into collection with ID `:parent-co * **`dashboard`** - -## `PUT /api/dashboard/:id` +### `PUT /api/dashboard/:id` Update a Dashboard. @@ -929,12 +1013,13 @@ Update a Dashboard. * **`embedding_params`** value may be nil, or if non-nil, value must be a valid embedding params map. +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. + * **`id`** * **`position`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `PUT /api/dashboard/:id/cards` +### `PUT /api/dashboard/:id/cards` Update `Cards` on a Dashboard. Request body should have the form: @@ -944,7 +1029,7 @@ Update `Cards` on a Dashboard. Request body should have the form: :row ... :col ... :series [{:id 123 - ...}]} ...]} + ...}]} ...]}. ##### PARAMS: @@ -953,7 +1038,34 @@ Update `Cards` on a Dashboard. Request body should have the form: * **`cards`** -## `DELETE /api/database/:id` +## Database + +/api/database endpoints. 
+ + - [DELETE /api/database/:id](#delete-apidatabaseid) + - [GET /api/database/](#get-apidatabase) + - [GET /api/database/:id](#get-apidatabaseid) + - [GET /api/database/:id/autocomplete_suggestions](#get-apidatabaseidautocomplete_suggestions) + - [GET /api/database/:id/fields](#get-apidatabaseidfields) + - [GET /api/database/:id/idfields](#get-apidatabaseididfields) + - [GET /api/database/:id/metadata](#get-apidatabaseidmetadata) + - [GET /api/database/:id/schema/](#get-apidatabaseidschema) + - [GET /api/database/:id/schema/:schema](#get-apidatabaseidschemaschema) + - [GET /api/database/:id/schemas](#get-apidatabaseidschemas) + - [GET /api/database/:virtual-db/metadata](#get-apidatabasevirtual-dbmetadata) + - [GET /api/database/:virtual-db/schema/:schema](#get-apidatabasevirtual-dbschemaschema) + - [GET /api/database/:virtual-db/schemas](#get-apidatabasevirtual-dbschemas) + - [GET /api/database/db-ids-with-deprecated-drivers](#get-apidatabasedb-ids-with-deprecated-drivers) + - [POST /api/database/](#post-apidatabase) + - [POST /api/database/:id/discard_values](#post-apidatabaseiddiscard_values) + - [POST /api/database/:id/rescan_values](#post-apidatabaseidrescan_values) + - [POST /api/database/:id/sync](#post-apidatabaseidsync) + - [POST /api/database/:id/sync_schema](#post-apidatabaseidsync_schema) + - [POST /api/database/sample_dataset](#post-apidatabasesample_dataset) + - [POST /api/database/validate](#post-apidatabasevalidate) + - [PUT /api/database/:id](#put-apidatabaseid) + +### `DELETE /api/database/:id` Delete a `Database`. @@ -961,8 +1073,7 @@ Delete a `Database`. * **`id`** - -## `GET /api/database/` +### `GET /api/database/` Fetch all `Databases`. @@ -975,7 +1086,7 @@ Fetch all `Databases`. * `include_cards` here means we should also include virtual Table entries for saved Questions, e.g. so we can easily use them as source Tables in queries. This is a deprecated alias for `saved=true` + `include=tables` (for the saved - questions virtual DB). 
Prefer using `include` and `saved` instead. + questions virtual DB). Prefer using `include` and `saved` instead. . ##### PARAMS: @@ -987,8 +1098,7 @@ Fetch all `Databases`. * **`saved`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). - -## `GET /api/database/:id` +### `GET /api/database/:id` Get a single Database with `id`. Optionally pass `?include=tables` or `?include=tables.fields` to include the Tables belonging to this database, or the Tables and Fields, respectively. @@ -999,8 +1109,7 @@ Get a single Database with `id`. Optionally pass `?include=tables` or `?include= * **`include`** value may be nil, or if non-nil, value must be one of: `tables`, `tables.fields`. - -## `GET /api/database/:id/autocomplete_suggestions` +### `GET /api/database/:id/autocomplete_suggestions` Return a list of autocomplete suggestions for a given `prefix`. @@ -1008,7 +1117,7 @@ Return a list of autocomplete suggestions for a given `prefix`. and `Fields` in this `Database`. Tables are returned in the format `[table_name "Table"]`; - Fields are returned in the format `[field_name "table_name base_type semantic_type"]` + Fields are returned in the format `[field_name "table_name base_type semantic_type"]`. ##### PARAMS: @@ -1016,8 +1125,7 @@ Return a list of autocomplete suggestions for a given `prefix`. * **`prefix`** value must be a non-blank string. - -## `GET /api/database/:id/fields` +### `GET /api/database/:id/fields` Get a list of all `Fields` in `Database`. @@ -1025,8 +1133,7 @@ Get a list of all `Fields` in `Database`. * **`id`** - -## `GET /api/database/:id/idfields` +### `GET /api/database/:id/idfields` Get a list of all primary key `Fields` for `Database`. @@ -1034,8 +1141,7 @@ Get a list of all primary key `Fields` for `Database`. * **`id`** - -## `GET /api/database/:id/metadata` +### `GET /api/database/:id/metadata` Get metadata about a `Database`, including all of its `Tables` and `Fields`. 
By default only non-hidden tables and fields are returned. Passing include_hidden=true includes them. @@ -1047,8 +1153,7 @@ Get metadata about a `Database`, including all of its `Tables` and `Fields`. * **`include_hidden`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). - -## `GET /api/database/:id/schema/` +### `GET /api/database/:id/schema/` Return a list of Tables for a Database whose `schema` is `nil` or an empty string. @@ -1056,10 +1161,9 @@ Return a list of Tables for a Database whose `schema` is `nil` or an empty strin * **`id`** +### `GET /api/database/:id/schema/:schema` -## `GET /api/database/:id/schema/:schema` - -Returns a list of Tables for the given Database `id` and `schema` +Returns a list of Tables for the given Database `id` and `schema`. ##### PARAMS: @@ -1067,23 +1171,20 @@ Returns a list of Tables for the given Database `id` and `schema` * **`schema`** +### `GET /api/database/:id/schemas` -## `GET /api/database/:id/schemas` - -Returns a list of all the schemas found for the database `id` +Returns a list of all the schemas found for the database `id`. ##### PARAMS: * **`id`** - -## `GET /api/database/:virtual-db/metadata` +### `GET /api/database/:virtual-db/metadata` Endpoint that provides metadata for the Saved Questions 'virtual' database. Used for fooling the frontend and allowing it to treat the Saved Questions virtual DB just like any other database. - -## `GET /api/database/:virtual-db/schema/:schema` +### `GET /api/database/:virtual-db/schema/:schema` Returns a list of Tables for the saved questions virtual database. @@ -1091,13 +1192,15 @@ Returns a list of Tables for the saved questions virtual database. * **`schema`** - -## `GET /api/database/:virtual-db/schemas` +### `GET /api/database/:virtual-db/schemas` Returns a list of all the schemas found for the saved questions virtual database. 
+### `GET /api/database/db-ids-with-deprecated-drivers` -## `POST /api/database/` +Return a list of database IDs using currently deprecated drivers. + +### `POST /api/database/` Add a new `Database`. @@ -1119,8 +1222,9 @@ You must be a superuser to do this. * **`auto_run_queries`** value may be nil, or if non-nil, value must be a boolean. +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `POST /api/database/:id/discard_values` +### `POST /api/database/:id/discard_values` Discards all saved field values for this `Database`. @@ -1130,8 +1234,7 @@ You must be a superuser to do this. * **`id`** - -## `POST /api/database/:id/rescan_values` +### `POST /api/database/:id/rescan_values` Trigger a manual scan of the field values for this `Database`. @@ -1141,8 +1244,7 @@ You must be a superuser to do this. * **`id`** - -## `POST /api/database/:id/sync` +### `POST /api/database/:id/sync` Update the metadata for this `Database`. This happens asynchronously. @@ -1150,8 +1252,7 @@ Update the metadata for this `Database`. This happens asynchronously. * **`id`** - -## `POST /api/database/:id/sync_schema` +### `POST /api/database/:id/sync_schema` Trigger a manual update of the schema metadata for this `Database`. @@ -1161,15 +1262,13 @@ You must be a superuser to do this. * **`id`** - -## `POST /api/database/sample_dataset` +### `POST /api/database/sample_dataset` Add the sample dataset as a new `Database`. You must be a superuser to do this. - -## `POST /api/database/validate` +### `POST /api/database/validate` Validate that we can connect to a database given a set of details. @@ -1181,8 +1280,7 @@ You must be a superuser to do this. * **`details`** value must be a map. - -## `PUT /api/database/:id` +### `PUT /api/database/:id` Update a `Database`. @@ -1208,6 +1306,8 @@ You must be a superuser to do this. * **`is_full_sync`** +* **`cache_ttl`** value may be nil, or if non-nil, value must be an integer greater than zero. 
+ * **`details`** value may be nil, or if non-nil, value must be a map. * **`id`** @@ -1215,7 +1315,17 @@ You must be a superuser to do this. * **`is_on_demand`** -## `POST /api/dataset/` +## Dataset + +/api/dataset endpoints. + + - [POST /api/dataset/](#post-apidataset) + - [POST /api/dataset/:export-format](#post-apidatasetexport-format) + - [POST /api/dataset/duration](#post-apidatasetduration) + - [POST /api/dataset/native](#post-apidatasetnative) + - [POST /api/dataset/pivot](#post-apidatasetpivot) + +### `POST /api/dataset/` Execute a query and retrieve the results in the usual format. @@ -1227,8 +1337,7 @@ Execute a query and retrieve the results in the usual format. * **`query`** - -## `POST /api/dataset/:export-format` +### `POST /api/dataset/:export-format` Execute a query and download the result data as a file in the specified format. @@ -1238,8 +1347,9 @@ Execute a query and download the result data as a file in the specified format. * **`query`** value must be a valid JSON string. +* **`visualization_settings`** value must be a valid JSON string. -## `POST /api/dataset/duration` +### `POST /api/dataset/duration` Get historical query execution duration. @@ -1249,8 +1359,7 @@ Get historical query execution duration. * **`query`** - -## `POST /api/dataset/native` +### `POST /api/dataset/native` Fetch a native version of an MBQL query. @@ -1258,10 +1367,9 @@ Fetch a native version of an MBQL query. * **`query`** +### `POST /api/dataset/pivot` -## `POST /api/dataset/pivot` - -Generate a pivoted dataset for an ad-hoc query +Generate a pivoted dataset for an ad-hoc query. ##### PARAMS: @@ -1272,21 +1380,28 @@ Generate a pivoted dataset for an ad-hoc query * **`query`** -## `DELETE /api/email/` - -Clear all email related settings. You must be a superuser to ddo this +## Email -You must be a superuser to do this. +/api/email endpoints. 
+ - [DELETE /api/email/](#delete-apiemail) + - [POST /api/email/test](#post-apiemailtest) + - [PUT /api/email/](#put-apiemail) -## `POST /api/email/test` +### `DELETE /api/email/` -Send a test email. You must be a superuser to do this. +Clear all email related settings. You must be a superuser to do this. You must be a superuser to do this. +### `POST /api/email/test` + +Send a test email using the SMTP Settings. You must be a superuser to do this. Returns `{:ok true}` if we were able + to send the message successfully, otherwise a standard 400 error response. + +You must be a superuser to do this. -## `PUT /api/email/` +### `PUT /api/email/` Update multiple email Settings. You must be a superuser to do this. @@ -1297,20 +1412,54 @@ You must be a superuser to do this. * **`settings`** value must be a map. -## `GET /api/embed/card/:token` +## Embed + +Various endpoints that use [JSON web tokens](https://jwt.io/introduction/) to fetch Cards and Dashboards. + The endpoints are the same as the ones in `api/public/`, and differ only in the way they are authorized. + + To use these endpoints: + + 1. Set the `embedding-secret-key` Setting to a hexadecimal-encoded 32-byte sequence (i.e., a 64-character string). + You can use `/api/util/random_token` to get a cryptographically-secure value for this. + 2. Sign/base-64 encode a JSON Web Token using the secret key and pass it as the relevant part of the URL path + to the various endpoints here. + + Tokens can have the following fields: + + {:resource {:question + :dashboard } + :params }. 
+ + - [GET /api/embed/card/:token](#get-apiembedcardtoken) + - [GET /api/embed/card/:token/field/:field-id/remapping/:remapped-id](#get-apiembedcardtokenfieldfield-idremappingremapped-id) + - [GET /api/embed/card/:token/field/:field-id/search/:search-field-id](#get-apiembedcardtokenfieldfield-idsearchsearch-field-id) + - [GET /api/embed/card/:token/field/:field-id/values](#get-apiembedcardtokenfieldfield-idvalues) + - [GET /api/embed/card/:token/query](#get-apiembedcardtokenquery) + - [GET /api/embed/card/:token/query/:export-format](#get-apiembedcardtokenqueryexport-format) + - [GET /api/embed/dashboard/:token](#get-apiembeddashboardtoken) + - [GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id](#get-apiembeddashboardtokendashcarddashcard-idcardcard-id) + - [GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id/:export-format](#get-apiembeddashboardtokendashcarddashcard-idcardcard-idexport-format) + - [GET /api/embed/dashboard/:token/field/:field-id/remapping/:remapped-id](#get-apiembeddashboardtokenfieldfield-idremappingremapped-id) + - [GET /api/embed/dashboard/:token/field/:field-id/search/:search-field-id](#get-apiembeddashboardtokenfieldfield-idsearchsearch-field-id) + - [GET /api/embed/dashboard/:token/field/:field-id/values](#get-apiembeddashboardtokenfieldfield-idvalues) + - [GET /api/embed/dashboard/:token/params/:param-key/search/:prefix](#get-apiembeddashboardtokenparamsparam-keysearchprefix) + - [GET /api/embed/dashboard/:token/params/:param-key/values](#get-apiembeddashboardtokenparamsparam-keyvalues) + - [GET /api/embed/pivot/card/:token/query](#get-apiembedpivotcardtokenquery) + - [GET /api/embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id](#get-apiembedpivotdashboardtokendashcarddashcard-idcardcard-id) + +### `GET /api/embed/card/:token` Fetch a Card via a JSON Web Token signed with the `embedding-secret-key`. Token should have the following format: - {:resource {:question }} + {:resource {:question }}. 
##### PARAMS: * **`token`** - -## `GET /api/embed/card/:token/field/:field-id/remapping/:remapped-id` +### `GET /api/embed/card/:token/field/:field-id/remapping/:remapped-id` Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with embedded Cards. @@ -1325,8 +1474,7 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/: * **`value`** value must be a non-blank string. - -## `GET /api/embed/card/:token/field/:field-id/search/:search-field-id` +### `GET /api/embed/card/:token/field/:field-id/search/:search-field-id` Search for values of a Field that is referenced by an embedded Card. @@ -1342,8 +1490,7 @@ Search for values of a Field that is referenced by an embedded Card. * **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. - -## `GET /api/embed/card/:token/field/:field-id/values` +### `GET /api/embed/card/:token/field/:field-id/values` Fetch FieldValues for a Field that is referenced by an embedded Card. @@ -1353,15 +1500,14 @@ Fetch FieldValues for a Field that is referenced by an embedded Card. * **`field-id`** - -## `GET /api/embed/card/:token/query` +### `GET /api/embed/card/:token/query` Fetch the results of running a Card using a JSON Web Token signed with the `embedding-secret-key`. Token should have the following format: {:resource {:question } - :params } + :params }. ##### PARAMS: @@ -1371,8 +1517,7 @@ Fetch the results of running a Card using a JSON Web Token signed with the `embe * **`query-params`** - -## `GET /api/embed/card/:token/query/:export-format` +### `GET /api/embed/card/:token/query/:export-format` Like `GET /api/embed/card/query`, but returns the results as a file in the specified format. 
@@ -1384,24 +1529,22 @@ Like `GET /api/embed/card/query`, but returns the results as a file in the speci * **`query-params`** - -## `GET /api/embed/dashboard/:token` +### `GET /api/embed/dashboard/:token` Fetch a Dashboard via a JSON Web Token signed with the `embedding-secret-key`. Token should have the following format: - {:resource {:dashboard }} + {:resource {:dashboard }}. ##### PARAMS: * **`token`** - -## `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id` +### `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id` Fetch the results of running a Card belonging to a Dashboard using a JSON Web Token signed with the - `embedding-secret-key` + `embedding-secret-key`. ##### PARAMS: @@ -1415,11 +1558,10 @@ Fetch the results of running a Card belonging to a Dashboard using a JSON Web To * **`query-params`** - -## `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id/:export-format` +### `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id/:export-format` Fetch the results of running a Card belonging to a Dashboard using a JSON Web Token signed with the - `embedding-secret-key` return the data in one of the export formats + `embedding-secret-key` return the data in one of the export formats. ##### PARAMS: @@ -1433,8 +1575,7 @@ Fetch the results of running a Card belonging to a Dashboard using a JSON Web To * **`query-params`** - -## `GET /api/embed/dashboard/:token/field/:field-id/remapping/:remapped-id` +### `GET /api/embed/dashboard/:token/field/:field-id/remapping/:remapped-id` Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with embedded Dashboards. @@ -1449,8 +1590,7 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/: * **`value`** value must be a non-blank string. 
- -## `GET /api/embed/dashboard/:token/field/:field-id/search/:search-field-id` +### `GET /api/embed/dashboard/:token/field/:field-id/search/:search-field-id` Search for values of a Field that is referenced by a Card in an embedded Dashboard. @@ -1466,8 +1606,7 @@ Search for values of a Field that is referenced by a Card in an embedded Dashboa * **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. - -## `GET /api/embed/dashboard/:token/field/:field-id/values` +### `GET /api/embed/dashboard/:token/field/:field-id/values` Fetch FieldValues for a Field that is used as a param in an embedded Dashboard. @@ -1477,8 +1616,7 @@ Fetch FieldValues for a Field that is used as a param in an embedded Dashboard. * **`field-id`** - -## `GET /api/embed/dashboard/:token/params/:param-key/search/:prefix` +### `GET /api/embed/dashboard/:token/params/:param-key/search/:prefix` Embedded version of chain filter search endpoint. @@ -1492,8 +1630,7 @@ Embedded version of chain filter search endpoint. * **`query-params`** - -## `GET /api/embed/dashboard/:token/params/:param-key/values` +### `GET /api/embed/dashboard/:token/params/:param-key/values` Embedded version of chain filter values endpoint. @@ -1505,15 +1642,14 @@ Embedded version of chain filter values endpoint. * **`query-params`** - -## `GET /api/embed/pivot/card/:token/query` +### `GET /api/embed/pivot/card/:token/query` Fetch the results of running a Card using a JSON Web Token signed with the `embedding-secret-key`. Token should have the following format: {:resource {:question } - :params } + :params }. 
##### PARAMS: @@ -1523,11 +1659,10 @@ Fetch the results of running a Card using a JSON Web Token signed with the `embe * **`query-params`** - -## `GET /api/embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id` +### `GET /api/embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id` Fetch the results of running a Card belonging to a Dashboard using a JSON Web Token signed with the - `embedding-secret-key` + `embedding-secret-key`. ##### PARAMS: @@ -1542,16 +1677,31 @@ Fetch the results of running a Card belonging to a Dashboard using a JSON Web To * **`query-params`** -## `DELETE /api/field/:id/dimension` +## Field + + - [DELETE /api/field/:id/dimension](#delete-apifieldiddimension) + - [GET /api/field/:id](#get-apifieldid) + - [GET /api/field/:id/related](#get-apifieldidrelated) + - [GET /api/field/:id/remapping/:remapped-id](#get-apifieldidremappingremapped-id) + - [GET /api/field/:id/search/:search-id](#get-apifieldidsearchsearch-id) + - [GET /api/field/:id/summary](#get-apifieldidsummary) + - [GET /api/field/:id/values](#get-apifieldidvalues) + - [GET /api/field/field%2C:field-name%2C:options/values](#get-apifieldfield2cfield-name2coptionsvalues) + - [POST /api/field/:id/dimension](#post-apifieldiddimension) + - [POST /api/field/:id/discard_values](#post-apifieldiddiscard_values) + - [POST /api/field/:id/rescan_values](#post-apifieldidrescan_values) + - [POST /api/field/:id/values](#post-apifieldidvalues) + - [PUT /api/field/:id](#put-apifieldid) -Remove the dimension associated to field at ID +### `DELETE /api/field/:id/dimension` + +Remove the dimension associated to field at ID. ##### PARAMS: * **`id`** - -## `GET /api/field/:id` +### `GET /api/field/:id` Get `Field` with ID. @@ -1559,8 +1709,7 @@ Get `Field` with ID. * **`id`** - -## `GET /api/field/:id/related` +### `GET /api/field/:id/related` Return related entities. @@ -1568,8 +1717,7 @@ Return related entities. 
* **`id`** - -## `GET /api/field/:id/remapping/:remapped-id` +### `GET /api/field/:id/remapping/:remapped-id` Fetch remapped Field values. @@ -1581,8 +1729,7 @@ Fetch remapped Field values. * **`value`** - -## `GET /api/field/:id/search/:search-id` +### `GET /api/field/:id/search/:search-id` Search for values of a Field with `search-id` that start with `value`. See docstring for `metabase.api.field/search-values` for a more detailed explanation. @@ -1595,10 +1742,7 @@ Search for values of a Field with `search-id` that start with `value`. See docst * **`value`** value must be a non-blank string. -* **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. - - -## `GET /api/field/:id/summary` +### `GET /api/field/:id/summary` Get the count and distinct count of `Field` with ID. @@ -1606,8 +1750,7 @@ Get the count and distinct count of `Field` with ID. * **`id`** - -## `GET /api/field/:id/values` +### `GET /api/field/:id/values` If a Field's value of `has_field_values` is `list`, return a list of all the distinct values of the Field, and (if defined by a User) a map of human-readable remapped values. @@ -1616,8 +1759,7 @@ If a Field's value of `has_field_values` is `list`, return a list of all the dis * **`id`** - -## `GET /api/field/field%2C:field-name%2C:options/values` +### `GET /api/field/field%2C:field-name%2C:options/values` Implementation of the field values endpoint for fields in the Saved Questions 'virtual' DB. This endpoint is just a convenience to simplify the frontend code. It just returns the standard 'empty' field values response. @@ -1626,10 +1768,9 @@ Implementation of the field values endpoint for fields in the Saved Questions 'v * **`_`** +### `POST /api/field/:id/dimension` -## `POST /api/field/:id/dimension` - -Sets the dimension for the given field at ID +Sets the dimension for the given field at ID. 
##### PARAMS: @@ -1641,8 +1782,7 @@ Sets the dimension for the given field at ID * **`human_readable_field_id`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `POST /api/field/:id/discard_values` +### `POST /api/field/:id/discard_values` Discard the FieldValues belonging to this Field. Only applies to fields that have FieldValues. If this Field's Database is set up to automatically sync FieldValues, they will be recreated during the next cycle. @@ -1653,8 +1793,7 @@ You must be a superuser to do this. * **`id`** - -## `POST /api/field/:id/rescan_values` +### `POST /api/field/:id/rescan_values` Manually trigger an update for the FieldValues for this Field. Only applies to Fields that are eligible for FieldValues. @@ -1665,8 +1804,7 @@ You must be a superuser to do this. * **`id`** - -## `POST /api/field/:id/values` +### `POST /api/field/:id/values` Update the fields values and human-readable values for a `Field` whose semantic type is `category`/`city`/`state`/`country` or whose base type is `type/Boolean`. The human-readable values are optional. @@ -1677,8 +1815,7 @@ Update the fields values and human-readable values for a `Field` whose semantic * **`value-pairs`** value must be an array. Each value must be an array. - -## `PUT /api/field/:id` +### `PUT /api/field/:id` Update `Field` with ID. @@ -1692,9 +1829,9 @@ Update `Field` with ID. * **`description`** value may be nil, or if non-nil, value must be a non-blank string. -* **`semantic_type`** value may be nil, or if non-nil, value must be a valid field type. +* **`semantic_type`** value may be nil, or if non-nil, value must be a valid field semantic or relation type (keyword or string). -* **`coercion_strategy`** value may be nil, or if non-nil, value must be a valid coercion type. +* **`coercion_strategy`** value may be nil, or if non-nil, value must be a valid coercion strategy (keyword or string). 
* **`has_field_values`** value may be nil, or if non-nil, value must be one of: `auto-list`, `list`, `none`, `search`. @@ -1707,7 +1844,25 @@ Update `Field` with ID. * **`id`** -## `GET /api/geojson/:key` +## Geojson + + - [GET /api/geojson/](#get-apigeojson) + - [GET /api/geojson/:key](#get-apigeojsonkey) + +### `GET /api/geojson/` + +Load a custom GeoJSON file based on a URL or file path provided as a query parameter. + This behaves similarly to /api/geojson/:key but doesn't require the custom map to be saved to the DB first. + +##### PARAMS: + +* **`url`** value must be a non-blank string. + +* **`respond`** + +* **`raise`** + +### `GET /api/geojson/:key` Fetch a custom GeoJSON file as defined in the `custom-geojson` setting. (This just acts as a simple proxy for the file specified for `key`). @@ -1721,7 +1876,13 @@ Fetch a custom GeoJSON file as defined in the `custom-geojson` setting. (This ju * **`raise`** -## `PUT /api/ldap/settings` +## Ldap + +/api/ldap endpoints. + + - [PUT /api/ldap/settings](#put-apildapsettings) + +### `PUT /api/ldap/settings` Update LDAP related settings. You must be a superuser to do this. @@ -1732,20 +1893,32 @@ You must be a superuser to do this. * **`settings`** value must be a map. -## `GET /api/login-history/current` +## Login history + + - [GET /api/login-history/current](#get-apilogin-historycurrent) + +### `GET /api/login-history/current` Fetch recent logins for the current user. -## `GET /api/metastore/token/status` +## Metric -Fetch info about the current MetaStore premium features token including whether it is `valid`, a `trial` token, its - `features`, and when it is `valid_thru`. +/api/metric endpoints. 
+ - [DELETE /api/metric/:id](#delete-apimetricid) + - [GET /api/metric/](#get-apimetric) + - [GET /api/metric/:id](#get-apimetricid) + - [GET /api/metric/:id/related](#get-apimetricidrelated) + - [GET /api/metric/:id/revisions](#get-apimetricidrevisions) + - [POST /api/metric/](#post-apimetric) + - [POST /api/metric/:id/revert](#post-apimetricidrevert) + - [PUT /api/metric/:id](#put-apimetricid) + - [PUT /api/metric/:id/important_fields](#put-apimetricidimportant_fields) -## `DELETE /api/metric/:id` +### `DELETE /api/metric/:id` -Archive a Metric. (DEPRECATED -- Just pass updated value of `:archived` to the `PUT` endpoint instead.) +Archive a Metric. (DEPRECATED -- Just pass updated value of `:archived` to the `PUT` endpoint instead.). ##### PARAMS: @@ -1753,8 +1926,7 @@ Archive a Metric. (DEPRECATED -- Just pass updated value of `:archived` to the ` * **`revision_message`** value must be a non-blank string. - -## `GET /api/metric/` +### `GET /api/metric/` Fetch *all* `Metrics`. @@ -1762,8 +1934,7 @@ Fetch *all* `Metrics`. * **`id`** - -## `GET /api/metric/:id` +### `GET /api/metric/:id` Fetch `Metric` with ID. @@ -1771,8 +1942,7 @@ Fetch `Metric` with ID. * **`id`** - -## `GET /api/metric/:id/related` +### `GET /api/metric/:id/related` Return related entities. @@ -1780,8 +1950,7 @@ Return related entities. * **`id`** - -## `GET /api/metric/:id/revisions` +### `GET /api/metric/:id/revisions` Fetch `Revisions` for `Metric` with ID. @@ -1789,8 +1958,7 @@ Fetch `Revisions` for `Metric` with ID. * **`id`** - -## `POST /api/metric/` +### `POST /api/metric/` Create a new `Metric`. @@ -1804,8 +1972,7 @@ Create a new `Metric`. * **`definition`** value must be a map. - -## `POST /api/metric/:id/revert` +### `POST /api/metric/:id/revert` Revert a `Metric` to a prior `Revision`. @@ -1815,8 +1982,7 @@ Revert a `Metric` to a prior `Revision`. * **`revision_id`** value must be an integer greater than zero. 
- -## `PUT /api/metric/:id` +### `PUT /api/metric/:id` Update a `Metric` with ID. @@ -1842,8 +2008,7 @@ Update a `Metric` with ID. * **`how_is_this_calculated`** value may be nil, or if non-nil, value must be a string. - -## `PUT /api/metric/:id/important_fields` +### `PUT /api/metric/:id/important_fields` Update the important `Fields` for a `Metric` with ID. (This is used for the Getting Started guide). @@ -1857,16 +2022,24 @@ You must be a superuser to do this. * **`important_field_ids`** value must be an array. Each value must be an integer greater than zero. -## `GET /api/native-query-snippet/` +## Native query snippet + +Native query snippet (/api/native-query-snippet) endpoints. -Fetch all snippets + - [GET /api/native-query-snippet/](#get-apinative-query-snippet) + - [GET /api/native-query-snippet/:id](#get-apinative-query-snippetid) + - [POST /api/native-query-snippet/](#post-apinative-query-snippet) + - [PUT /api/native-query-snippet/:id](#put-apinative-query-snippetid) + +### `GET /api/native-query-snippet/` + +Fetch all snippets. ##### PARAMS: * **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). - -## `GET /api/native-query-snippet/:id` +### `GET /api/native-query-snippet/:id` Fetch native query snippet with ID. @@ -1874,8 +2047,7 @@ Fetch native query snippet with ID. * **`id`** - -## `POST /api/native-query-snippet/` +### `POST /api/native-query-snippet/` Create a new `NativeQuerySnippet`. @@ -1889,8 +2061,7 @@ Create a new `NativeQuerySnippet`. * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `PUT /api/native-query-snippet/:id` +### `PUT /api/native-query-snippet/:id` Update an existing `NativeQuerySnippet`. @@ -1909,14 +2080,20 @@ Update an existing `NativeQuerySnippet`. * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. 
-## `POST /api/notify/db/:id` +## Notify + +/api/notify/* endpoints which receive inbound etl server notifications. + + - [POST /api/notify/db/:id](#post-apinotifydbid) + +### `POST /api/notify/db/:id` Notification about a potential schema change to one of our `Databases`. Caller can optionally specify a `:table_id` or `:table_name` in the body to limit updates to a single `Table`. Optional Parameter `:scan` can be `"full"` or `"schema"` for a full sync or a schema sync, available regardless if a `:table_id` or `:table_name` is passed. This endpoint is secured by an API key that needs to be passed as a `X-METABASE-APIKEY` header which needs to be defined in - the `MB_API_KEY` [environment variable](https://www.metabase.com/docs/latest/operations-guide/environment-variables.html#mb_api_key) + the `MB_API_KEY` [environment variable](https://www.metabase.com/docs/latest/operations-guide/environment-variables.html#mb_api_key). ##### PARAMS: @@ -1929,7 +2106,22 @@ Notification about a potential schema change to one of our `Databases`. * **`scan`** value may be nil, or if non-nil, value must be one of: `full`, `schema`. -## `DELETE /api/permissions/group/:group-id` +## Permissions + +/api/permissions endpoints. + + - [DELETE /api/permissions/group/:group-id](#delete-apipermissionsgroupgroup-id) + - [DELETE /api/permissions/membership/:id](#delete-apipermissionsmembershipid) + - [GET /api/permissions/graph](#get-apipermissionsgraph) + - [GET /api/permissions/group](#get-apipermissionsgroup) + - [GET /api/permissions/group/:id](#get-apipermissionsgroupid) + - [GET /api/permissions/membership](#get-apipermissionsmembership) + - [POST /api/permissions/group](#post-apipermissionsgroup) + - [POST /api/permissions/membership](#post-apipermissionsmembership) + - [PUT /api/permissions/graph](#put-apipermissionsgraph) + - [PUT /api/permissions/group/:group-id](#put-apipermissionsgroupgroup-id) + +### `DELETE /api/permissions/group/:group-id` Delete a specific `PermissionsGroup`. 
@@ -1939,8 +2131,7 @@ You must be a superuser to do this. * **`group-id`** - -## `DELETE /api/permissions/membership/:id` +### `DELETE /api/permissions/membership/:id` Remove a User from a PermissionsGroup (delete their membership). @@ -1950,22 +2141,19 @@ You must be a superuser to do this. * **`id`** - -## `GET /api/permissions/graph` +### `GET /api/permissions/graph` Fetch a graph of all Permissions. You must be a superuser to do this. - -## `GET /api/permissions/group` +### `GET /api/permissions/group` Fetch all `PermissionsGroups`, including a count of the number of `:members` in that group. You must be a superuser to do this. - -## `GET /api/permissions/group/:id` +### `GET /api/permissions/group/:id` Fetch the details for a certain permissions group. @@ -1975,19 +2163,17 @@ You must be a superuser to do this. * **`id`** - -## `GET /api/permissions/membership` +### `GET /api/permissions/membership` Fetch a map describing the group memberships of various users. This map's format is: { [{:membership_id - :group_id }]} + :group_id }]}. You must be a superuser to do this. - -## `POST /api/permissions/group` +### `POST /api/permissions/group` Create a new `PermissionsGroup`. @@ -1997,8 +2183,7 @@ You must be a superuser to do this. * **`name`** value must be a non-blank string. - -## `POST /api/permissions/membership` +### `POST /api/permissions/membership` Add a `User` to a `PermissionsGroup`. Returns updated list of members belonging to the group. @@ -2010,8 +2195,7 @@ You must be a superuser to do this. * **`user_id`** value must be an integer greater than zero. - -## `PUT /api/permissions/graph` +### `PUT /api/permissions/graph` Do a batch update of Permissions by passing in a modified graph. This should return the same graph, in the same format, that you got from `GET /api/permissions/graph`, with any changes made in the wherever necessary. This @@ -2028,8 +2212,7 @@ You must be a superuser to do this. * **`body`** value must be a map. 
- -## `PUT /api/permissions/group/:group-id` +### `PUT /api/permissions/group/:group-id` Update the name of a `PermissionsGroup`. @@ -2042,7 +2225,36 @@ You must be a superuser to do this. * **`name`** value must be a non-blank string. -## `GET /api/preview-embed/card/:token` +## Premium features + + - [GET /api/premium-features/token/status](#get-apipremium-featurestokenstatus) + +### `GET /api/premium-features/token/status` + +Fetch info about the current Premium-Features premium features token including whether it is `valid`, a `trial` token, its + `features`, and when it is `valid_thru`. + + +## Preview embed + +Endpoints for previewing how Cards and Dashboards will look when embedding them. + These endpoints are basically identical in functionality to the ones in `/api/embed`, but: + + 1. Require admin access + 2. Ignore the values of `:enabled_embedding` for Cards/Dashboards + 3. Ignore the `:embed_params` whitelist for Card/Dashboards, instead using a field called `:_embedding_params` in + the JWT token itself. + + Refer to the documentation for those endpoints for further details. + + - [GET /api/preview-embed/card/:token](#get-apipreview-embedcardtoken) + - [GET /api/preview-embed/card/:token/query](#get-apipreview-embedcardtokenquery) + - [GET /api/preview-embed/dashboard/:token](#get-apipreview-embeddashboardtoken) + - [GET /api/preview-embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id](#get-apipreview-embeddashboardtokendashcarddashcard-idcardcard-id) + - [GET /api/preview-embed/pivot/card/:token/query](#get-apipreview-embedpivotcardtokenquery) + - [GET /api/preview-embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id](#get-apipreview-embedpivotdashboardtokendashcarddashcard-idcardcard-id) + +### `GET /api/preview-embed/card/:token` Fetch a Card you're considering embedding by passing a JWT `token`. @@ -2050,8 +2262,7 @@ Fetch a Card you're considering embedding by passing a JWT `token`. 
* **`token`** - -## `GET /api/preview-embed/card/:token/query` +### `GET /api/preview-embed/card/:token/query` Fetch the query results for a Card you're considering embedding by passing a JWT `token`. @@ -2063,17 +2274,15 @@ Fetch the query results for a Card you're considering embedding by passing a JWT * **`token`** * **`query-params`** +### `GET /api/preview-embed/dashboard/:token` -## `GET /api/preview-embed/dashboard/:token` - -Fetch a Dashboard you're considering embedding by passing a JWT `token`. +Fetch a Dashboard you're considering embedding by passing a JWT `token`. ##### PARAMS: * **`token`** - -## `GET /api/preview-embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id` +### `GET /api/preview-embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id` Fetch the results of running a Card belonging to a Dashboard you're considering embedding with JWT `token`. @@ -2089,8 +2298,7 @@ Fetch the results of running a Card belonging to a Dashboard you're considering * **`query-params`** - -## `GET /api/preview-embed/pivot/card/:token/query` +### `GET /api/preview-embed/pivot/card/:token/query` Fetch the query results for a Card you're considering embedding by passing a JWT `token`. @@ -2102,8 +2310,7 @@ Fetch the query results for a Card you're considering embedding by passing a JWT * **`token`** * **`query-params`** - -## `GET /api/preview-embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id` +### `GET /api/preview-embed/pivot/dashboard/:token/dashcard/:dashcard-id/card/:card-id` Fetch the results of running a Card belonging to a Dashboard you're considering embedding with JWT `token`. @@ -2120,7 +2327,28 @@ Fetch the results of running a Card belonging to a Dashboard you're considering * **`query-params`** -## `GET /api/public/card/:uuid` +## Public +
+Metabase API endpoints for viewing publicly-accessible Cards and Dashboards. 
+ + - [GET /api/public/card/:uuid](#get-apipubliccarduuid) + - [GET /api/public/card/:uuid/field/:field-id/remapping/:remapped-id](#get-apipubliccarduuidfieldfield-idremappingremapped-id) + - [GET /api/public/card/:uuid/field/:field-id/search/:search-field-id](#get-apipubliccarduuidfieldfield-idsearchsearch-field-id) + - [GET /api/public/card/:uuid/field/:field-id/values](#get-apipubliccarduuidfieldfield-idvalues) + - [GET /api/public/card/:uuid/query](#get-apipubliccarduuidquery) + - [GET /api/public/card/:uuid/query/:export-format](#get-apipubliccarduuidqueryexport-format) + - [GET /api/public/dashboard/:uuid](#get-apipublicdashboarduuid) + - [GET /api/public/dashboard/:uuid/card/:card-id](#get-apipublicdashboarduuidcardcard-id) + - [GET /api/public/dashboard/:uuid/field/:field-id/remapping/:remapped-id](#get-apipublicdashboarduuidfieldfield-idremappingremapped-id) + - [GET /api/public/dashboard/:uuid/field/:field-id/search/:search-field-id](#get-apipublicdashboarduuidfieldfield-idsearchsearch-field-id) + - [GET /api/public/dashboard/:uuid/field/:field-id/values](#get-apipublicdashboarduuidfieldfield-idvalues) + - [GET /api/public/dashboard/:uuid/params/:param-key/search/:query](#get-apipublicdashboarduuidparamsparam-keysearchquery) + - [GET /api/public/dashboard/:uuid/params/:param-key/values](#get-apipublicdashboarduuidparamsparam-keyvalues) + - [GET /api/public/oembed](#get-apipublicoembed) + - [GET /api/public/pivot/card/:uuid/query](#get-apipublicpivotcarduuidquery) + - [GET /api/public/pivot/dashboard/:uuid/card/:card-id](#get-apipublicpivotdashboarduuidcardcard-id) + +### `GET /api/public/card/:uuid` Fetch a publicly-accessible Card an return query results as well as `:card` information. Does not require auth credentials. Public sharing must be enabled. 
@@ -2129,8 +2357,7 @@ Fetch a publicly-accessible Card an return query results as well as `:card` info * **`uuid`** - -## `GET /api/public/card/:uuid/field/:field-id/remapping/:remapped-id` +### `GET /api/public/card/:uuid/field/:field-id/remapping/:remapped-id` Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with public Cards. @@ -2145,8 +2372,7 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/: * **`value`** value must be a non-blank string. - -## `GET /api/public/card/:uuid/field/:field-id/search/:search-field-id` +### `GET /api/public/card/:uuid/field/:field-id/search/:search-field-id` Search for values of a Field that is referenced by a public Card. @@ -2162,8 +2388,7 @@ Search for values of a Field that is referenced by a public Card. * **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. - -## `GET /api/public/card/:uuid/field/:field-id/values` +### `GET /api/public/card/:uuid/field/:field-id/values` Fetch FieldValues for a Field that is referenced by a public Card. @@ -2173,8 +2398,7 @@ Fetch FieldValues for a Field that is referenced by a public Card. * **`field-id`** - -## `GET /api/public/card/:uuid/query` +### `GET /api/public/card/:uuid/query` Fetch a publicly-accessible Card an return query results as well as `:card` information. Does not require auth credentials. Public sharing must be enabled. @@ -2185,8 +2409,7 @@ Fetch a publicly-accessible Card an return query results as well as `:card` info * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. - -## `GET /api/public/card/:uuid/query/:export-format` +### `GET /api/public/card/:uuid/query/:export-format` Fetch a publicly-accessible Card and return query results in the specified format. Does not require auth credentials. Public sharing must be enabled. 
@@ -2199,8 +2422,7 @@ Fetch a publicly-accessible Card and return query results in the specified forma * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. - -## `GET /api/public/dashboard/:uuid` +### `GET /api/public/dashboard/:uuid` Fetch a publicly-accessible Dashboard. Does not require auth credentials. Public sharing must be enabled. @@ -2208,8 +2430,7 @@ Fetch a publicly-accessible Dashboard. Does not require auth credentials. Public * **`uuid`** - -## `GET /api/public/dashboard/:uuid/card/:card-id` +### `GET /api/public/dashboard/:uuid/card/:card-id` Fetch the results for a Card in a publicly-accessible Dashboard. Does not require auth credentials. Public sharing must be enabled. @@ -2222,8 +2443,7 @@ Fetch the results for a Card in a publicly-accessible Dashboard. Does not requir * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. - -## `GET /api/public/dashboard/:uuid/field/:field-id/remapping/:remapped-id` +### `GET /api/public/dashboard/:uuid/field/:field-id/remapping/:remapped-id` Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with public Dashboards. @@ -2238,8 +2458,7 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/: * **`value`** value must be a non-blank string. - -## `GET /api/public/dashboard/:uuid/field/:field-id/search/:search-field-id` +### `GET /api/public/dashboard/:uuid/field/:field-id/search/:search-field-id` Search for values of a Field that is referenced by a Card in a public Dashboard. @@ -2255,8 +2474,7 @@ Search for values of a Field that is referenced by a Card in a public Dashboard. * **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. 
- -## `GET /api/public/dashboard/:uuid/field/:field-id/values` +### `GET /api/public/dashboard/:uuid/field/:field-id/values` Fetch FieldValues for a Field that is referenced by a Card in a public Dashboard. @@ -2266,8 +2484,7 @@ Fetch FieldValues for a Field that is referenced by a Card in a public Dashboard * **`field-id`** - -## `GET /api/public/dashboard/:uuid/params/:param-key/search/:query` +### `GET /api/public/dashboard/:uuid/params/:param-key/search/:query` Fetch filter values for dashboard parameter `param-key`, containing specified `query`. @@ -2281,8 +2498,7 @@ Fetch filter values for dashboard parameter `param-key`, containing specified `q * **`query-params`** - -## `GET /api/public/dashboard/:uuid/params/:param-key/values` +### `GET /api/public/dashboard/:uuid/params/:param-key/values` Fetch filter values for dashboard parameter `param-key`. @@ -2294,8 +2510,7 @@ Fetch filter values for dashboard parameter `param-key`. * **`query-params`** - -## `GET /api/public/oembed` +### `GET /api/public/oembed` oEmbed endpoint used to retreive embed code and metadata for a (public) Metabase URL. @@ -2309,8 +2524,7 @@ oEmbed endpoint used to retreive embed code and metadata for a (public) Metabase * **`maxwidth`** value may be nil, or if non-nil, value must be a valid integer. - -## `GET /api/public/pivot/card/:uuid/query` +### `GET /api/public/pivot/card/:uuid/query` Fetch a publicly-accessible Card an return query results as well as `:card` information. Does not require auth credentials. Public sharing must be enabled. @@ -2321,8 +2535,7 @@ Fetch a publicly-accessible Card an return query results as well as `:card` info * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. - -## `GET /api/public/pivot/dashboard/:uuid/card/:card-id` +### `GET /api/public/pivot/dashboard/:uuid/card/:card-id` Fetch the results for a Card in a publicly-accessible Dashboard. Does not require auth credentials. Public sharing must be enabled. 
@@ -2336,16 +2549,22 @@ Fetch the results for a Card in a publicly-accessible Dashboard. Does not requir * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. -## `DELETE /api/pulse/:id` - -Delete a Pulse. (DEPRECATED -- don't delete a Pulse anymore -- archive it instead.) - -##### PARAMS: +## Pulse -* **`id`** +/api/pulse endpoints. + - [DELETE /api/pulse/:id/subscription](#delete-apipulseidsubscription) + - [GET /api/pulse/](#get-apipulse) + - [GET /api/pulse/:id](#get-apipulseid) + - [GET /api/pulse/form_input](#get-apipulseform_input) + - [GET /api/pulse/preview_card/:id](#get-apipulsepreview_cardid) + - [GET /api/pulse/preview_card_info/:id](#get-apipulsepreview_card_infoid) + - [GET /api/pulse/preview_card_png/:id](#get-apipulsepreview_card_pngid) + - [POST /api/pulse/](#post-apipulse) + - [POST /api/pulse/test](#post-apipulsetest) + - [PUT /api/pulse/:id](#put-apipulseid) -## `DELETE /api/pulse/:id/subscription/email` +### `DELETE /api/pulse/:id/subscription` For users to unsubscribe themselves from a pulse subscription. @@ -2353,10 +2572,11 @@ For users to unsubscribe themselves from a pulse subscription. * **`id`** +### `GET /api/pulse/` -## `GET /api/pulse/` - -Fetch all Pulses +Fetch all Pulses. If `dashboard_id` is specified, restricts results to dashboard subscriptions + associated with that dashboard. If `user_id` is specified, restricts results to pulses or subscriptions + created by the user, or for which the user is a known recipient. ##### PARAMS: @@ -2364,8 +2584,9 @@ Fetch all Pulses * **`dashboard_id`** value may be nil, or if non-nil, value must be an integer greater than zero. +* **`user_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `GET /api/pulse/:id` +### `GET /api/pulse/:id` Fetch `Pulse` with ID. @@ -2373,13 +2594,11 @@ Fetch `Pulse` with ID. 
* **`id`** - -## `GET /api/pulse/form_input` +### `GET /api/pulse/form_input` Provides relevant configuration information and user choices for creating/updating Pulses. - -## `GET /api/pulse/preview_card/:id` +### `GET /api/pulse/preview_card/:id` Get HTML rendering of a Card with `id`. @@ -2387,8 +2606,7 @@ Get HTML rendering of a Card with `id`. * **`id`** - -## `GET /api/pulse/preview_card_info/:id` +### `GET /api/pulse/preview_card_info/:id` Get JSON object containing HTML rendering of a Card with `id` and other information. @@ -2396,8 +2614,7 @@ Get JSON object containing HTML rendering of a Card with `id` and other informat * **`id`** - -## `GET /api/pulse/preview_card_png/:id` +### `GET /api/pulse/preview_card_png/:id` Get PNG rendering of a Card with `id`. @@ -2405,8 +2622,7 @@ Get PNG rendering of a Card with `id`. * **`id`** - -## `POST /api/pulse/` +### `POST /api/pulse/` Create a new `Pulse`. @@ -2428,8 +2644,7 @@ Create a new `Pulse`. * **`parameters`** value must be an array. Each value must be a map. - -## `POST /api/pulse/test` +### `POST /api/pulse/test` Test send an unsaved pulse. @@ -2449,8 +2664,7 @@ Test send an unsaved pulse. * **`dashboard_id`** value may be nil, or if non-nil, value must be an integer greater than zero. - -## `PUT /api/pulse/:id` +### `PUT /api/pulse/:id` Update a Pulse with `id`. @@ -2475,7 +2689,12 @@ Update a Pulse with `id`. * **`pulse-updates`** -## `GET /api/revision/` +## Revision + + - [GET /api/revision/](#get-apirevision) + - [POST /api/revision/revert](#post-apirevisionrevert) + +### `GET /api/revision/` Get revisions of an object. @@ -2485,8 +2704,7 @@ Get revisions of an object. * **`id`** value must be an integer. - -## `POST /api/revision/revert` +### `POST /api/revision/revert` Revert an object to a prior revision. @@ -2499,9 +2717,22 @@ Revert an object to a prior revision. * **`revision_id`** value must be an integer. 
-## `GET /api/search/` +## Search + + - [GET /api/search/](#get-apisearch) + - [GET /api/search/models](#get-apisearchmodels) -Search Cards, Dashboards, Collections and Pulses for the substring `q`. +### `GET /api/search/` + +Search within a bunch of models for the substring `q`. + For the list of models, check `metabase.search.config/searchable-models. + + To search in archived portions of models, pass in `archived=true`. + If you want, while searching tables, only tables of a certain DB id, + pass in a DB id value to `table_db_id`. + + To specify a list of models, pass in an array to `models`. + . ##### PARAMS: @@ -2509,10 +2740,39 @@ Search Cards, Dashboards, Collections and Pulses for the substring `q`. * **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). +* **`table_db_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `DELETE /api/segment/:id` +* **`models`** value may be nil, or if non-nil, value must satisfy one of the following requirements: 1) value must be an array. Each value must be a non-blank string. 2) value must be a non-blank string. -Archive a Segment. (DEPRECATED -- Just pass updated value of `:archived` to the `PUT` endpoint instead.) +### `GET /api/search/models` + +Get the set of models that a search query will return. + +##### PARAMS: + +* **`q`** + +* **`archived-string`** + +* **`table-db-id`** + + +## Segment + +/api/segment endpoints. + + - [DELETE /api/segment/:id](#delete-apisegmentid) + - [GET /api/segment/](#get-apisegment) + - [GET /api/segment/:id](#get-apisegmentid) + - [GET /api/segment/:id/related](#get-apisegmentidrelated) + - [GET /api/segment/:id/revisions](#get-apisegmentidrevisions) + - [POST /api/segment/](#post-apisegment) + - [POST /api/segment/:id/revert](#post-apisegmentidrevert) + - [PUT /api/segment/:id](#put-apisegmentid) + +### `DELETE /api/segment/:id` + +Archive a Segment. 
(DEPRECATED -- Just pass updated value of `:archived` to the `PUT` endpoint instead.). ##### PARAMS: @@ -2520,13 +2780,11 @@ Archive a Segment. (DEPRECATED -- Just pass updated value of `:archived` to the * **`revision_message`** value must be a non-blank string. - -## `GET /api/segment/` +### `GET /api/segment/` Fetch *all* `Segments`. - -## `GET /api/segment/:id` +### `GET /api/segment/:id` Fetch `Segment` with ID. @@ -2534,8 +2792,7 @@ Fetch `Segment` with ID. * **`id`** - -## `GET /api/segment/:id/related` +### `GET /api/segment/:id/related` Return related entities. @@ -2543,8 +2800,7 @@ Return related entities. * **`id`** - -## `GET /api/segment/:id/revisions` +### `GET /api/segment/:id/revisions` Fetch `Revisions` for `Segment` with ID. @@ -2552,8 +2808,7 @@ Fetch `Revisions` for `Segment` with ID. * **`id`** - -## `POST /api/segment/` +### `POST /api/segment/` Create a new `Segment`. @@ -2567,8 +2822,7 @@ Create a new `Segment`. * **`definition`** value must be a map. - -## `POST /api/segment/:id/revert` +### `POST /api/segment/:id/revert` Revert a `Segement` to a prior `Revision`. @@ -2578,8 +2832,7 @@ Revert a `Segement` to a prior `Revision`. * **`revision_id`** value must be an integer greater than zero. - -## `PUT /api/segment/:id` +### `PUT /api/segment/:id` Update a `Segment` with ID. @@ -2604,7 +2857,19 @@ Update a `Segment` with ID. * **`id`** -## `DELETE /api/session/` +## Session + +/api/session endpoints. + + - [DELETE /api/session/](#delete-apisession) + - [GET /api/session/password_reset_token_valid](#get-apisessionpassword_reset_token_valid) + - [GET /api/session/properties](#get-apisessionproperties) + - [POST /api/session/](#post-apisession) + - [POST /api/session/forgot_password](#post-apisessionforgot_password) + - [POST /api/session/google_auth](#post-apisessiongoogle_auth) + - [POST /api/session/reset_password](#post-apisessionreset_password) + +### `DELETE /api/session/` Logout. @@ -2612,8 +2877,7 @@ Logout. 
* **`metabase-session-id`** - -## `GET /api/session/password_reset_token_valid` +### `GET /api/session/password_reset_token_valid` Check is a password reset token is valid and isn't expired. @@ -2621,13 +2885,11 @@ Check is a password reset token is valid and isn't expired. * **`token`** value must be a string. - -## `GET /api/session/properties` +### `GET /api/session/properties` Get all global properties and their values. These are the specific `Settings` which are meant to be public. - -## `POST /api/session/` +### `POST /api/session/` Login. @@ -2639,8 +2901,7 @@ Login. * **`request`** - -## `POST /api/session/forgot_password` +### `POST /api/session/forgot_password` Send a reset email when user has forgotten their password. @@ -2652,8 +2913,7 @@ Send a reset email when user has forgotten their password. * **`request`** - -## `POST /api/session/google_auth` +### `POST /api/session/google_auth` Login with Google Auth. @@ -2663,8 +2923,7 @@ Login with Google Auth. * **`request`** - -## `POST /api/session/reset_password` +### `POST /api/session/reset_password` Reset password with a reset token. @@ -2672,19 +2931,27 @@ Reset password with a reset token. * **`token`** value must be a non-blank string. -* **`password`** Password is insufficiently complex, or is too common +* **`password`** password is too common. * **`request`** -## `GET /api/setting/` +## Setting + +/api/setting endpoints. + + - [GET /api/setting/](#get-apisetting) + - [GET /api/setting/:key](#get-apisettingkey) + - [PUT /api/setting/](#put-apisetting) + - [PUT /api/setting/:key](#put-apisettingkey) + +### `GET /api/setting/` Get all `Settings` and their values. You must be a superuser to do this. You must be a superuser to do this. - -## `GET /api/setting/:key` +### `GET /api/setting/:key` Fetch a single `Setting`. You must be a superuser to do this. @@ -2694,8 +2961,7 @@ You must be a superuser to do this. * **`key`** value must be a non-blank string. 
- -## `PUT /api/setting/` +### `PUT /api/setting/` Update multiple `Settings` values. You must be a superuser to do this. @@ -2705,8 +2971,7 @@ You must be a superuser to do this. * **`settings`** - -## `PUT /api/setting/:key` +### `PUT /api/setting/:key` Create/update a `Setting`. You must be a superuser to do this. This endpoint can also be used to delete Settings by passing `nil` for `:value`. @@ -2720,14 +2985,19 @@ You must be a superuser to do this. * **`value`** -## `GET /api/setup/admin_checklist` +## Setup + + - [GET /api/setup/admin_checklist](#get-apisetupadmin_checklist) + - [POST /api/setup/](#post-apisetup) + - [POST /api/setup/validate](#post-apisetupvalidate) + +### `GET /api/setup/admin_checklist` Return various "admin checklist" steps and whether they've been completed. You must be a superuser to see this! You must be a superuser to do this. - -## `POST /api/setup/` +### `POST /api/setup/` Special endpoint for creating the first user during setup. This endpoint both creates the user AND logs them in and returns a session ID. @@ -2750,7 +3020,7 @@ Special endpoint for creating the first user during setup. This endpoint both cr * **`auto_run_queries`** value may be nil, or if non-nil, value must be a boolean. -* **`password`** Password is insufficiently complex, or is too common +* **`password`** password is too common. * **`name`** @@ -2768,8 +3038,7 @@ Special endpoint for creating the first user during setup. This endpoint both cr * **`last_name`** value must be a non-blank string. - -## `POST /api/setup/validate` +### `POST /api/setup/validate` Validate that we can connect to a database given a set of details. @@ -2782,7 +3051,13 @@ Validate that we can connect to a database given a set of details. * **`token`** Token does not match the setup token. -## `PUT /api/slack/settings` +## Slack + +/api/slack endpoints. + + - [PUT /api/slack/settings](#put-apislacksettings) + +### `PUT /api/slack/settings` Update Slack related settings. 
You must be a superuser to do this. @@ -2797,12 +3072,28 @@ You must be a superuser to do this. * **`slack-settings`** -## `GET /api/table/` +## Table -Get all `Tables`. +/api/table endpoints. + - [GET /api/table/](#get-apitable) + - [GET /api/table/:id](#get-apitableid) + - [GET /api/table/:id/fks](#get-apitableidfks) + - [GET /api/table/:id/query_metadata](#get-apitableidquery_metadata) + - [GET /api/table/:id/related](#get-apitableidrelated) + - [GET /api/table/card__:id/fks](#get-apitablecard__idfks) + - [GET /api/table/card__:id/query_metadata](#get-apitablecard__idquery_metadata) + - [POST /api/table/:id/discard_values](#post-apitableiddiscard_values) + - [POST /api/table/:id/rescan_values](#post-apitableidrescan_values) + - [PUT /api/table/](#put-apitable) + - [PUT /api/table/:id](#put-apitableid) + - [PUT /api/table/:id/fields/order](#put-apitableidfieldsorder) -## `GET /api/table/:id` +### `GET /api/table/` + +Get all `Tables`. + +### `GET /api/table/:id` Get `Table` with ID. @@ -2810,8 +3101,7 @@ Get `Table` with ID. * **`id`** - -## `GET /api/table/:id/fks` +### `GET /api/table/:id/fks` Get all foreign keys whose destination is a `Field` that belongs to this `Table`. @@ -2819,8 +3109,7 @@ Get all foreign keys whose destination is a `Field` that belongs to this `Table` * **`id`** - -## `GET /api/table/:id/query_metadata` +### `GET /api/table/:id/query_metadata` Get metadata about a `Table` useful for running queries. Returns DB, fields, field FKs, and field values. @@ -2838,8 +3127,7 @@ Get metadata about a `Table` useful for running queries. * **`include_hidden_fields`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). - -## `GET /api/table/:id/related` +### `GET /api/table/:id/related` Return related entities. @@ -2847,14 +3135,12 @@ Return related entities. * **`id`** - -## `GET /api/table/card__:id/fks` +### `GET /api/table/card__:id/fks` Return FK info for the 'virtual' table for a Card. 
This is always empty, so this endpoint serves mainly as a placeholder to avoid having to change anything on the frontend. - -## `GET /api/table/card__:id/query_metadata` +### `GET /api/table/card__:id/query_metadata` Return metadata for the 'virtual' table for a Card. @@ -2862,8 +3148,7 @@ Return metadata for the 'virtual' table for a Card. * **`id`** - -## `POST /api/table/:id/discard_values` +### `POST /api/table/:id/discard_values` Discard the FieldValues belonging to the Fields in this Table. Only applies to fields that have FieldValues. If this Table's Database is set up to automatically sync FieldValues, they will be recreated during the next cycle. @@ -2874,8 +3159,7 @@ You must be a superuser to do this. * **`id`** - -## `POST /api/table/:id/rescan_values` +### `POST /api/table/:id/rescan_values` Manually trigger an update for the FieldValues for the Fields belonging to this Table. Only applies to Fields that are eligible for FieldValues. @@ -2886,8 +3170,7 @@ You must be a superuser to do this. * **`id`** - -## `PUT /api/table/` +### `PUT /api/table/` Update all `Table` in `ids`. @@ -2909,8 +3192,7 @@ Update all `Table` in `ids`. * **`show_in_getting_started`** value may be nil, or if non-nil, value must be a boolean. - -## `PUT /api/table/:id` +### `PUT /api/table/:id` Update `Table` with ID. @@ -2934,10 +3216,9 @@ Update `Table` with ID. * **`id`** +### `PUT /api/table/:id/fields/order` -## `PUT /api/table/:id/fields/order` - -Reorder fields +Reorder fields. You must be a superuser to do this. @@ -2948,20 +3229,21 @@ You must be a superuser to do this. * **`field_order`** value must be an array. Each value must be an integer greater than zero. -## `GET /api/task/` +## Task -Fetch a list of recent tasks stored as Task History +/api/task endpoints. -You must be a superuser to do this. 
- -##### PARAMS: + - [GET /api/task/](#get-apitask) + - [GET /api/task/:id](#get-apitaskid) + - [GET /api/task/info](#get-apitaskinfo) -* **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. +### `GET /api/task/` -* **`offset`** value may be nil, or if non-nil, value must be a valid integer greater than or equal to zero. +Fetch a list of recent tasks stored as Task History. +You must be a superuser to do this. -## `GET /api/task/:id` +### `GET /api/task/:id` Get `TaskHistory` entry with ID. @@ -2969,15 +3251,20 @@ Get `TaskHistory` entry with ID. * **`id`** - -## `GET /api/task/info` +### `GET /api/task/info` Return raw data about all scheduled tasks (i.e., Quartz Jobs and Triggers). You must be a superuser to do this. -## `GET /api/tiles/:zoom/:x/:y/:lat-field-id/:lon-field-id/:lat-col-idx/:lon-col-idx/` +## Tiles + +`/api/tiles` endpoints. + + - [GET /api/tiles/:zoom/:x/:y/:lat-field-id/:lon-field-id/:lat-col-idx/:lon-col-idx/](#get-apitileszoomxylat-field-idlon-field-idlat-col-idxlon-col-idx) + +### `GET /api/tiles/:zoom/:x/:y/:lat-field-id/:lon-field-id/:lat-col-idx/:lon-col-idx/` This endpoints provides an image with the appropriate pins rendered given a MBQL `query` (passed as a GET query string param). We evaluate the query and find the set of lat/lon pairs which are relevant and then render the @@ -3003,9 +3290,13 @@ This endpoints provides an image with the appropriate pins rendered given a MBQL * **`query`** value must be a valid JSON string. -## `GET /api/transform/:db-id/:schema/:transform-name` +## Transform -Look up a database schema transform + - [GET /api/transform/:db-id/:schema/:transform-name](#get-apitransformdb-idschematransform-name) + +### `GET /api/transform/:db-id/:schema/:transform-name` + +Look up a database schema transform. ##### PARAMS: @@ -3016,7 +3307,22 @@ Look up a database schema transform * **`transform-name`** -## `DELETE /api/user/:id` +## User + +/api/user endpoints. 
+ + - [DELETE /api/user/:id](#delete-apiuserid) + - [GET /api/user/](#get-apiuser) + - [GET /api/user/:id](#get-apiuserid) + - [GET /api/user/current](#get-apiusercurrent) + - [POST /api/user/](#post-apiuser) + - [POST /api/user/:id/send_invite](#post-apiuseridsend_invite) + - [PUT /api/user/:id](#put-apiuserid) + - [PUT /api/user/:id/password](#put-apiuseridpassword) + - [PUT /api/user/:id/qbnewb](#put-apiuseridqbnewb) + - [PUT /api/user/:id/reactivate](#put-apiuseridreactivate) + +### `DELETE /api/user/:id` Disable a `User`. This does not remove the `User` from the DB, but instead disables their account. @@ -3026,19 +3332,32 @@ You must be a superuser to do this. * **`id`** +### `GET /api/user/` -## `GET /api/user/` +Fetch a list of `Users`. By default returns every active user but only active users. -Fetch a list of `Users` for the admin People page or for Pulses. By default returns only active users. If - `include_deactivated` is true, return all Users (active and inactive). (Using `include_deactivated` requires - superuser permissions.). For users with segmented permissions, return only themselves. + If `status` is `deactivated`, include deactivated users only. + If `status` is `all`, include all users (active and inactive). + Also supports `include_deactivated`, which if true, is equivalent to `status=all`. + `status` and `included_deactivated` requires superuser permissions. + + For users with segmented permissions, return only themselves. + + Takes `limit`, `offset` for pagination. + Takes `query` for filtering on first name, last name, email. + Also takes `group_id`, which filters on group id. ##### PARAMS: -* **`include_deactivated`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). +* **`status`** value may be nil, or if non-nil, value must be a string. +* **`query`** value may be nil, or if non-nil, value must be a string. 
+ +* **`group_id`** value may be nil, or if non-nil, value must be an integer greater than zero. + +* **`include_deactivated`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). -## `GET /api/user/:id` +### `GET /api/user/:id` Fetch a `User`. You must be fetching yourself *or* be a superuser. @@ -3046,15 +3365,13 @@ Fetch a `User`. You must be fetching yourself *or* be a superuser. * **`id`** - -## `GET /api/user/current` +### `GET /api/user/current` Fetch the current `User`. +### `POST /api/user/` -## `POST /api/user/` - -Create a new `User`, return a 400 if the email address is already taken +Create a new `User`, return a 400 if the email address is already taken. You must be a superuser to do this. @@ -3072,8 +3389,7 @@ You must be a superuser to do this. * **`login_attributes`** value may be nil, or if non-nil, login attribute keys must be a keyword or string - -## `POST /api/user/:id/send_invite` +### `POST /api/user/:id/send_invite` Resend the user invite email for a given user. @@ -3083,8 +3399,7 @@ You must be a superuser to do this. * **`id`** - -## `PUT /api/user/:id` +### `PUT /api/user/:id` Update an existing, active `User`. @@ -3106,8 +3421,7 @@ Update an existing, active `User`. * **`locale`** value may be nil, or if non-nil, String must be a valid two-letter ISO language or language-country code e.g. en or en_US. - -## `PUT /api/user/:id/password` +### `PUT /api/user/:id/password` Update a user's password. @@ -3115,12 +3429,11 @@ Update a user's password. * **`id`** -* **`password`** Password is insufficiently complex, or is too common +* **`password`** password is too common. * **`old_password`** - -## `PUT /api/user/:id/qbnewb` +### `PUT /api/user/:id/qbnewb` Indicate that a user has been informed about the vast intricacies of 'the' Query Builder. 
@@ -3128,10 +3441,9 @@ Indicate that a user has been informed about the vast intricacies of 'the' Query

##### PARAMS:

* **`id`**

+### `PUT /api/user/:id/reactivate`

-## `PUT /api/user/:id/reactivate`
-
-Reactivate user at `:id`
+Reactivate user at `:id`.

You must be a superuser to do this.

@@ -3140,38 +3452,52 @@ You must be a superuser to do this.

##### PARAMS:

* **`id`**

-## `GET /api/util/bug_report_details`
+## Util
+
+Random utility endpoints for things that don't belong anywhere else in particular, e.g. endpoints for certain admin
+  page tasks.
+
+  - [GET /api/util/bug_report_details](#get-apiutilbug_report_details)
+  - [GET /api/util/diagnostic_info/connection_pool_info](#get-apiutildiagnostic_infoconnection_pool_info)
+  - [GET /api/util/logs](#get-apiutillogs)
+  - [GET /api/util/random_token](#get-apiutilrandom_token)
+  - [GET /api/util/stats](#get-apiutilstats)
+  - [POST /api/util/password_check](#post-apiutilpassword_check)
+
+### `GET /api/util/bug_report_details`

Returns version and system information relevant to filing a bug report against Metabase.

You must be a superuser to do this.

+### `GET /api/util/diagnostic_info/connection_pool_info`

-## `GET /api/util/logs`
+Returns database connection pool info for the current Metabase instance.
+
+You must be a superuser to do this.
+
+### `GET /api/util/logs`

Logs.

You must be a superuser to do this.

-
-## `GET /api/util/random_token`
+### `GET /api/util/random_token`

Return a cryptographically secure random 32-byte token, encoded as a hexadecimal string.
  Intended for use when creating a value for `embedding-secret-key`.

-
-## `GET /api/util/stats`
+### `GET /api/util/stats`

Anonymous usage stats. Endpoint for testing, and eventually exposing this to instance admins to let them see
  what is being phoned home.

You must be a superuser to do this.

-
-## `POST /api/util/password_check`
+### `POST /api/util/password_check`

Endpoint that checks if the supplied password meets the currently configured password complexity rules.
##### PARAMS: -* **`password`** Password is insufficiently complex, or is too common +* **`password`** password is too common. \ No newline at end of file diff --git a/docs/developers-guide-vscode.md b/docs/developers-guide-vscode.md deleted file mode 100644 index 032a374f816b..000000000000 --- a/docs/developers-guide-vscode.md +++ /dev/null @@ -1,36 +0,0 @@ -# Developing Metabase with Visual Studio Code - -These instructions allow you to work on Metabase codebase on Windows, Linux, or macOS using [Visual Studio Code](https://code.visualstudio.com/), **without** manually installing the necessary dependencies. This is possible by leveraging Docker container and the Remote Containers extension from VS Code. - -For more details, please follow the complete VS Code guide on [Developing inside a Container](https://code.visualstudio.com/docs/remote/containers). The summary is as follows. - -Requirements: - -* [Visual Studio Code](https://code.visualstudio.com/) (obviously) -* [Docker](https://www.docker.com/) -* [Remote - Containers extension](vscode:extension/ms-vscode-remote.remote-containers) for VS Code - -_Important_: Ensure that Docker is running properly and it can be used to download an image and launch a container, e.g. by running: - -``` -$ docker run hello-world -``` -If everything goes well, you should see the following message: - -``` -Hello from Docker! -This message shows that your installation appears to be working correctly. -``` - -Steps: - -1. Clone Metabase repository - -2. Launch VS Code and open your cloned Metabase repository - -3. From the _View_ menu, choose _Command Palette..._ and then find _Remote-Container: Reopen in Container_. (VS Code may also prompt you to do this with an "Open in container" popup). -**Note**: VS Code will create the container for the first time and it may take some time. Subsequent loads should be much faster. - -4. 
Use the menu _View_, _Command Palette_, search for and choose _Tasks: Run Build Task_ (alternatively, use the shortcut `Ctrl+Shift+B`). - -5. After a while (after all JavaScript and Clojure dependencies are completely downloaded), open localhost:3000 with your web browser. diff --git a/docs/developers-guide.md b/docs/developers-guide.md index 4380e1648663..1fda11bb93e6 100644 --- a/docs/developers-guide.md +++ b/docs/developers-guide.md @@ -1,253 +1,5 @@ -**This guide will teach you:** + -- [How to compile your own copy of Metabase](#build-metabase) -- [How to set up a development environment](#development-environment) -- [How to run the Metabase Server](#development-server-quick-start) -- [How to contribute back to the Metabase project](#contributing) -- [How to add support in Metabase for other languages](#internationalization) - -# Contributing - -In general, we like to have an open issue for every pull request as a place to discuss the nature of any bug or proposed improvement. Each pull request should address a single issue, and contain both the fix as well as a description of the pull request and tests that validate that the PR fixes the issue in question. - -For significant feature additions, it is expected that discussion will have taken place in the attached issue. Any feature that requires a major decision to be reached will need to have an explicit design document written. The goals of this document are to make explicit the assumptions, constraints and tradeoffs any given feature implementation will contain. The point is not to generate documentation but to allow discussion to reference a specific proposed design and to allow others to consider the implications of a given design. - -We don't like getting sued, so before merging any pull request, we'll need each person contributing code to sign a Contributor License Agreement [here](https://docs.google.com/a/metabase.com/forms/d/1oV38o7b9ONFSwuzwmERRMi9SYrhYeOrkbmNaq9pOJ_E/viewform). 
- -# Development on Windows - -The development scripts are designed for Linux/Mac environment, so we recommend using the latest Windows 10 version with [WSL (Windows Subsystem for Linux)](https://msdn.microsoft.com/en-us/commandline/wsl/about) and [Ubuntu on Windows](https://www.microsoft.com/store/p/ubuntu/9nblggh4msv6). The Ubuntu Bash shell works well for both backend and frontend development. - -If you have problems with your development environment, make sure that you are not using any development commands outside the Bash shell. As an example, Node dependencies installed in normal Windows environment will not work inside Ubuntu Bash environment. - -# Install Prerequisites - -These are the tools which are required in order to complete any build of the Metabase code. Follow the links to download and install them on your own before continuing. - -1. [Clojure (https://clojure.org)](https://clojure.org/guides/getting_started) - install the latest release by following the guide depending on your OS -2. [Java Development Kit JDK (https://adoptopenjdk.net/releases.html)](https://adoptopenjdk.net/releases.html) - you need to install JDK 11 ([more info on Java versions](./operations-guide/java-versions.md)) -3. [Node.js (http://nodejs.org/)](http://nodejs.org/) - latest LTS release -4. [Yarn package manager for Node.js](https://yarnpkg.com/) - latest release of version 1.x - you can install it in any OS by doing `npm install --global yarn` -5. 
[Leiningen (http://leiningen.org/)](http://leiningen.org/) - latest release - -On a most recent stable Ubuntu/Debian, all the tools above, with the exception of Clojure, can be installed by using: - -``` -sudo apt install openjdk-11-jdk nodejs leiningen && sudo npm install --global yarn -``` -If you have multiple JDK versions installed in your machine, be sure to switch your JDK before building by doing `sudo update-alternatives --config java` and selecting Java 11 in the menu - -If you are developing on Windows, make sure to use Ubuntu on Windows and follow instructions for Ubuntu/Linux instead of installing ordinary Windows versions. - -Alternatively, without the need to explicitly install the above dependencies, follow the guide [on using Visual Studio Code](developers-guide-vscode.md) and its remote container support. - -# Build Metabase - -The entire Metabase application is compiled and assembled into a single .jar file which can run on any modern JVM. There is a script which will execute all steps in the process and output the final artifact for you. You can pass the environment variable MB_EDITION before running the build script to choose the version that you want to build. If you don't provide a value, the default is `oss` which will build the Community Edition. - - ./bin/build - -After running the build script simply look in `target/uberjar` for the output .jar file and you are ready to go. - -## Building `Metabase.app` - -See [this guide](developers-guide-osx.md). - -# Development Environment - -If you plan to work on the Metabase code and make changes then you'll need to understand a few more things. - -### Overview - -The Metabase application has two basic components: - -1. a backend written in Clojure which contains a REST API as well as all the relevant code for talking to databases and processing queries. -2. a frontend written as a Javascript single-page application which provides the web UI. 
- -Both components are built and assembled together into a single jar file which runs the entire application. - -### 3rd party dependencies - -Metabase depends on lots of other 3rd party libraries to run, so as you are developing you'll need to keep those up to date. Leiningen will automatically fetch Clojure dependencies when needed, but for JavaScript dependencies you'll need to kick off the installation process manually when needed. - -```sh -# javascript dependencies -$ yarn -``` - -### Development server (quick start) - -Run your backend development server with - - lein run - -Start the frontend build process with - - yarn build-hot - -## Frontend development - -We use these technologies for our FE build process to allow us to use modules, es6 syntax, and css variables. - -- webpack -- babel -- cssnext - -Frontend tasks are executed using `yarn`. All available tasks can be found in `package.json` under _scripts_. - -To build the frontend client without watching for changes, you can use: - -```sh -$ yarn build -``` - -If you're working on the frontend directly, you'll most likely want to reload changes on save, and in the case of React components, do so while maintaining state. To start a build with hot reloading, use: - -```sh -$ yarn build-hot -``` - -Note that at this time if you change CSS variables, those changes will only be picked up when a build is restarted. - -There is also an option to reload changes on save without hot reloading if you prefer that. - -```sh -$ yarn build-watch -``` - -Some systems may have trouble detecting changes to frontend files. You can enable filesystem polling by uncommenting the `watchOptions` clause in `webpack.config.js`. If you do this it may be worth making git ignore changes to webpack config, using `git update-index --assume-unchanged webpack.config.js` - -### Frontend testing - -All frontend tests are located in `frontend/test` directory. 
Run all frontend tests with - -``` -yarn test -``` - -which will run unit and Cypress end-to-end tests in sequence. - -### Cypress end-to-end tests - -End-to-end tests simulate realistic sequences of user interactions. Read more about how we approach end-to-end testing with Cypress in our [wiki page](https://github.com/metabase/metabase/wiki/E2E-Tests-with-Cypress). - -Cypress end-to-end tests use an enforced file naming convention `.cy.spec.js` to separate them from unit tests. - -### Jest unit tests - -Unit tests are focused around isolated parts of business logic. - -Unit tests use an enforced file naming convention `.unit.spec.js` to separate them from end-to-end tests. - -``` -yarn test-unit # Run all tests at once -yarn test-unit-watch # Watch for file changes -``` - -## Backend development - -Leiningen and your REPL are the main development tools for the backend. There are some directions below on how to setup your REPL for easier development. - -And of course your Jetty development server is available via - - lein run - -### Building drivers - -Most of the drivers Metabase uses to connect to external data warehouse databases are separate Leiningen projects under the `modules/` subdirectory. When running Metabase via `lein`, you'll -need to build these drivers in order to have access to them. You can build drivers as follows: - -``` -# Build the 'mongo' driver -./bin/build-driver.sh mongo -``` - -(or) - -``` -# Build all drivers -./bin/build-drivers.sh -``` - -The first time you build a driver, it will be a bit slow, because Metabase needs to build the core project a couple of times so the driver can use it as a dependency; you can take comfort in the -fact that you won't need to build the driver again after that. 
Alternatively, running Metabase 1.0+ from the uberjar will unpack all of the pre-built drivers into your plugins directory; you can -do this instead if you already have a Metabase uberjar (just make sure `plugins` is in the root directory of the Metabase source, i.e. the same directory as `project.clj`). - -### Including driver source paths for development or other Leiningen tasks - -For development when running various Leiningen tasks you can add the `include-all-drivers` profile to merge the drivers' dependencies and source paths into the Metabase -project: - -``` -# Install dependencies -lein with-profiles +include-all-drivers deps -``` - -This profile is added by default when running `lein repl`, tests, and linters. - -#### Unit Tests / Linting - -Run unit tests with - - lein test - -or a specific test with - - lein test metabase.api.session-test - -By default, the tests only run against the `h2` driver. You can specify which drivers to run tests against with the env var `DRIVERS`: - - DRIVERS=h2,postgres,mysql,mongo lein test - -Some drivers require additional environment variables when testing since they are impossible to run locally (such as Redshift and Bigquery). The tests will fail on launch and let you know what parameters to supply if needed. - -##### Run the linters: - - lein eastwood && lein bikeshed && lein docstring-checker && lein check-namespace-decls && ./bin/reflection-linter - -#### Developing with Emacs - -`.dir-locals.el` contains some Emacs Lisp that tells `clojure-mode` how to indent Metabase macros and which arguments are docstrings. Whenever this file is updated, -Emacs will ask you if the code is safe to load. You can answer `!` to save it as safe. - -By default, Emacs will insert this code as a customization at the bottom of your `init.el`. -You'll probably want to tell Emacs to store customizations in a different file. 
Add the following to your `init.el`: - -```emacs-lisp -(setq custom-file (concat user-emacs-directory ".custom.el")) ; tell Customize to save customizations to ~/.emacs.d/.custom.el -(ignore-errors ; load customizations from ~/.emacs.d/.custom.el - (load-file custom-file)) -``` - -## Documentation - -## Internationalization - -We are an application with lots of users all over the world. To help them use Metabase in their own language, we mark all of our strings as i18n. - -### Adding new strings: - -If you need to add new strings (try to be judicious about adding copy) do the following: - -1. Tag strings in the frontend using `t` and `jt` ES6 template literals (see more details in https://ttag.js.org/): - -```javascript -const someString = t`Hello ${name}!`; -const someJSX =
{jt`Hello ${name}`}
; -``` - -and in the backend using `trs` (to use the site language) or `tru` (to use the current User's language): - -```clojure -(trs "Hello {0}!" name) -``` - -### Translation errors or missing strings - -If you see incorrect or missing strings for your language, please visit our [POEditor project](https://poeditor.com/join/project/ynjQmwSsGh) and submit your fixes there. - -## License - -Copyright © 2021 Metabase, Inc. - -Distributed under the terms of the GNU Affero General Public License (AGPL) except as otherwise noted. See individual files for details. +Please refer to the detailed [Developer's Guide](developers-guide/start.md). \ No newline at end of file diff --git a/docs/developers-guide-osx.md b/docs/developers-guide/build.md similarity index 73% rename from docs/developers-guide-osx.md rename to docs/developers-guide/build.md index d53c3255bf2c..d5e6b58e458c 100644 --- a/docs/developers-guide-osx.md +++ b/docs/developers-guide/build.md @@ -1,15 +1,45 @@ -# Metabase OS X App +# Build Metabase -NOTE: These instructions are only for packaging a built Metabase uberjar into `Metabase.app`. They are not useful if your goal is to work on Metabase itself; for development, please see -our [developers' guide](developers-guide.md). +## Install Prerequisites + +These are the tools which are required in order to complete any build of the Metabase code. Follow the links to download and install them on your own before continuing. + +1. [Clojure (https://clojure.org)](https://clojure.org/guides/getting_started) - install the latest release by following the guide depending on your OS +2. [Java Development Kit JDK (https://adoptopenjdk.net/releases.html)](https://adoptopenjdk.net/releases.html) - you need to install JDK 11 ([more info on Java versions](../operations-guide/java-versions.md)) +3. [Node.js (http://nodejs.org/)](http://nodejs.org/) - latest LTS release +4. 
[Yarn package manager for Node.js](https://yarnpkg.com/) - latest release of version 1.x - you can install it in any OS by doing `npm install --global yarn`
+
+On a recent stable Ubuntu/Debian, all the tools above, with the exception of Clojure, can be installed by using:
+
+```
+sudo apt install openjdk-11-jdk nodejs && sudo npm install --global yarn
+```
+If you have multiple JDK versions installed on your machine, be sure to switch your JDK before building by doing `sudo update-alternatives --config java` and selecting Java 11 in the menu.
+
+If you are developing on Windows, make sure to use Ubuntu on Windows and follow instructions for Ubuntu/Linux instead of installing ordinary Windows versions.
+
+Alternatively, without the need to explicitly install the above dependencies, follow the guide [on using Visual Studio Code](devenv.md#developing-with-visual-studio-code) and its remote container support.
+
+## Build Metabase Uberjar
+
+The entire Metabase application is compiled and assembled into a single .jar file which can run on any modern JVM. There is a script which will execute all steps in the process and output the final artifact for you. You can pass the environment variable MB_EDITION before running the build script to choose the version that you want to build. If you don't provide a value, the default is `oss` which will build the Community Edition.
+
+    ./bin/build
+
+After running the build script simply look in `target/uberjar` for the output .jar file and you are ready to go.
+
+## Building macOS App (`Metabase.app`)
+
+NOTE: These instructions are only for packaging a built Metabase uberjar into `Metabase.app`. They are not useful if your goal is to work on Metabase itself; for development, please see above.
+
+### First-Time Configuration

-## First-Time Configuration
Steps -### Building +#### Building The following steps need to be done before building the Mac App: @@ -60,7 +90,7 @@ The following steps need to be done before building the Mac App: At this point, you should try opening up the Xcode project and building the Mac App in Xcode by clicking the run button. The app should build and launch at this point. If it doesn't, ask Cam for help! -### Releasing +#### Releasing The following steps are prereqs for *releasing* the Mac App: @@ -132,7 +162,7 @@ clojure.org](https://www.clojure.org/guides/getting_started) for more details.
-## Building & Releasing the Mac App +### Building & Releasing the Mac App After following the configuration steps above, to build and release the app you can use the build script: diff --git a/docs/developers-guide/contributing.md b/docs/developers-guide/contributing.md new file mode 100644 index 000000000000..3a873d4b51fa --- /dev/null +++ b/docs/developers-guide/contributing.md @@ -0,0 +1,7 @@ +# Contributing + +In general, we like to have an open issue for every pull request as a place to discuss the nature of any bug or proposed improvement. Each pull request should address a single issue, and contain both the fix as well as a description of the pull request and tests that validate that the PR fixes the issue in question. + +For significant feature additions, it is expected that discussion will have taken place in the attached issue. Any feature that requires a major decision to be reached will need to have an explicit design document written. The goals of this document are to make explicit the assumptions, constraints and tradeoffs any given feature implementation will contain. The point is not to generate documentation but to allow discussion to reference a specific proposed design and to allow others to consider the implications of a given design. + +We don't like getting sued, so before merging any pull request, we'll need each person contributing code to sign a Contributor License Agreement [here](https://docs.google.com/a/metabase.com/forms/d/1oV38o7b9ONFSwuzwmERRMi9SYrhYeOrkbmNaq9pOJ_E/viewform). \ No newline at end of file diff --git a/docs/developers-guide/devenv.md b/docs/developers-guide/devenv.md new file mode 100644 index 000000000000..ba742b48c648 --- /dev/null +++ b/docs/developers-guide/devenv.md @@ -0,0 +1,252 @@ +# Development Environment + +If you plan to work on the Metabase code and make changes then you'll need to understand a few more things. + +## Overview + +The Metabase application has two basic components: + +1. 
a backend written in Clojure which contains a REST API as well as all the relevant code for talking to databases and processing queries. +2. a frontend written as a Javascript single-page application which provides the web UI. + +Both components are built and assembled together into a single jar file which runs the entire application. + +## 3rd party dependencies + +Metabase depends on lots of third-party libraries to run, so you'll need to keep those up to date. The Clojure CLI will automatically fetch the dependencies when needed. With JavaScript dependencies, however, you'll need to kick off the installation process manually. + +```sh +# javascript dependencies +$ yarn +``` + +## Development server (quick start) + +Run your backend development server with + + clojure -M:run + +Start the frontend build process with + + yarn build-hot + +## Frontend development + +We use these technologies for our FE build process to allow us to use modules, es6 syntax, and css variables. + +- webpack +- babel +- cssnext + +Frontend tasks are executed using `yarn`. All available tasks can be found in `package.json` under _scripts_. + +To build the frontend client without watching for changes, you can use: + +```sh +$ yarn build +``` + +If you're working on the frontend directly, you'll most likely want to reload changes on save, and in the case of React components, do so while maintaining state. To start a build with hot reloading, use: + +```sh +$ yarn build-hot +``` + +Note that at this time if you change CSS variables, those changes will only be picked up when a build is restarted. + +There is also an option to reload changes on save without hot reloading if you prefer that. + +```sh +$ yarn build-watch +``` + +Some systems may have trouble detecting changes to frontend files. You can enable filesystem polling by uncommenting the `watchOptions` clause in `webpack.config.js`. 
If you do this it may be worth making git ignore changes to webpack config, using `git update-index --assume-unchanged webpack.config.js` + +## Frontend testing + +All frontend tests are located in `frontend/test` directory. Run all frontend tests with + +``` +yarn test +``` + +which will run unit and Cypress end-to-end tests in sequence. + +## Frontend debugging + +By default, we use a simple source mapping option that is optimized for speed. + +If you run into issues with breakpoints, especially inside jsx, please set env variable `BETTER_SOURCE_MAPS` to true before you run the server. + +Example: + +``` +BETTER_SOURCE_MAPS=true yarn dev +``` + +### Cypress end-to-end tests + +End-to-end tests simulate realistic sequences of user interactions. Read more about how we approach [end-to-end testing with Cypress](./e2e-tests.md). + +Cypress end-to-end tests use an enforced file naming convention `.cy.spec.js` to separate them from unit tests. + +### Jest unit tests + +Unit tests are focused around isolated parts of business logic. + +Unit tests use an enforced file naming convention `.unit.spec.js` to separate them from end-to-end tests. + +``` +yarn test-unit # Run all tests at once +yarn test-unit-watch # Watch for file changes +``` + +## Backend development + +Clojure REPL is the main development tool for the backend. There are some directions below on how to setup your REPL for easier development. + +And of course your Jetty development server is available via + + clojure -M:run + +### Building drivers + +Most of the drivers Metabase uses to connect to external data warehouse databases are separate projects under the +`modules/` subdirectory. When running Metabase via `clojure`, you'll need to build these drivers in order to have access +to them. 
You can build drivers as follows: + +``` +# Build the 'mongo' driver +./bin/build-driver.sh mongo +``` + +(or) + +``` +# Build all drivers +./bin/build-drivers.sh +``` + +### Including driver source paths for development or other tasks + +For development when running various Clojure tasks you can add the `drivers` and `drivers-dev` aliases to merge the +drivers' dependencies and source paths into the Metabase project: + +``` +# Install dependencies, including for drivers +clojure -P -X:dev:ci:drivers:drivers-dev +``` + +#### Unit Tests / Linting + +Run unit tests with + + # OSS tests only + clojure -X:dev:test + + # OSS + EE tests + clojure -X:dev:ee:ee-dev:test + +or a specific test (or test namespace) with + + # run tests in only one namespace (pass in a symbol) + clojure -X:dev:test :only metabase.api.session-test + + # run one specific test (pass in a qualified symbol) + clojure -X:dev:test :only metabase.api.session-test/my-test + + # run tests in one specific folder (test/metabase/util in this example) + # pass arg in double-quotes so Clojure CLI interprets it as a string; + # our test runner treats strings as directories + clojure -X:dev:test :only '"test/metabase/util"' + +By default, the tests only run against the `h2` driver. You can specify which drivers to run tests against with the env var `DRIVERS`: + + DRIVERS=h2,postgres,mysql,mongo clojure -X:dev:drivers:drivers-dev:test + +Some drivers require additional environment variables when testing since they are impossible to run locally (such as Redshift and Bigquery). The tests will fail on launch and let you know what parameters to supply if needed. + +##### Run the linters: + +`clj-kondo` must be installed separately; see https://github.com/clj-kondo/clj-kondo/blob/master/doc/install.md for +instructions. 
+ + # Run Eastwood + clojure -X:dev:ee:ee-dev:drivers:drivers-dev:eastwood + + # Run the namespace checker + clojure -X:dev:ee:ee-dev:drivers:drivers-dev:namespace-checker + + # Run clj-kondo + clj-kondo --parallel --lint src shared/src enterprise/backend/src --config lint-config.edn + +### Developing with Emacs + +`.dir-locals.el` contains some Emacs Lisp that tells `clojure-mode` how to indent Metabase macros and which arguments are docstrings. Whenever this file is updated, +Emacs will ask you if the code is safe to load. You can answer `!` to save it as safe. + +By default, Emacs will insert this code as a customization at the bottom of your `init.el`. +You'll probably want to tell Emacs to store customizations in a different file. Add the following to your `init.el`: + +```emacs-lisp +(setq custom-file (concat user-emacs-directory ".custom.el")) ; tell Customize to save customizations to ~/.emacs.d/.custom.el +(ignore-errors ; load customizations from ~/.emacs.d/.custom.el + (load-file custom-file)) +``` + +## Developing with Visual Studio Code + +### Debugging + +First, install the following extension: +* [Debugger for Firefox](https://marketplace.visualstudio.com/items?itemName=firefox-devtools.vscode-firefox-debug) + +_Note_: Debugger for Chrome has been deprecated. You can safely delete it as Visual Studio Code now has [a bundled JavaScript Debugger](https://github.com/microsoft/vscode-js-debug) that covers the same functionality. + +Before starting the debugging session, make sure that Metabase is built and running. Choose menu _View_, _Command Palette_, search for and choose _Tasks: Run Build Task_. Alternatively, use the corresponding shortcut `Ctrl+Shift+B`. The built-in terminal will appear to show the progress, wait a few moment until webpack indicates a complete (100%) bundling. 
+ +To begin debugging Metabase, switch to the Debug view (shortcut: `Ctrl+Shift+D`) and then select one of the two launch configurations from the drop-down at the top: + +* Debug with Firefox, or +* Debug with Chrome + +After that, begin the debugging session by choosing menu _Run_, _Start Debugging_ (shortcut: `F5`). + +For more details, please refer to the complete VS Code documentation on [Debugging](https://code.visualstudio.com/docs/editor/debugging). + +### Docker-based Workflow + +These instructions allow you to work on Metabase codebase on Windows, Linux, or macOS using [Visual Studio Code](https://code.visualstudio.com/), **without** manually installing the necessary dependencies. This is possible by leveraging Docker container and the Remote Containers extension from VS Code. + +For more details, please follow the complete VS Code guide on [Developing inside a Container](https://code.visualstudio.com/docs/remote/containers). The summary is as follows. + +Requirements: + +* [Visual Studio Code](https://code.visualstudio.com/) (obviously) +* [Docker](https://www.docker.com/) +* [Remote - Containers extension](vscode:extension/ms-vscode-remote.remote-containers) for VS Code + +_Important_: Ensure that Docker is running properly and it can be used to download an image and launch a container, e.g. by running: + +``` +$ docker run hello-world +``` +If everything goes well, you should see the following message: + +``` +Hello from Docker! +This message shows that your installation appears to be working correctly. +``` + +Steps: + +1. Clone Metabase repository + +2. Launch VS Code and open your cloned Metabase repository + +3. From the _View_ menu, choose _Command Palette..._ and then find _Remote-Container: Reopen in Container_. (VS Code may also prompt you to do this with an "Open in container" popup). +**Note**: VS Code will create the container for the first time and it may take some time. Subsequent loads should be much faster. + +4. 
Use the menu _View_, _Command Palette_, search for and choose _Tasks: Run Build Task_ (alternatively, use the shortcut `Ctrl+Shift+B`).
+
+5. After a while (after all JavaScript and Clojure dependencies are completely downloaded), open localhost:3000 with your web browser.
diff --git a/docs/developers-guide/e2e-tests.md b/docs/developers-guide/e2e-tests.md
new file mode 100644
index 000000000000..20965b135c59
--- /dev/null
+++ b/docs/developers-guide/e2e-tests.md
@@ -0,0 +1,85 @@
+# End-to-end Tests with Cypress
+
+Metabase uses Cypress for “end-to-end testing”, that is, tests that are executed against the application as a whole, including the frontend, backend, and application database. These tests are essentially scripts written in JavaScript that run in the web browser: visit different URLs, click various UI elements, type text, and assert that things happen as expected (for example, an element appearing on screen, or a network request occurring).
+
+## Getting Started
+
+Metabase’s Cypress tests are located in the `frontend/test/metabase/scenarios` source tree, in a structure that roughly mirrors Metabase’s URL structure. For example, tests for the admin “datamodel” pages are located in `frontend/test/metabase/scenarios/admin/datamodel`.
+
+During development you will want to run `yarn build-hot` to continuously build the frontend, and `yarn test-cypress-open` to open the Cypress application where you can execute the tests you are working on.
+
+To run all Cypress tests programmatically in the terminal:
+```
+yarn run test-cypress-no-build
+```
+
+You can run a specific set of scenarios by using the `--folder` flag, which will pick up the chosen scenarios under `frontend/test/metabase/scenarios/`.
+
+```
+yarn run test-cypress-no-build --folder sharing
+```
+
+You can quickly test a single file only by using the `--spec` flag.
+ +``` +yarn test-cypress-no-build --spec frontend/test/metabase/scenarios/question/new.cy.spec.js +``` + +Cypress test files are structured like Mocha tests, where `describe` blocks are used to group related tests, and `it` blocks are the tests themselves. + +```js +describe("homepage",() => { + it('should load the homepage and...', () => { + cy.visit("/metabase/url"); + // ... + }) +}) +``` + +We strongly prefer using selectors like `cy.findByText()` and `cy.findByLabelText()` from [`@testing-library/cypress`](https://github.com/testing-library/cypress-testing-library) since they encourage writing tests that don't depend on implementation details like CSS class names. + +Try to avoid repeatedly testing pieces of the application incidentally. For example, if you want to test something about the query builder, jump straight there using a URL like `cy.visit("/question/new?database=1&table=2");` rather than starting from the home page, clicking "Ask a question", etc. + +## Cypress Documentation + +* Introduction: https://docs.cypress.io/guides/core-concepts/introduction-to-cypress.html#Querying-by-Text-Content +* Commands: https://docs.cypress.io/api/api/table-of-contents.html +* Assertions: https://docs.cypress.io/guides/references/assertions.html + +## Tips/Gotchas + +### `contains` vs `find` vs `get` +(TODO: talk about `@testing-library/cypress`). Cypress has a set of similar commands for selecting elements. Here are some tips for using them: +* `contains` is case-sensitive to the text *in the DOM*. If it’s not matching text you’d expect, check that CSS hasn’t updated the case. +* `contains` matches substrings, so if you see “filter by” and “Add a filter”, `contains(“filter”)` will match both. To avoid these issues, you can either pass a regexp that pins the start/end of the string or pass a selector in addition to the string: `.contains(selector, content)`. +* `find` will let you search within your previous selection. 
`get` will search the entire page even if chained. + +### Increase viewport size to avoid scrolling +Sometimes Metabase views are a bit large for Cypress’s default 1000x660 viewport. This can require you to scroll for tests to work. To avoid that, you can increase the viewport size for a specific test by calling `cy.viewport(width, height)`. + +### Code reloading vs test reloading +When you edit a Cypress test file, the tests will refresh and run again. However, when you edit a code file, Cypress won’t detect that change. If you’re running `yarn build-hot`, the code will rebuild and update within Cypress. You’ll have to manually click rerun after the new code has loaded. + +### Inspecting while the “contains helper” is open +One great feature of Cypress is that you can use the Chrome inspector after each step of a test. They also helpfully provide a helper that can test out `contains` and `get` calls. This helper creates new UI that prevents inspecting from targeting the correct elements. If you want to inspect the DOM in Chrome, you should close this helper. + +### Putting the wrong HTML template in the Uberjar +`yarn build` and `yarn build-hot` each overwrite an HTML template to reference the correct Javascript files. If you run `yarn build` before building an Uberjar for Cypress tests, you won’t see changes to your Javascript reflected even if you then start `yarn build-hot`. + + +## DB Snapshots + +At the beginning of each test suite we wipe the backend's db and settings cache. This ensures that the test suite starts in a predictable state. + +Typically, we use the default snapshot by adding `before(restore)` inside the first `describe` block to restore before running the whole test suite. If you want to use a snapshot besides the default one, specify the name as an argument to `restore` like this: `before(() => restore("blank"))`. You can also call `restore()` inside `beforeEach()` to reset before every test, or inside specific tests. 
Snapshots are created with a separate set of Cypress tests. These tests start with a blank database and execute specific actions to put the database in a predictable state. For example: sign up as bob@metabase.com, add a question, turn on setting ABC.
+ +### Currently available entities: + +- Questions, Dashboards, Pulses +- Collections +- Databases, Tables, Fields, Segments, Metrics +- Users, Groups +- Full current list of entities here: https://github.com/metabase/metabase/tree/master/frontend/src/metabase/entities + + +There are two ways to use loaders, either as React "render prop" components or as React component class decorators ("higher order components"). + + +### Object loading + +In this example we're going to load information about a specific database for a new page. + +```js +import React from "react" +import Databases from "metabase/entities/databases" + + +@Databases.load({ id: 4 }) +class MyNewPage extends React.Component { + render () { + const { database } = this.props + return ( +
+

{database.name}

+
+ ) + } +} +``` + + +This example uses a class decorator to ask for and then display a database with ID 4. If you instead wanted to use a render prop component your code would look like this. + + +```js +import React from "react" +import Databases from "metabase/entities/databases" + +class MyNewPage extends React.Component { + render () { + const { database } = this.props + return ( +
+ + { ({ database }) => +

{database.name}

+ } +
+
+ ) + } +} +``` + +Now you most likely don't just want to display just one static item so for cases where some of the values you might need will be dynamic you can use a function to get at the props and return the value you need. If you're using the component approach you can just pass props as you would normally for dynamic values. + + +```js +@Databases.load({ + id: (state, props) => props.params.databaseId +})) +``` + +## List loading + +Loading a list of items is as easy as applying the `loadList` decorator: + +```js +import React from "react" +import Users from "metabase/entities/users" + +@Users.loadList() +class MyList extends React.Component { + render () { + const { users } = this.props + return ( +
+ { users.map(u => u.first_name) } +
+ ) + } +} +``` + +Similar to the object loader's `id` argument you can also pass a `query` object (if the API supports it): + +```js +@Users.loadList({ + query: (state, props) => ({ archived: props.showArchivedOnly }) +}) +``` + +### Control over loading and error states + +By default both `EntityObject` and `EntityList` loaders will handle loading state for you by using `LoadingAndErrorWrapper` under the hood. If for some reason you want to handle loading on your own you can disable this behavior by setting `loadingAndErrorWrapper: false`. + +### Wrapped objects + +If you pass `wrapped: true` to a loader then the object or objects will be wrapped with helper classes that let you do things like `user.getName()`, `user.delete()`, or `user.update({ name: "new name" )`. Actions are automatically already bound to `dispatch`. + +This may incur a performance penalty if there are many objects. + +Any additional selectors and actions defined in the entities' `objectSelectors` or `objectActions` will appear as the wrapped object's methods. + +### Advanced usage + +You can also use the Redux actions and selectors directly, for example, `dispatch(Users.actions.loadList())` and `Users.selectors.getList(state)`. + +## Forms + +Metabase includes a comprehensive custom React and [`redux-form`](https://redux-form.com/5.2.3/) based form library. It also integrates with Metabase's [Entities](https://github.com/metabase/metabase/wiki/Frontend:-Entity-Loaders) system. + +The core React component of the system is [`metabase/containers/Form`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/containers/Form.jsx). + +### Form Definitions + +Form definitions can be provided in two different ways, with a JavaScript-based form definition object, or inline React `` elements. + +Pass a form definition to the `form` prop: + +```javascript +
alert(JSON.stringify(values))} +/> +``` + +If `` doesn't have any children elements then it will use the [`metabase/components/StandardLayout`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/components/StandardLayout) component to provide a default form layout. + +The schema for this object is defined in [`Form.jsx`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/containers/Form.jsx#L41-L60). + +`fields` and `initial` (for initial values) can be provided directly or as functions that dynamically compute them based on the current form state and additional props. + +```javascript +{ + "fields": (values) => [ + { name: "a", type: } +``` + +`initial`, `normalize`, and `validate` properties can be provided at the top-level, or per-field. They can also be provided as props to the `` and `` components For definitions can be provided + +### Custom Layout + +Form definition can also be provided via `` React elements (exported from the same `metabase/containers/Form` module), which will also serve as the layout (this uses the [`metabase/components/CustomLayout`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/components/CustomLayout)) + +```javascript +import Form, { FormField, FormFooter } from "metabase/containers/Form"; + + alert(JSON.stringify(values))}> + + + + +``` + +You can also provide both the `form` prop and children `` elements, in which case the `form` prop will be merged with the ``s' props. + +### Custom Widgets + +Built-in field `type`s are defined in [metabase/components/form/FormWidget](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/components/form/FormWidget.jsx#L17-L28). You can also provide a React component as the `type` property. + +### Validation + +You might have noticed the `validate` API above. 
These are simple chainable validators compatible with this form library, and are provided by [`metabase/lib/validate`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/lib/validate.js). You can add additional validators in that file. + +Server-side validation and other errors are returned in a standard format understood by `
`. + +Field-level errors: + +```json +{ "errors": { "field_name": "error message" } } +``` + +Top-level errors: + +```json +{ "message": "error message" } +``` + + +### Integration with Entities + +The Form library is integrated with Metabase's [Entities](https://github.com/metabase/metabase/wiki/Frontend:-Entity-Loaders) system (via the [`EntityForm`](https://github.com/metabase/metabase/blob/master/frontend/src/metabase/entities/containers/EntityForm.jsx) component), so that every entity includes a `Form` component that can be used like so: + +```javascript + +``` + +which uses the default `form` defined on the entity, e.x. + +```javascript +const Users = createEntity({ + name: "users", + path: "/api/user", + + form: { + fields: [ + { name: "email" } + ] + } + + // Alternatively, it will take the first form from the `forms` object: + // form: { + // default: { + // fields: [ + // { name: "email" } + // ] + // } + // } +} +``` + +You can also explicitly pass a different form object: + +```javascript + +``` + +Entity `Form`s will automatically be wired up to the correct REST endpoints for creating or updating entities. + +If you need to load an object first, they compose nicely with the Entities `Loader` render prop: + +```javascript + + {({ user }) => } + +``` + +Or higher-order component: + +```javascript +Users.load({ id: (state, props) => props.params.userId })(Users.Form) +``` + +## Style Guide + +### Set up Prettier + +We use [Prettier](https://prettier.io/) to format our JavaScript code, and it is enforced by CI. We recommend setting your editor to "format on save". You can also format code using `yarn prettier`, and verify it has been formatted correctly using `yarn lint-prettier`. + +We use ESLint to enforce additional rules. It is integrated into the Webpack build, or you can manually run `yarn lint-eslint` to check. 
+ +### React and JSX Style Guide + +For the most part we follow the [Airbnb React/JSX Style Guide](https://github.com/airbnb/javascript/tree/master/react). ESLint and Prettier should take care of a majority of the rules in the Airbnb style guide. Exceptions will be noted in this document. + +* Prefer React [function components over class components](https://reactjs.org/docs/components-and-props.html#function-and-class-components) +* For control components, typically we use `value` and `onChange`. Controls that have options (e.x. `Radio`, `Select`) usually take an `options` array of objects with `name` and `value` properties. +* Components named like `FooModal` and `FooPopover` typically refer to the modal/popover *content* which should be used inside a `Modal`/`ModalWithTrigger` or `Popover`/`PopoverWithTrigger` +* Components named like `FooWidget` typically include a `FooPopover` inside a `PopoverWithTrigger` with some sort of trigger element, often `FooName` + +* Use arrow function instance properties if you need to bind a method in a class (instead of `this.method = this.method.bind(this);` in the constructor), but only if the function needs to be bound (e.x. if you're passing it as a prop to a React component) + +```javascript +class MyComponent extends React.Component { + constructor(props) { + super(props); + // NO: + this.handleChange = this.handleChange.bind(this); + } + // YES: + handleChange = e => { + // ... + } + // no need to bind: + componentDidMount() { + } + render() { + return + } +} +``` + +* For styling components we currently use a mix of `styled-components` and ["atomic" / "utility-first" CSS classes](https://github.com/metabase/metabase/tree/master/frontend/src/metabase/css/core). +* Prefer using `grid-styled`'s `Box` and `Flex` components over raw `div`. +* Components should typically pass along their `className` prop to the root element of the component. 
It can be merged with additional classes using the `cx` function from the `classnames` package. +* In order to make components more reusable, a component should only apply classes or styles to the root element of the component which affects the layout/styling of it's own content, but *not* the layout of itself within it's parent container. For example, it can include padding or the `flex` class, but it shouldn't include margin or `flex-full`, `full`, `absolute`, `spread`, etc. Those should be passed via `className` or `style` props by the consumer of the component, which knows how the component should be positioned within itself. +* Avoid breaking JSX up into separate method calls within a single component. Prefer inlining JSX so that you can better see what the relation is of the JSX a `render` method returns to what is in the `state` or `props` of a component. By inlining JSX you'll also get a better sense of what parts should and should not be separate components. +```javascript + +// don't do this +render () { + return ( +
+ {this.renderThing1()} + {this.renderThing2()} + {this.state.thing3Needed && this.renderThing3()} +
+ ); +} + +// do this +render () { + return ( +
+ + + {this.state.thing3Needed && } +
+ ); +} +``` + +### JavaScript Conventions + +* `import`s should be ordered by type, typically: + 1. external libraries (`react` is often first, along with things like `ttags`, `underscore`, `classnames`, etc) + 2. Metabase's top-level React components and containers (`metabase/components/*`, `metabase/containers/*`, etc) + 3. Metabase's React components and containers specific to this part of the application (`metabase/*/components/*` etc) + 4. Metabase's `lib`s, `entities`, `services`, Redux files, etc +* Prefer `const` to `let` (and never use `var`). Only use `let` if you have a specific reason to reassign the identifier (note: this now enforced by ESLint) +* Prefer [arrow functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions) for inline functions, especially if you need to reference `this` from the parent scope (there should almost never be a need to do `const self = this;` etc), but usually even if you don't (e.x. `array.map(x => x * 2)`). +* Prefer `function` declarations for top-level functions, including React function components. The exception is for one-liner functions that return a value +```javascript +// YES: +function MyComponent(props) { + return
...
+} +// NO: +const MyComponent = (props) => { + return
...
+} +// YES: +const double = n => n * 2; +// ALSO OK: +function double(n) { + return n * 2; +} +``` + +* Prefer native `Array` methods over `underscore`'s. We polyfill all ES6 features. Use Underscore for things that aren't implemented natively. +* Prefer [`async`/`await`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function) over using `promise.then(...)` etc directly. +* You may use assignment destructuring or argument destructuring, but avoid deeply nested destructuring, since they can be hard to read and `prettier` sometimes formats them with extra whitespace. + * avoid destructuring properties from "entity"-like objects, e.x. don't do `const { display_name } = column;` + * don't destructure `this` directly, e.x. `const { foo } = this.props; const { bar } = this.state;` instead of `const { props: { foo }, state: { bar } } = this;` +* Avoid nested ternaries as they often result in code that is difficult to read. If you have logical branches in your code that are dependent on the value of a string, prefer using an object as a map to multiple values (when evaluation is trivial) or a `switch` statement (when evaluation is more complex, like when branching on which React component to return): + +```javascript +// don't do this +const foo = str == 'a' ? 123 : str === 'b' ? 456 : str === 'c' : 789 : 0; + +// do this +const foo = { + a: 123, + b: 456, + c: 789, +}[str] || 0; + +// or do this +switch (str) { + case 'a': + return ; + case 'b': + return ; + case 'c': + return ; + case 'd': + default: + return ; +} +``` +If your nested ternaries are in the form of predicates evaluating to booleans, prefer an `if/if-else/else` statement that is siloed to a separate, pure function: + +```javascript +const foo = getFoo(a, b); + +function getFoo(a, b, c) { + if (a.includes('foo')) { + return 123; + } else if (a === b) { + return 456; + } else { + return 0; + } +} +``` +* Be conservative with what comments you add to the codebase. 
Comments shouldn't be used as reminders or as TODOs--record those by creating a new issue in Github. Ideally, code should be written in such a way that it explains itself clearly. When it does not, you should first try rewriting the code. If for whatever reason you are unable to write something clearly, add a comment to explain the "why". +```javascript + +// don't do this--the comment is redundant + +// get the native permissions for this db +const nativePermissions = getNativePermissions(perms, groupId, { + databaseId: database.id, +}); + +// don't add TODOs -- they quickly become forgotten cruft + +isSearchable(): boolean { + // TODO: this should return the thing instead + return this.isString(); +} + +// this is acceptable -- the implementer explains a not-obvious edge case of a third party library + +// foo-lib seems to return undefined/NaN occasionally, which breaks things +if (isNaN(x) || isNaN(y)) { + return; +} + +``` +* Avoid complex logical expressions inside of if statements +```javascript +// don't do this +if (typeof children === "string" && children.split(/\n/g).length > 1) { + // ... +} + +// do this +const isMultilineText = typeof children === "string" && children.split(/\n/g).length > 1 +if (isMultilineText) { + // ... 
+} +``` + +* Use ALL_CAPS for constants + +```javascript +// do this +const MIN_HEIGHT = 200; + +// also acceptable +const OBJECT_CONFIG_CONSTANT = { + camelCaseProps: "are OK", + abc: 123 +} +``` +* Prefer named exports over default exports +```javascript +// this makes it harder to search for Widget +import Foo from "./Widget"; +// do this to enforce using the proper name +import {Widget} from "./Widget"; +``` +* Avoid magic strings and numbers +```javascript +// don't do this +const options = _.times(10, () => ...); + +// do this in a constants file +export const MAX_NUM_OPTIONS = 10; +const options = _.times(MAX_NUM_OPTIONS, () => ...); +``` + +### Write Declarative Code + +You should write code with other engineers in mind as other engineers will spend more time reading than you spend writing (and re-writing). Code is more readable when it tells the computer "what to do" versus "how to do." Avoid imperative patterns like for loops: + +```javascript +// don't do this +let foo = [] +for (let i = 0; i < list.length; i++) { + if (list[i].bar === false) { + continue; + } + + foo.push(list[i]); +} + +// do this +const foo = list.filter(entry => entry.bar !== false); +``` + +When dealing with business logic you don't want to be concerned with the specifics of the language. Instead of writing `const query = new Question(card).query();` which entails instantiating a new `Question` instance and calling a `query` method on said instance, you should introduce a function like `getQueryFromCard(card)` so that implementers can avoid thinking about what goes into getting a `query` value from a card. + +## Component Styling Tree Rings + +### classic / global CSS with BEM style selectors (deprecated) + +```css +.Button.Button--primary { + color: -var(--color-brand); +} +``` + +### atomic / utility CSS (still used) + +```css +.text-brand { + color: -var(--color-brand); +} +``` + +```javascript +const Foo = () => +
+``` + + +### inline style (discouraged) + +```javascript +const Foo = ({ color ) => +
+``` + +### CSS modules (deprecated) + +```css +:local(.primary) { + color: -var(--color-brand); +} +``` + +```javascript +import style from "./Foo.css"; + +const Foo = () => +
+``` + +### [Styled Components](https://styled-components.com/) + +```javascript +import styled from "styled-components"; + +const FooWrapper = styled.div` + color: ${props => props.color} +`; + +const Bar = ({ color }) => + +``` + +### Styled Components + [styled-system](https://styled-system.com/) + +e.x. + +```javascript +import styled from "styled-components"; +import { color } from "styled-system"; + +const Foo = styled.div` + ${color} +`; + +const Bar = ({ color }) => + +``` + +## Popover + +Popovers are popups or modals. + +In Metabase core, they are visually responsive: they appear above or below the element that triggers their appearance. Their height is automatically calculated to make them fit on the screen. + +### Where to Find Popovers in the User Journey + +#### When creating custom questions + +1. From home, click on `Ask a question` +2. Click on `Custom question` +3. 👀 The option picker next to `Pick your starting data` is a ``. +3. Choose `Sample Dataset` +4. Choose any of the tables, for example `People` + +Here, clicking on the following will open `` components: + +* `Columns` (right-hand side of section labeled `Data`) +* Gray icon of a grid with + below section labeled `Data` +* `Add filters to narrow your answers` +* `Pick the metric you want to see` +* `Pick a column to group by` +* `Sort` icon with arrows pointing up and down above `Visualize` button diff --git a/docs/developers-guide/images/visual-tests/percy-healthcheck-step.png b/docs/developers-guide/images/visual-tests/percy-healthcheck-step.png new file mode 100644 index 000000000000..2a5002c74a48 Binary files /dev/null and b/docs/developers-guide/images/visual-tests/percy-healthcheck-step.png differ diff --git a/docs/developers-guide/internationalization.md b/docs/developers-guide/internationalization.md new file mode 100644 index 000000000000..e986a9c8a299 --- /dev/null +++ b/docs/developers-guide/internationalization.md @@ -0,0 +1,24 @@ +# Internationalization + +We are an 
application with lots of users all over the world. To help them use Metabase in their own language, we mark all of our strings for i18n (internationalization).
{jt`Hello ${name}`}
; +``` + +and in the backend using `trs` (to use the site language) or `tru` (to use the current User's language): + +```clojure +(trs "Hello {0}!" name) +``` + +## Translation errors or missing strings + +If you see incorrect or missing strings for your language, please visit our [POEditor project](https://poeditor.com/join/project/ynjQmwSsGh) and submit your fixes there. \ No newline at end of file diff --git a/docs/developers-guide/start.md b/docs/developers-guide/start.md new file mode 100644 index 000000000000..f68ae00d66d1 --- /dev/null +++ b/docs/developers-guide/start.md @@ -0,0 +1,13 @@ +# Developer's Guide + +This guide contains detailed information on how to work on Metabase codebase. + +**Covered in this guide:** + +- [How to compile your own copy of Metabase](build.md) +- [How to set up a development environment](devenv.md) +- [How to write and run end-to-end tests](e2e-tests.md) +- [How to work with the frontend code](frontend.md) +- [How to contribute back to the Metabase project](contributing.md) +- [How to add support in Metabase for other languages](internationalization.md) +- [How to use visual tests](visual-tests.md) diff --git a/docs/developers-guide/visual-tests.md b/docs/developers-guide/visual-tests.md new file mode 100644 index 000000000000..e9c764b7007a --- /dev/null +++ b/docs/developers-guide/visual-tests.md @@ -0,0 +1,55 @@ +# Visual Tests + +We use [Percy](https://percy.io/) via Github actions to run visual regression tests. Percy provides pull-request-based workflow, handles diff review and approval flow conveniently. In addition to that, It integrates with Cypress, which allows us to use all power of our custom helpers and commands. We run + +## How to run visual tests on CI + +Percy tests are supposed to be run on CI since every run is attached to a pull request. 
Only when a Cypress test run command is prefixed by `percy exec -- ` and a valid `PERCY_TOKEN` environment variable is specified will the Percy CLI submit page snapshots to Percy's servers, and we will be charged for every screenshot.
Once you review and approve the changes, the PR check becomes green** + +![https://user-images.githubusercontent.com/14301985/126796075-31d5ed5d-6926-4e98-99d2-4aef20738b56.png](https://user-images.githubusercontent.com/14301985/126796075-31d5ed5d-6926-4e98-99d2-4aef20738b56.png) + +![https://user-images.githubusercontent.com/14301985/126796104-c533bbea-006c-47ef-83fa-0c07fcf5393b.png](https://user-images.githubusercontent.com/14301985/126796104-c533bbea-006c-47ef-83fa-0c07fcf5393b.png) + + +## How to create a visual test + +We use Cypress to write Percy tests so we can fully use all existing helpers and custom commands. + +Visual regression tests live inside the `frontend/test/metabase-visual` directory. Writing a Percy test consists of creating a desired page state and executing `cy.percySnapshot()` command. + +### Goal + +Each visual test should cover as many as possible different elements, variants on the same screenshot. For instance, when we are writing E2E test that checks a chart on a dashboard we add just one card and run assertions. In opposite to that, a visual test can contain every type of chart on the same dashboard because it significantly reduces the number of screenshots we produce which reduces the cost of using Percy. + +### Workflow + +1) Run Metabase in the dev mode locally (`yarn dev` or similar commands). +2) Run `yarn test-visual-open` to open Cypress locally. You do not need to export any `PERCY_TOKEN`. +3) Create a spec inside `frontend/test/metabase-visual` and run it via Cypress runner. + +At this step, if you added `percySnapshot` command somewhere in your test, you will see `percyHealthCheck` step in your test: + +![Learn about your data in the SQL editor](./images/visual-tests/percy-healthcheck-step.png) + +Consider the page state at `percyHealthCheck` step as the one that will be captured. + +### Notes + +- You don't need to export `PERCY_TOKEN` for running tests. 
If a token is exported Percy will send snapshots from your local machine to their servers so that you will be able to see your local run in their interface. +- When the application code uses `Date.now()`, you can [freeze](https://docs.percy.io/docs/freezing-dynamic-data#freezing-datetime-in-cypress) date/time in Cypress. +- [Stub](https://github.com/metabase/metabase/pull/17380/files#diff-4e8ebaf75969143a5eee6bfb8adcd4b72d4330d18d77319e3434d11cf6c75e40R15) `Math.random` when to deal with randomization. diff --git a/docs/enterprise-guide/activating-the-enterprise-edition.md b/docs/enterprise-guide/activating-the-enterprise-edition.md index 525ebb421063..62727a428df2 100644 --- a/docs/enterprise-guide/activating-the-enterprise-edition.md +++ b/docs/enterprise-guide/activating-the-enterprise-edition.md @@ -1,24 +1,37 @@ -## Getting and activating the Enterprise edition +## Activating your Metabase commercial license -The Enterprise edition of Metabase is distinct from the open-source edition, so to use it you'll need to first get a license, get the Enterprise edition, and then activate enterprise features with your license. +The paid Pro and Enterprise editions of Metabase are distinct from the free Open Source edition, so to use your paid features you’ll need to first get a license. And if you want to self-host, you'll need a different JAR or Docker image that you can use to activate the advanced features with your license token. -You can get a license by signing up for a free trial of the Enterprise edition. [Find out more here](https://metabase.com/enterprise/). 
Once you sign up for a free trial, you will receive an email containing a license token +### Where to get a license -To get the Enterprise edition, you can either [download the latest .jar file](https://downloads.metabase.com/enterprise/latest/metabase.jar), or get the [latest Docker image](https://hub.docker.com/r/metabase/metabase-enterprise/) at `metabase/metabase-enterprise:latest` +You can get a license by signing up for a free trial of the [Pro or Enterprise edition plans](https://www.metabase.com/pricing), both of which can be self-hosted or hosted on Metabase Cloud. -If you don't want to host the Enterprise Edition yourself, you can have it hosted for you with [Metabase Cloud](https://www.metabase.com/start/hosted/index.html). +If you sign up for a Metabase Cloud option, you're already good to go. -Once you have the Enterprise edition running, to activate all of its features go to the Admin Panel within Metabase, click on the Enterprise tab, click the "Activate a license" button, and then paste in your license token. The page should change to show you that Enterprise features are now active. +### How to activate your token when self-hosting -### Validating Your Enterprise Token +If you chose to host Metabase yourself, you'll get an email containing a unique license token. But to use it, you'll need to install the right JAR file. -Your Metabase instance will need to be able to access the internet (specifically `https://store.metabase.com/api/[token-id]/v2/status`) in order to validate your token and grant access to the Enterprise feature set. 
+You can either: -If you need to route outbound Metabase traffic through a proxy on your network, use the following command: +- [Download the latest metabase-enterprise JAR](https://downloads.metabase.com/enterprise/latest/metabase.jar) (the filename is the same, irrespective of your plan), or +- [Get the latest Docker image](https://hub.docker.com/r/metabase/metabase-enterprise/) at `metabase/metabase-enterprise:latest`. -`java -Dhttps.proxyHost=[your proxy's hostname] -Dhttps.proxyPort=[your proxy's port] -jar enterprise_metabase.jar` +Run Metabase as you would normally, then go to the __Admin__ > __Enterprise__ tab, click the __Activate a license__ button, and paste in your license token. The page will show you the features that are now active. -Depending on your organization's set-up, additional configuration steps may need to be taken. If the command above does not work for you, we would recommend reaching out to your internal infrastructure or dev ops teams for assistance. +### **Validating your token** + +Your Metabase needs to be able to access the internet (specifically `https://store.metabase.com/api/[token-id]/v2/status`) in order to validate the token and maintain access to the advanced features. + +If your Metabase can't validate the token, it'll disable the advanced features, but will continue to work normally otherwise, as if it were the Open Source edition. + +In case you need to route outbound Metabase traffic through a proxy on your network, use the following command when starting Metabase: + +``` +java -Dhttps.proxyHost=[your proxy's hostname] -Dhttps.proxyPort=[your proxy's port] -jar enterprise_metabase.jar +``` + +Depending on your organization’s setup, you may need to take additional configuration steps. If the command above doesn't work for you, we recommend reaching out to your internal infrastructure or dev ops teams for assistance. 
--- diff --git a/docs/enterprise-guide/audit.md b/docs/enterprise-guide/audit.md index 99a30f17e530..8504fdc49059 100644 --- a/docs/enterprise-guide/audit.md +++ b/docs/enterprise-guide/audit.md @@ -1,40 +1,41 @@ -## Audit Logs +# Audit Logs -As an administrator of Metabase, you already know the importance of using data to understand your users. With the Audit tool, you can use Metabase to understand your Metabase users' usage. It's, well, meta! +As an administrator of Metabase, you already know the importance of using data to understand how people interact with your products or services. With the Audit tool, you can use Metabase to understand how people in your organization use Metabase. It's, well, meta! -Access the tool by clicking the gear icon in the upper right and then clicking Audit in the top navigation. There's a lot of data available, not only about your people, but also about your questions, dashboards, databases and more! We'll walk you through each of the sections below. +Access the tool by clicking the gear icon in the upper right and then clicking __Audit__ in the top navigation. There's a lot of data available, not only about your people, but also about your questions, dashboards, databases and more! We'll walk you through each of the sections below. -### People +## People -Use the People section to gain a better understanding of how your end-users are interacting with Metabase. You'll want to pay attention to the Overview tab, especially when you first launch Metabase at your organization — it will give you data around how many active and newly created users you have each day. Further down the page, you'll see charts showing you which of your users are most engaged. Use these charts to find your power users! +### Team members -![Team Members](./images/audit-team.png) +Use the __People__ section to gain a better understanding of how people are interacting with Metabase.
You'll want to pay attention to the __Overview__ tab, especially when you first launch Metabase at your organization — it will give you data around how many active and newly created accounts you have each day. Further down the page, you'll see charts showing you which people are most engaged. -The Team Members panel has a view of other options for viewing your team's usage data. The All Members tab will give you a list of your team members, and stats about their activity, such as when they were last active. +![Team Members](./images/audit-team.png) -The Audit Log tab will display, in chronological order, each query, who viewed it, and when. Note that if the user did not save their query, its name will be listed as `Ad-hoc`. Each query name can be clicked to view more details about your users' interactions with it, including a full revision history. You will also be able to view the query in Metabase. Note that this link will always show the latest version of the query - use the revision history to see changes over time. +The __Team Members__ panel has a view of other options for viewing your team's usage data. The __All Members__ tab will give you a list of your team members, and stats about their activity, such as when they were last active. -#### Drilling In +The __Audit Log__ tab will display, in chronological order, each query, who viewed it, and when. Note that if the person didn't save their query, its name will be listed as `Ad-hoc`. Each query name can be clicked to view more details about the person's interactions with it, including a full revision history. You'll also be able to view the query in Metabase. Note that this link will always show the latest version of the query - use the revision history to see changes over time. -Throughout the People section, names can be clicked to access the profile of a specific user’s activity. 
This profile includes: +Throughout the People section, names can be clicked to access the profile of a specific person’s activity. This profile includes: - Dashboard views - Query views - Downloads +- Subscriptions & Alerts ![Team Members](./images/audit-teammember.png) -### Data +## Data The Data section focuses on your databases, schemas and tables, and is divided into corresponding sections. Look here if you're trying to uncover queries and schemas that need optimization. Each section provides visualizations around the use and speed of querying against your databases, schemas or tables. You will also be able to view lists of stats about all of your databases, schemas and tables. ![Data](./images/audit-data.png) -### Items +## Items -The Items section focuses on questions, dashboards and downloads, and is divided into corresponding sections. Use these pages to gain a better understanding of what your users are interacting with. +The __Items__ section focuses on questions, dashboards, downloads, and Subscriptions and Alerts. -#### Questions +### Questions The Questions section will show you your most popular queries, as well as your slowest queries. If queries you think are important aren't appearing on your most popular queries list, you may want to make sure your team is focusing on the right things. @@ -53,22 +54,47 @@ A list of all of your questions is available as well, and you can see various da - Collection - Public Link -#### Drilling In - You can also click on any question to drill into a more detailed profile showing: - View activity - Revision History - A full audit log of who viewed the question, and when -#### Dashboards +### Dashboards -The Dashboards section is another great way to understand what your users who are dashboard-focused are looking at, and to make sure they're having a smooth experience. 
If you notice that a popular dashboard has a high average question loading time, you can investigate further using the Questions section outlined above. +The __Dashboards__ section helps you understand what dashboards people are looking at, and make sure they're having a smooth experience. If you notice that a popular dashboard has a high average question loading time, you can investigate further using the Questions section outlined above. ![Items](./images/audit-dashboards.png) -A list of all of your dashboards is available as well, and you can see various data points about each dashboard at a glance, such as number of views and average question execution time. +A list of all of your dashboards is available as well, and you can see various data points about each dashboard at a glance, such as: + +- Number of views +- Average question execution time (ms) +- Number of cards +- Saved by +- Cache duration +- Public link +- Saved on +- Last edited on + +### Downloads + +Use the __Downloads__ section to understand which people are downloading (or exporting) data, and the size (number of rows) of the downloads they're performing. This section contains some visualizations, as well as a list of all downloads. + +### Subscriptions & Alerts + +Here Admins can get an overview of all of the [Dashboard subscriptions][dashboard-subscriptions] and [Alerts][alerts] that are currently active for that Metabase. For each subscription or alert, you'll see: + +- Dashboard name (or Question name for Alerts) +- Recipients +- Type (e.g., email or Slack) +- Collection +- Frequency +- Created By +- Created At +- Filters -#### Downloads +Admins can add and remove people from a subscription or alert by clicking on the item's __Recipients__. Admins can also delete the subscription or alert entirely by clicking on the **X** on the relevant line. -Use the Downloads section to understand which of your users are downloading (or exporting) data and the size (number of rows) of the downloads they're performing.
This section contains some visualizations, as well as a list of all downloads. +[alerts]: ../users-guide/15-alerts.md +[dashboard-subscriptions]: ../users-guide/dashboard-subscriptions.md \ No newline at end of file diff --git a/docs/enterprise-guide/data-sandboxes.md b/docs/enterprise-guide/data-sandboxes.md index 4f87b5e096b9..d3a427fd9406 100644 --- a/docs/enterprise-guide/data-sandboxes.md +++ b/docs/enterprise-guide/data-sandboxes.md @@ -1,6 +1,6 @@ ## Sandboxing your data -Data sandboxes are a powerful and flexible permissions tool in Metabase Enterprise Edition that allow you to grant filtered access to specific tables. +Data sandboxes are a powerful and flexible permissions tool in Metabase Enterprise Edition that allow you to grant filtered access to specific tables. If you haven't already, check out our [overview of how permissions work in Metabase][permissions-overview]. Say you have users who you want to be able to log into your Metabase instance, but who should only be able to view data that pertains to them. For example, you might have some customers or partners who you want to let view your Orders table, but you only want them to see their orders. Sandboxes let you do just that. @@ -47,9 +47,7 @@ Next we’ll see a worksheet that will ask us how we want to filter this table f ![Sandbox settings](images/sandboxing/select-user-attribute.png) -We’ll click Done, then we’ll click Save Changes at the top of the screen to save the changes we’ve made to our permissions. If we ever want to edit how this table should be filtered for users in this group, we can just click on the blue box and select “Edit sandboxed access.” - -![Edit access](images/sandboxing/edit-sandboxed-access.png) +We’ll click Done, then we’ll click Save Changes at the top of the screen to save the changes we’ve made to our permissions. 
If we ever want to edit how this table should be filtered for users in this group, we can just click on the __Data access__ dropdown for that group and select __Edit sandboxed access__. To test this out, we’ll open up a new incognito browser window and log in with our test user account. We’ll click on the Sample Dataset on the home page and then pick the Orders table. As you can see here, this user correctly only sees orders where the User ID column is equal to 1, because that’s what this user’s user_id attribute is. @@ -99,6 +97,21 @@ The filtering question that I'll create will exclude columns that I don't want t ![Filtering question](images/sandboxing/advanced-example-2-filtering-question.png) +And here's the code: + +``` +SELECT + id, + created_at, + product_id, + quantity, + total, + user_id +FROM + orders +[[WHERE user_id = {%raw%}{{cid}}{%endraw%}]] +``` + Going back over to the Permissions section, when I open up the sandboxed access modal and select the second option and select my filtering question, I'll see an additional section which allows me to map the variable I defined in my question with a user attribute: ![Sandboxing options](images/sandboxing/advanced-example-2-sandboxing-options.png) @@ -135,8 +148,17 @@ An important distinction to make is that you can use a saved SQL query in the _c Public questions and dashboards can't be sandboxed. Sandboxing works by filtering data based on the group membership and user attributes of an authenticated user — so since a user doesn't have to log in to see a public question or dashboard, Metabase has no knowledge of who that user is. +## Further reading + +- [Learn track on permissions][permissions] +- [Troubleshooting access to columns and rows][troubleshoot-sandbox] + --- ## Next: embedding Metabase in your web app The next section will explain [how to embed](full-app-embedding.md) interactive dashboards and charts, or even whole sections of Metabase within your app. 
+ +[permissions]: /learn/permissions/index.html +[permissions-overview]: ../administration-guide/05-setting-permissions.md +[troubleshoot-sandbox]: ../troubleshooting-guide/sandboxing.html \ No newline at end of file diff --git a/docs/enterprise-guide/images/sandboxing/advanced-example-2-filtering-question.png b/docs/enterprise-guide/images/sandboxing/advanced-example-2-filtering-question.png index 4250e4977be3..cc5defeeb02c 100644 Binary files a/docs/enterprise-guide/images/sandboxing/advanced-example-2-filtering-question.png and b/docs/enterprise-guide/images/sandboxing/advanced-example-2-filtering-question.png differ diff --git a/docs/enterprise-guide/images/sandboxing/advanced-example-2-sandboxing-options.png b/docs/enterprise-guide/images/sandboxing/advanced-example-2-sandboxing-options.png index fc23b1cfd334..01d733d65aa4 100644 Binary files a/docs/enterprise-guide/images/sandboxing/advanced-example-2-sandboxing-options.png and b/docs/enterprise-guide/images/sandboxing/advanced-example-2-sandboxing-options.png differ diff --git a/docs/enterprise-guide/images/sandboxing/change-access-confirm-modal.png b/docs/enterprise-guide/images/sandboxing/change-access-confirm-modal.png index 410b87621231..1569125c4570 100644 Binary files a/docs/enterprise-guide/images/sandboxing/change-access-confirm-modal.png and b/docs/enterprise-guide/images/sandboxing/change-access-confirm-modal.png differ diff --git a/docs/enterprise-guide/images/sandboxing/edit-sandboxed-access.png b/docs/enterprise-guide/images/sandboxing/edit-sandboxed-access.png deleted file mode 100644 index dc8cd2ca3cdb..000000000000 Binary files a/docs/enterprise-guide/images/sandboxing/edit-sandboxed-access.png and /dev/null differ diff --git a/docs/enterprise-guide/images/sandboxing/edit-user-details.png b/docs/enterprise-guide/images/sandboxing/edit-user-details.png index b839c4b32a49..8dcb7e5fd040 100644 Binary files a/docs/enterprise-guide/images/sandboxing/edit-user-details.png and 
b/docs/enterprise-guide/images/sandboxing/edit-user-details.png differ diff --git a/docs/enterprise-guide/images/sandboxing/grant-sandboxed-access.png b/docs/enterprise-guide/images/sandboxing/grant-sandboxed-access.png index d0e31e577bea..536d4d6cc0d2 100644 Binary files a/docs/enterprise-guide/images/sandboxing/grant-sandboxed-access.png and b/docs/enterprise-guide/images/sandboxing/grant-sandboxed-access.png differ diff --git a/docs/enterprise-guide/images/sandboxing/select-user-attribute.png b/docs/enterprise-guide/images/sandboxing/select-user-attribute.png index 163e3fa04da3..6612dea457b3 100644 Binary files a/docs/enterprise-guide/images/sandboxing/select-user-attribute.png and b/docs/enterprise-guide/images/sandboxing/select-user-attribute.png differ diff --git a/docs/faq/general/can-i-request-a-new-feature.md b/docs/faq/general/can-i-request-a-new-feature.md index b3aea18ef282..7e07e6113d26 100644 --- a/docs/faq/general/can-i-request-a-new-feature.md +++ b/docs/faq/general/can-i-request-a-new-feature.md @@ -1,3 +1,5 @@ # Can I request a new feature? -Absolutely! New features can be added as issues in our [Github repo](https://github.com/metabase/metabase/issues). Before filing, take a minute to search and see if your feature has already been requested. If it has, it’s better to leave a thumbs up reaction on the existing issue than to file a new one. We use the thumbs up to measure how interested our community is in new features, so it’s better to have all of the thumbs ups captured on one issue. \ No newline at end of file +Absolutely! New features can be added as [issues in our Github repo][github-issues]. Before filing, please take a minute to search and see if your feature has already been requested. If it has, it's better to add a thumbs up on the existing issue than to file a new one. (We use thumbs up to measure how interested our community is in new features, so it's better to have them all in one place.) 
+ +[github-issues]: https://github.com/metabase/metabase/issues diff --git a/docs/faq/general/do-we-need-a-data-processing-agreement.md b/docs/faq/general/do-we-need-a-data-processing-agreement.md index b32af37b5c4f..593c1688d32c 100644 --- a/docs/faq/general/do-we-need-a-data-processing-agreement.md +++ b/docs/faq/general/do-we-need-a-data-processing-agreement.md @@ -1,3 +1,5 @@ # Do we need a Data Processing Agreement with Metabase to comply with GDPR? -Metabase is a self-hosted application, and as such we have no access to any data on your instance. We only collect [anonymized usage stats](../../information-collection.md) if you opted-in during installation, but even in that case, we see no personally identifiable data. As such, we are not a data processor for the purposes of GDPR. \ No newline at end of file +Metabase is a self-hosted application, and as such we have no access to any data on your instance. We only collect [anonymized usage stats][information-collection] if you opted-in during installation, but even in that case, we do not gather any personally-identifiable data. As such, we are not a data processor for the purposes of GDPR. + +[information-collection]: ../../information-collection.html diff --git a/docs/faq/general/does-metabase-do-x.md b/docs/faq/general/does-metabase-do-x.md index d62fe88a5c42..17af4a96a363 100644 --- a/docs/faq/general/does-metabase-do-x.md +++ b/docs/faq/general/does-metabase-do-x.md @@ -1,3 +1,5 @@ # Does Metabase do X? -If you’re not sure if Metabase has the capability you’re looking for, we would recommend asking in [Discourse](https://discourse.metabase.com/). If it’s something we don’t do, our friendly community may be able to point you towards a workaround, or the Github issue where the feature has been requested. \ No newline at end of file +If you're not sure whether Metabase can do something you need, please ask about it in our [Discourse forum][discourse]. 
If it's something we don't do, our community may be able to point you to a workaround or to a Github issue where the feature has been requested. + +[discourse]: https://discourse.metabase.com/ diff --git a/docs/faq/general/does-metabase-have-access-to-my-companys-data.md b/docs/faq/general/does-metabase-have-access-to-my-companys-data.md index 9153aea83ab7..13a9240fc949 100644 --- a/docs/faq/general/does-metabase-have-access-to-my-companys-data.md +++ b/docs/faq/general/does-metabase-have-access-to-my-companys-data.md @@ -1,5 +1,5 @@ # Does Metabase have access to my company's data? -If you run Metabase as a self-hosted instance, none of your data is collected or stored on Metabase servers. +If you run Metabase as a self-hosted instance, none of your data is collected or stored on Metabase servers. That said, you *can* opt in to share anonymous usage data with us, which helps us make decisions about product improvements. You can read more about it [here][data-collection]. -There is the option to opt-in to sharing anonymous usage data with us, which we use to make decisions about product improvements. You can read more about it [here](../../information-collection.md). \ No newline at end of file +[data-collection]: ../../information-collection.html diff --git a/docs/faq/general/how-do-i-ask-for-help.md b/docs/faq/general/how-do-i-ask-for-help.md index 3d7396007512..e4c31da364c9 100644 --- a/docs/faq/general/how-do-i-ask-for-help.md +++ b/docs/faq/general/how-do-i-ask-for-help.md @@ -1,7 +1,9 @@ # How do I ask for help? -The best place to start when asking for help are our [Troubleshooting guides](../../troubleshooting-guide/index.md). We’ve compiled a list of common problems that users experience, along with common symptoms and error messages to help you self-diagnose and resolve your issues. +The best place to start when asking for help are our [Troubleshooting guides][troubleshooting]. 
We've compiled a list of common problems that users experience, along with common symptoms and error messages to help you diagnose and resolve your issues. -As hard as we’ve tried to make the documentation as complete as possible, we know that you might need additional help or have a general question about Metabase not answered here. The best way to get help in these cases is to post on our [Discourse](https://discourse.metabase.com/) and harness the power of our community to get your question answered. +As hard as we've tried to make the documentation as complete as possible, we know that you might need additional help or have a general question about Metabase not answered here. If you're using the Enterprise Edition, please contact support via email or use the [contact form][contact]. If you're using the Open Source Edition, the best way to get help is to post on our [Discourse][discourse] and harness the power of our community. -If you're using the Enterprise Edition, then please contact support via email or use the [contact form](https://www.metabase.com/contact/). +[contact]: https://www.metabase.com/contact/ +[discourse]: https://discourse.metabase.com/ +[troubleshooting]: ../../troubleshooting-guide/index.html diff --git a/docs/faq/general/is-metabase-508-compliant.md b/docs/faq/general/is-metabase-508-compliant.md index 9920e1d93855..e9f1decdcce1 100644 --- a/docs/faq/general/is-metabase-508-compliant.md +++ b/docs/faq/general/is-metabase-508-compliant.md @@ -1,11 +1,16 @@ # Is Metabase accessible or 508 compliant? -Metabase strives for accessibility, but is not yet fully 508 compliant. Here is a quick summary of the specific areas where Metabase is not yet entirely compliant: +Metabase strives to be accessible, but is not yet fully compliant with [the US federal government's Section 508 standard][508-accessibility]. 
Some specific areas where we still have work to do include: -- The app does not have a method to allow screen readers to skip over repetitive navigation elements. -- Metabase is extremely close but not 100% compliant at providing text equivalents for all non-text elements in the app. -- Not all, but most, of the app's form elements are selectable by tabbing through elements. -- Metabase has minimal transition animations in it, but we have not yet conducted testing to determine the range of flickering to verify if it is always between 2 and 55 hertz. -- The app's data tables do not have row and column headers identified in markup. +- Metabase doesn't have a method to allow screen readers to skip over repetitive navigation elements. +- Metabase is extremely close but not 100% compliant at providing text equivalents for all non-text elements. +- Most of our form elements are selectable by tabbing through elements. +- Metabase has minimal transition animations in it, but we have not yet tested whether the range of flickering is always between 2 and 55 Hertz. +- Metabase's data tables do not have row and column headers identified in markup. - We do not yet have a published description of our accessibility and compatibility features. -- Also note that Metabase is a React-based web application, and cannot function without scripting (i.e., JavaScript) turned on. +- Since Metabase is a React-based web application, it cannot function without scripting (i.e., JavaScript) turned on. + +If you'd like to help us address these accessibility gaps, please see [our developers' guide][developers-guide].
+ +[508-accessibility]: https://section508.gov/ +[developers-guide]: /docs/latest/developers-guide.html diff --git a/docs/faq/general/supported-browsers.md b/docs/faq/general/supported-browsers.md index 935cef3bbec6..5a70c38b37c7 100644 --- a/docs/faq/general/supported-browsers.md +++ b/docs/faq/general/supported-browsers.md @@ -1,14 +1,10 @@ -## Browser Support +# Which browsers does Metabase support? -### Which browsers does Metabase support? +We try our best to make sure Metabase works in as many browsers as possible, but as this is the Internet, there may be little quirks from time to time in different settings. We believe Metabase works on these versions of these browsers and will attempt to fix specific bugs if any are found: -We try our best to make sure Metabase works in as many browsers as possible but as this is the internet, there may be little quirks from time to time in different settings. Here are the browsers and major supported versions we know Metabase will work on (and will attempt to fix browser specific bugs on if found). - -- Chrome (v 70+) -- Internet Explorer 11. (Note: we'll be dropping support for IE11 in Metabase 0.40. It'll likely still work, but you may see some unexpected behavior.) +- Chrome (v70+) - Firefox (v68+) -- Microsoft Edge (17+) +- Microsoft Edge (v17+) - Safari (v11+) -Metabase _may_ run perfectly fine on older versions of your browser of choice or a specific browser not listed above, but your mileage may vary. We always recommend you use the most up to date browser you can. - +Metabase may run perfectly well on older versions of these browsers or on specific browsers not listed above, but your mileage may vary. (In particular, we ended support for IE11 in Metabase 0.40: it mostly still works, but you may see some unexpected behavior.) We always recommend you use the most up to date browser you can. 
diff --git a/docs/faq/general/what-if-i-find-a-bug.md b/docs/faq/general/what-if-i-find-a-bug.md index 6589d32f562b..03ea675a6dfa 100644 --- a/docs/faq/general/what-if-i-find-a-bug.md +++ b/docs/faq/general/what-if-i-find-a-bug.md @@ -1,3 +1,5 @@ # What if I find a bug? -If you think something isn’t working the way it’s supposed to, take a look at our [bug filing](../../troubleshooting-guide/bugs.md) guide. \ No newline at end of file +If you think something isn't working the way it's supposed to, please [file a bug report][filing-bugs]. + +[filing-bugs]: ../../troubleshooting-guide/bugs.html diff --git a/docs/faq/general/what-languages-can-be-used-with-metabase.md b/docs/faq/general/what-languages-can-be-used-with-metabase.md index fa5e22b33ff8..e651389d0d5a 100644 --- a/docs/faq/general/what-languages-can-be-used-with-metabase.md +++ b/docs/faq/general/what-languages-can-be-used-with-metabase.md @@ -1,16 +1,16 @@ # What languages can be used with Metabase? -Thanks to our amazing user community, Metabase has been translated into a variety of languages. Due to the nature of how we collect translations (more on that in a minute), available languages may be added or removed during major releases, depending on the translation coverage. +Thanks to our amazing user community, Metabase has been translated into many different languages. Due to [the way we collect translations](#policy-for-adding-and-removing-translations), languages may be added or removed during major releases depending on translation coverage. 
## Currently available languages -The languages you can currently pick from in Metabase are: +The languages you can currently pick from are: - English (default) - Bulgarian - Catalan -- Chinese, simplified -- Chinese, traditional +- Chinese (simplified) +- Chinese (traditional) - Czech - Dutch - Farsi/Persian @@ -22,6 +22,7 @@ The languages you can currently pick from in Metabase are: - Polish - Portuguese - Russian +- Serbian - Slovak - Spanish - Swedish @@ -31,10 +32,12 @@ The languages you can currently pick from in Metabase are: ## Policy for adding and removing translations -The community contributes to Metabase translations on our [POEditor project](https://poeditor.com/join/project/ynjQmwSsGh). If you're a fan of Metabase and would like to see it made available in a language you're fluent in, we'd love your help! +Our community contributes to Metabase translations on our [POEditor project][metabase-poe]. If you'd like to help make Metabase available in a language you're fluent in, we'd love your help! -For new translations to be added to Metabase they must reach 100%. Once they have, we'll add them in the next major or minor release of Metabase. All _existing_ translations in Metabase _must stay at 100%_ to continue being included in the next _major_ version of Metabase. This is to ensure that no one encounters a confusing mishmash of English and another language when using Metabase. +For a new translation to be added to Metabase, it must reach 100%. Once it does, we add it in the next major or minor release of Metabase. All _existing_ translations in Metabase _must stay at 100%_ to continue being included in the next _major_ version of Metabase. This rule ensures that no one encounters a confusing mishmash of English and another language when using Metabase. 
-We understand that this is a high bar, so we commit to making sure that before each major release, any additions or changes to text in the product will be completed at least 10 calendar days before the final release ships, at which point we notify all translators that a new release will be happening soon. +We understand that this is a high bar, so we commit to making sure that before each major release, any additions or changes to text in the product are completed at least 10 calendar days before the release ships, at which point we notify all translators that a new release will be happening soon. -Note that while we will only remove languages in major releases, we will add them back in for minor releases — so it's always a good time to jump in and start translating! +Note that while we only remove languages in major releases, we are happy to add them back for minor releases, so it's always a good time to jump in and start translating. + +[metabase-poe]: https://poeditor.com/join/project/ynjQmwSsGh diff --git a/docs/faq/setup/how-do-i-integrate-with-sso.md b/docs/faq/setup/how-do-i-integrate-with-sso.md index 3892a584329d..ade48abaf31d 100644 --- a/docs/faq/setup/how-do-i-integrate-with-sso.md +++ b/docs/faq/setup/how-do-i-integrate-with-sso.md @@ -1,3 +1,8 @@ # How do I integrate Metabase and our single-sign on (SSO) option? -The open-source edition of Metabase allows you to [integrate with LDAP or Google sign-in](../../administration-guide/10-single-sign-on.md). If you’re using a [SAML](../../enterprise-guide/authenticating-with-saml.md) or [JWT](../../enterprise-guide/authenticating-with-jwt.md) SSO solution, you will need the [Enterprise Edition](https://www.metabase.com/enterprise/). +The Open Source Edition of Metabase allows you to [integrate with LDAP or Google sign-in][single-sign-on]. If you're using [SAML][saml-auth] or [JWT][jwt-auth] for SSO, you will need the [Enterprise Edition][enterprise-edition]. 
+ +[enterprise-edition]: /enterprise/ +[jwt-auth]: ../../enterprise-guide/authenticating-with-jwt.html +[saml-auth]: ../../enterprise-guide/authenticating-with-saml.html +[single-sign-on]: ../../administration-guide/10-single-sign-on.html diff --git a/docs/faq/setup/i-am-having-trouble-running-metabase.md b/docs/faq/setup/i-am-having-trouble-running-metabase.md index 8fc4149bcfa6..5db5950bc582 100644 --- a/docs/faq/setup/i-am-having-trouble-running-metabase.md +++ b/docs/faq/setup/i-am-having-trouble-running-metabase.md @@ -1,3 +1,6 @@ # I'm having trouble running Metabase. -If you’re having trouble running Metabase, we recommend starting with our [troubleshooting guides](../../troubleshooting-guide/index.md). +We have tried to make it easy to set up and run Metabase, but it does need to interact with other systems. If you're having trouble running it, please take a look at our [troubleshooting guides][troubleshooting-guides] and our [tutorial articles][learn]. + +[learn]: /learn/ +[troubleshooting-guides]: ../../troubleshooting-guide/index.html diff --git a/docs/faq/setup/what-is-h2.md b/docs/faq/setup/what-is-h2.md index 9979d1696709..32922e1a6e40 100644 --- a/docs/faq/setup/what-is-h2.md +++ b/docs/faq/setup/what-is-h2.md @@ -1,3 +1,5 @@ # What's H2? -H2 is the underlying application database that’s packaged with Metabase. H2 is a lightweight, in-memory database: perfect for getting spun up quickly, not so perfect for long-term usage. We recommend that you [migrate away from H2](../../operations-guide/migrating-from-h2.md) for production instances of Metabase. +H2 is a lightweight, in-memory database; it's perfect for getting spun up quickly, but not so good for long-term usage. By default, Metabase uses H2 to store its internal data. If you want to run Metabase in production, we recommend that you [migrate off H2][migrate-off-h2]. 
+ +[migrate-off-h2]: ../../operations-guide/migrating-from-h2.html diff --git a/docs/faq/setup/when-should-i-migrate-h2.md b/docs/faq/setup/when-should-i-migrate-h2.md index e7461ea93370..48b06a8e18d8 100644 --- a/docs/faq/setup/when-should-i-migrate-h2.md +++ b/docs/faq/setup/when-should-i-migrate-h2.md @@ -1,3 +1,5 @@ -# When should I migrate H2 to mySQL or Postgres? +# When should I migrate from H2 to MySQL or Postgres? -As soon as you’re planning on using Metabase for anything other than testing. H2 is fairly easily corruptible, so it’s better to be safe than sorry when running Metabase in production. The migration is fairly simple, and [full instructions](../../operations-guide/migrating-from-h2.md) are available. +As soon as you plan to use Metabase for anything other than testing: H2 is neither as fast nor as robust as production database systems. The migration is fairly simple; please see [this guide][migrate-off-h2] for instructions. + +[migrate-off-h2]: ../../operations-guide/migrating-from-h2.html diff --git a/docs/faq/setup/which-databases-does-metabase-support.md b/docs/faq/setup/which-databases-does-metabase-support.md index e37462941bb1..1bfbd6714494 100644 --- a/docs/faq/setup/which-databases-does-metabase-support.md +++ b/docs/faq/setup/which-databases-does-metabase-support.md @@ -1,5 +1,6 @@ # Which databases does Metabase support? -See [our list of officially supported databases](../../administration-guide/01-managing-databases.md#officially-supported-databases). +Please see [our list of officially supported databases][supported-databases]. There are also [community-built database drivers][community-drivers] for databases that we do not currently support, and you are always welcome to build your own. -You may find some [community-built database drivers](../../developers-guide-drivers.md) for databases that we do not currently support - and you are always welcome to build your own!
+[community-drivers]: ../../developers-guide-drivers.html +[supported-databases]: ../../administration-guide/01-managing-databases.html#officially-supported-databases diff --git a/docs/faq/start.md b/docs/faq/start.md index a366bcf45bd4..2240c69ad964 100644 --- a/docs/faq/start.md +++ b/docs/faq/start.md @@ -19,14 +19,14 @@ Here is a list of some frequently asked questions about Metabase. - [Which databases does Metabase support?](setup/which-databases-does-metabase-support.md) - [I'm having trouble running Metabase.](setup/i-am-having-trouble-running-metabase.md) - [What's H2?](setup/what-is-h2.md) -- [When should I migrate H2 to MySQL or Postgres?](setup/when-should-i-migrate-h2.md) +- [When should I migrate from H2 to MySQL or Postgres?](setup/when-should-i-migrate-h2.md) - [How do I integrate Metabase and our single-sign on (SSO) solution?](setup/how-do-i-integrate-with-sso.md) ## Using Metabase - [How do I reset my password?](using-metabase/how-do-i-reset-my-password.md) -- [How do I ask questions about my data?](using-metabase/how-do-i-ask-questions.md) -- [I'm not getting the email notifications I expect to!](using-metabase/i-am-not-getting-email-notifications.md) +- [How do I ask questions about my organization's data?](using-metabase/how-do-i-ask-questions.md) +- [I'm not getting the email notifications I expect to.](using-metabase/i-am-not-getting-email-notifications.md) - [How do I answer questions about data that lives in multiple databases?](using-metabase/how-do-i-answer-questions-when-data-is-in-multiple-databases.md) - [How do I answer questions where I need to join tables together?](using-metabase/how-do-i-answer-questions-with-joins.md) -- [I’m trying to ask a question, but it looks like I can’t access some of the data I need.](using-metabase/cant-access-data-i-need.md) +- [I'm trying to ask a question, but it looks like I can't access some of the data I need.](using-metabase/cant-access-data-i-need.md) diff --git 
a/docs/faq/using-metabase/cant-access-data-i-need.md b/docs/faq/using-metabase/cant-access-data-i-need.md index c7c857a7a167..532a108f0b34 100644 --- a/docs/faq/using-metabase/cant-access-data-i-need.md +++ b/docs/faq/using-metabase/cant-access-data-i-need.md @@ -1,12 +1,14 @@ -# I’m trying to ask a question, but it looks like I can’t access some of the data I need. +# I'm trying to ask a question, but it looks like I can't access some of the data I need -There are a few reasons that this may be occurring: +This can occur for several reasons: -- The data source containing the data may not be connected to Metabase. If you are an administrator, you can see a list of all of your connected data sources by clicking the gear icon, navigating to the Admin Panel, then clicking Databases in the top navigation. -- You may not have permission to access the data in question. Your administrator may need to [adjust your access](../../administration-guide/05-setting-permissions.md) by changing or modifying your user group. -- The data may live in a different table other than the one you selected to begin the question. - - If you are using a Metabase version earlier than 0.33, you will need to either write a SQL query that contains joins, or have your Metabase administrator [set up foreign keys](../../administration-guide/03-metadata-editing.md)). - - If you are using Metabase 0.33 or above, you can perform joins using the Notebook editor. Note that you will need to choose a field to join on, or link, the two tables together. For instance, if you want to combine a Customer table with an Order table, you might select the ID field in the Customer table and link it to the customer_id field in the order table. - -* The data may live in a different database than the one you selected to begin the question. Metabase does not currently support joining across multiple databases. Generally, it is better to bring related data into the same database. 
+- *The data source containing the data may not be connected to Metabase.* If you are an administrator, you can see a list of all of your connected data sources by clicking the gear icon, navigating to the Admin Panel, then clicking Databases in the top navigation. + +- *You may not have permission to access the data in question.* Your administrator may need to [adjust your permissions][setting-permissions] by changing or modifying your user group. +- *The data may live in a table other than the one you began the question with.* + - If you are using Metabase version 0.32 or earlier, you will need to either write a SQL query that contains joins, or have your Metabase administrator [set up foreign keys][editing-metadata]. + - If you are using Metabase version 0.33 or above, you can perform joins using the Notebook Editor. + +[editing-metadata]: ../../administration-guide/03-metadata-editing.html +[setting-permissions]: ../../administration-guide/05-setting-permissions.html diff --git a/docs/faq/using-metabase/how-do-i-answer-questions-when-data-is-in-multiple-databases.md b/docs/faq/using-metabase/how-do-i-answer-questions-when-data-is-in-multiple-databases.md index e60255742d43..83f24eb9101d 100644 --- a/docs/faq/using-metabase/how-do-i-answer-questions-when-data-is-in-multiple-databases.md +++ b/docs/faq/using-metabase/how-do-i-answer-questions-when-data-is-in-multiple-databases.md @@ -1,3 +1,3 @@ # How do I answer questions about data that lives in multiple databases? -Metabase does not currently support joining across multiple databases. Generally, it is better to bring related data into the same database. +Metabase doesn't currently support joining tables across multiple databases. It's generally better to bring related data into a single database.
diff --git a/docs/faq/using-metabase/how-do-i-answer-questions-with-joins.md b/docs/faq/using-metabase/how-do-i-answer-questions-with-joins.md index 376dda10ba6b..bf456f93c645 100644 --- a/docs/faq/using-metabase/how-do-i-answer-questions-with-joins.md +++ b/docs/faq/using-metabase/how-do-i-answer-questions-with-joins.md @@ -1,5 +1,8 @@ # How do I answer questions where I need to join tables together? -If you are using a Metabase version earlier than 0.33, you will need to either write a SQL query that contains joins, or have your Metabase administrator set up foreign keys (they can read more about that [here](../../administration-guide/03-metadata-editing.md)). +If you're using Metabase version 0.32 or earlier, you'll need to either write a SQL query that contains joins, or have your Metabase administrator set up foreign keys (they can read more about that [here][editing-metadata]). -If you are using Metabase 0.33 or above, you can [perform joins using the Notebook editor](https://www.metabase.com/blog/joining-tables/index.html). +If you are using Metabase 0.33 or above, you can [perform joins using the Notebook editor][notebook-editor-joins], but you should probably upgrade anyway. + +[editing-metadata]: ../../administration-guide/03-metadata-editing.html +[notebook-editor-joins]: /learn/questions/joins-in-metabase.html diff --git a/docs/faq/using-metabase/how-do-i-ask-questions.md b/docs/faq/using-metabase/how-do-i-ask-questions.md index 714875cd22ca..cb072fea9c24 100644 --- a/docs/faq/using-metabase/how-do-i-ask-questions.md +++ b/docs/faq/using-metabase/how-do-i-ask-questions.md @@ -1,3 +1,5 @@ # How do I ask questions about my organization's data? -Metabase provides a variety of ways to ask questions about your organization’s data, from using our GUI interface to construct a question, to writing a SQL query from scratch. Read all about how to ask questions [here](../../users-guide/04-asking-questions.md). 
+Metabase provides many ways to ask questions about your organization's data, from our GUI interface to writing a SQL query from scratch. Read all about how to ask questions in [our users' guide][users-guide-asking-questions]. + +[users-guide-asking-questions]: ../../users-guide/04-asking-questions.html diff --git a/docs/faq/using-metabase/how-do-i-reset-my-password.md b/docs/faq/using-metabase/how-do-i-reset-my-password.md index d1a5f3c14895..cc600d0a2a57 100644 --- a/docs/faq/using-metabase/how-do-i-reset-my-password.md +++ b/docs/faq/using-metabase/how-do-i-reset-my-password.md @@ -1,32 +1,5 @@ -## How do I reset my password? +# How do I reset my password? -### Using the Mac App +Please see [this troubleshooting guide][cant-log-in] for help. - -If you're running the MacOS application on your laptop, you can click on the Help menu item and click `Reset Password`. - -### Using the web app as a normal user - -If you're having trouble logging in due to a forgotten password, click the link that reads, "I seem to have forgotten my password" in the lower-right of the log-in screen. If your Metabase administrator has already [configured your email settings](../../administration-guide/02-setting-up-email.md), you'll receive a password reset email. If email has not been configured, you will need to contact a Metabase admin to perform a password reset via Admin Panel > People. - -### Using the web app as an administrator - -If you're the administrator of Metabase and have access to the server console, but have forgotten the password for your admin account, then you can get a reset token, which can be used to setup a new password. - -To get the token, stop the running Metabase application, then start Metabase with the parameters `reset-password email@example.com` (where "email@example.com" is the email associated with the admin account).
- -Example: `java -jar metabase.jar reset-password email@example.com` - -This will return a token and stop Metabase again, like this: - -``` -... -Resetting password for email@example.com... - -OK [[[1_7db2b600-d538-4aeb-b4f7-0cf5b1970d89]]] -``` - -Now start Metabase normally again and navigate to the URL where you're running it, with the following path appended: `/auth/reset_password/:token`, where ":token" is the token that was generated from the step above. - -Example: `https://metabase.example.com/auth/reset_password/1_7db2b600-d538-4aeb-b4f7-0cf5b1970d89` - -You should now see a page where you can input a new password for the admin account. +[cant-log-in]: ../../troubleshooting-guide/cant-log-in.html diff --git a/docs/faq/using-metabase/i-am-not-getting-email-notifications.md b/docs/faq/using-metabase/i-am-not-getting-email-notifications.md index 86493268de1f..1f55fdddc5a5 100644 --- a/docs/faq/using-metabase/i-am-not-getting-email-notifications.md +++ b/docs/faq/using-metabase/i-am-not-getting-email-notifications.md @@ -1,3 +1,5 @@ -# I’m not getting the email notifications I expect to! +# I'm not getting the email notifications I expect to -Since Metabase is a self-hosted tool, things like email notifications are configured by someone within your organization — if you reach out to them, they should be able to check the configuration settings and make sure everything is set up correctly. It may help to point them to [this guide](../../administration-guide/02-setting-up-email.md) on enabling email functionality. \ No newline at end of file +Since Metabase is a self-hosted tool, email notifications must be configured by someone in your organization. They should be able to check the configuration settings and make sure everything is set up correctly; it may help to point them to [this guide][setting-up-email]. 
+ +[setting-up-email]: ../../administration-guide/02-setting-up-email.html diff --git a/docs/operations-guide/environment-variables.md b/docs/operations-guide/environment-variables.md index be242ceba1c3..2fd6317191db 100644 --- a/docs/operations-guide/environment-variables.md +++ b/docs/operations-guide/environment-variables.md @@ -681,7 +681,7 @@ Server port, usually 389 or 636 if SSL is used. #### `MB_LDAP_SECURITY` -Type: string
+Type: string (`"none"`, `"ssl"`, `"starttls"`)
Default: `"none"` Use SSL, TLS or plain text. diff --git a/docs/operations-guide/log-configuration.md b/docs/operations-guide/log-configuration.md index 4c72b442e8ce..d7edb2c81768 100644 --- a/docs/operations-guide/log-configuration.md +++ b/docs/operations-guide/log-configuration.md @@ -1,39 +1,69 @@ -# Configuring Logging Level +# Metabase logs -By default, Metabase logs quite a bit of information. Luckily, Metabase uses [Log4j 2](https://logging.apache.org/log4j/2.x/) under the hood, meaning the logging is completely configurable. +Metabase logs quite a bit of information by default. It uses [Log4j 2][log4j] under the hood, so you can configure how much information Metabase logs. -Metabase's default logging configuration can be found [here](https://github.com/metabase/metabase/blob/master/resources/log4j2.xml). You can override this XML file and tell Metabase to use your own logging configuration file by passing a `-Dlog4j.configurationFile` argument when running Metabase: +## Configuring Logging Level - java -Dlog4j.configurationFile=file:/path/to/custom/log4j2.xml -jar metabase.jar +Here is Metabase's [default logging configuration][default-log-config]. You can override this XML file and tell Metabase to use your own logging configuration file by passing a `-Dlog4j.configurationFile` argument when running Metabase. For example, if your custom XML file is found in `/path/to/custom/log4j2.xml`, you can use it like so: -The easiest way to get started customizing logging would be to use a copy of default `log4j2.xml` file linked to above and adjust that to meet your needs. Keep in mind that you'll need to restart Metabase for changes to the file to take effect. +``` +java -Dlog4j.configurationFile=file:/path/to/custom/log4j2.xml -jar metabase.jar +``` + +To get started customizing the logs, make a [copy of the default `log4j2.xml` file][default-log-config] and adjust it to meet your needs. You'll need to restart Metabase for changes to the file to take effect. 
See Log4j's docs for info on [log levels][levels]. + +You can set different log levels for different areas of the application, e.g.,: + +``` + + + + + + + + + + + + + + + +``` -# Using Log4j 2 with docker +Check out [How to read the logs][read-logs]. -Before running the Metabase docker image, you'll need to pass the custom `log4j.configurationFile` argument. Add a `JAVA_OPTS=-Dlog4j.configurationFile=file:/path/to/custom/log4j2.xml` to the environment variables of the container, like this: +## Jetty logs - docker run -p 3000:3000 -v $PWD/logging_config:/metabase.db -e JAVA_OPTS=-Dlog4j.configurationFile=file:///metabase.db/log4j2.xml metabase/metabase` +You can configure Metabase's web server to provide more detail in the logs by setting the log level to `DEBUG`. Just keep in mind that Jetty's debug logs can be really chatty, which can make it difficult to find the data you're looking for. + +To get Jetty logs, add the following lines to the Log4J2 XML file in the node: -When using docker-compose: ``` -metabase: - image: metabase/metabase:v0.37.4 - container_name: metabase - hostname: metabase - volumes: - - /dev/urandom:/dev/random:ro - - $PWD/logging_config:/metabase.db - ports: - - 3000:3000 - environment: - - "JAVA_OPTS=-Dlog4j.configurationFile=file:///metabase.db/log4j2.xml" + ``` -**IMPORTANT**: when using containers, logs need to be written into the /metabase.db directory. It's the only directory the Metabase user can write to (the user here being the one that executes that Metabase JAR inside the container). +## Using Log4j 2 with Docker + +Before running the Metabase Docker image, you'll need to pass the custom `log4j.configurationFile` argument. 
Add a `JAVA_OPTS=-Dlog4j.configurationFile=file:/path/to/custom/log4j2.xml` to the environment variables of the container, like this: + +``` +docker run -p 3000:3000 -v $PWD/my_log4j2.xml:/tmp/my_log4j2.xml -e JAVA_OPTS=-Dlog4j.configurationFile=file:///tmp/my_log4j2.xml metabase/metabase +``` -# Configuring Emoji Logging +## Disable emoji or colorized logging -By default Metabase will include emoji characters in logs. You can disable this by using the following environment variable: +By default Metabase will include emoji characters in logs. You can disable emoji by using the `MB_EMOJI_IN_LOGS` environment variable: + +### Configuring Emoji Logging + +``` +export MB_EMOJI_IN_LOGS="false" +java -jar metabase.jar +``` - export MB_EMOJI_IN_LOGS="false" - java -jar metabase.jar +[default-log-config]: https://github.com/metabase/metabase/blob/master/resources/log4j2.xml +[levels]: https://logging.apache.org/log4j/2.x/manual/customloglevels.html +[log4j]: https://logging.apache.org/log4j/2.x/ +[read-logs]: ../troubleshooting-guide/server-logs.html diff --git a/docs/operations-guide/migrating-from-h2.md b/docs/operations-guide/migrating-from-h2.md index 2ff6c5538602..0f907db01c96 100644 --- a/docs/operations-guide/migrating-from-h2.md +++ b/docs/operations-guide/migrating-from-h2.md @@ -1,8 +1,25 @@ -# Migrating from using the H2 database to MySQL or Postgres +# Migrating from using the H2 database to Postgres or MySQL/MariaDB -If you decide to use the default application database (H2) when you initially start using Metabase, but later decide that you'd like to switch to a more production-ready database such as MySQL or Postgres, we make the transition easy for you. +If you decide to use the default application database (H2) when you initially start using Metabase, but later decide that you'd like to switch to a more production-ready database such as Postgres or MySQL/MariaDB, you're in the right place.
-Metabase provides a custom migration command for upgrading H2 application database files by copying their data to a new database. Here's what you'll want to do: +## Before you migrate + +- Avoid upgrading and migrating at the same time, since it can cause problems if one of the database schemas doesn't match. +- You must be able to connect to the target Postgres or MySQL/MariaDB database in whatever environment you're running this migration command in. So, if you are attempting to move the data to a cloud database, make sure you can connect to that database. + +### Migrating when using Docker + +We recommend running the migration outside of Docker. You'll need to copy the H2 file out of the Docker container before migrating. For example, if the container is called metabase, you'd run: + +``` +docker cp metabase:/metabase.db/metabase.db.mv.db ./ +``` + +The above command would copy the database file to the directory you ran the command from. With your database file outside of the container, all you need to do is follow the "How to migrate" steps below. + +## How to migrate + +Metabase provides a custom migration command for upgrading H2 application database files by copying their data to a new database. Here's what you'll do: 1. Shutdown your Metabase instance so that it's not running. This ensures no accidental data gets written to the db while migrating. 2. Make a backup copy of your H2 application database by following the instructions in [Backing up Metabase Application Data](backing-up-metabase-application-data.md). Safety first! @@ -15,15 +32,23 @@ export MB_DB_PORT=5432 export MB_DB_USER= export MB_DB_PASS= export MB_DB_HOST=localhost -java -jar metabase.jar load-from-h2 /path/to/metabase.db # do not include .mv.db or .h2.db suffix +java -jar metabase.jar load-from-h2 /path/to/metabase.db # do not include .mv.db ``` -It is expected that you will run the command against a brand-new (empty!) 
database; Metabase will handle all of the work of creating the database schema and migrating the data for you. +Note that the file name of the database file itself might be `/path/to/metabase.db.mv.db`, but when running the `load-from-h2` command, you need to truncate the path to `/path/to/metabase.db`. + +Metabase expects that you'll run the command against a brand-new (empty) database; it'll create the database schema and migrate the data for you. + +### PostgreSQL notes + +- Minimum version is PostgreSQL 9.4, since the code that handles these migrations uses a command that is only available in version 9.4 or newer. + +### MySQL/MariaDB notes + +- MySQL minimum recommended version is 5.7.7. +- MariaDB minimum recommended version is 10.2.2. +- And the following database settings are required (the settings are the default in the above recommended versions): `utf8mb4_unicode_ci` collation, `utf8mb4` character set, and `innodb_large_prefix=ON`. -#### Notes +### Troubleshooting -- Avoid upgrading and migrating at the same time, since it can cause problems with one of database schemas not matching. -- It is required that you can connect to the target MySQL or Postgres database in whatever environment you are running this migration command in. So, if you are attempting to move the data to a cloud database, make sure you take that into consideration. -- For MySQL or MariaDB, the minimum recommended version is MySQL 5.7.7 and MariaDB 10.2.2, while the following is required: `utf8mb4_unicode_ci` collation, `utf8mb4` character set, and `innodb_large_prefix=ON`. -- For PostgreSQL, the minimum version is PostgreSQL 9.4, since the code that handles these migrations uses a command that is only available in version 9.4 or newer. -- H2 automatically adds a `.h2.db` or `.mv.db` extension to the database path you specify, so make sure the path to the DB file you pass to the command _does not_ include it. 
For example, if you have a file named `/path/to/metabase.db.h2.db`, call the command with `load-from-h2 /path/to/metabase.db`. +If you get an error, check out [this troubleshooting guide](../troubleshooting-guide/loading-from-h2.md). diff --git a/docs/operations-guide/running-metabase-on-heroku.md b/docs/operations-guide/running-metabase-on-heroku.md index a6967bd2a303..00645577350c 100644 --- a/docs/operations-guide/running-metabase-on-heroku.md +++ b/docs/operations-guide/running-metabase-on-heroku.md @@ -37,7 +37,7 @@ Now that you’ve installed Metabase, it’s time to [set it up and connect it t ### Troubleshooting * If your Metabase instance is getting stuck part way through the initialization process and only every shows roughly 30% completion on the loading progress. - * The most likely culprit here is a stale database migrations lock that was not cleared. This can happen if for some reason Heroku kills your Metabase dyno at the wrong time during startup. __To fix it:__ you can either clear the lock using the built-in [release-locks](../troubleshooting-guide/application-database.md) command line function, or if needed you can login to your Metabase application database directly and delete the row in the `DATABASECHANGELOGLOCK` table. Then just restart Metabase. + * The most likely culprit here is a stale database migrations lock that was not cleared. This can happen if for some reason Heroku kills your Metabase dyno at the wrong time during startup. __To fix it:__ you can either clear the lock using the built-in [release-locks](../troubleshooting-guide/loading-from-h2.md) command line function, or if needed you can login to your Metabase application database directly and delete the row in the `DATABASECHANGELOGLOCK` table. Then just restart Metabase. 
## Deploying New Versions of Metabase @@ -51,7 +51,7 @@ Here's each step: * Clone the metabase-deploy repo to your local machine: ```bash -git clone https://github.com/metabase/metabase-deploy.git +git clone https://github.com/metabase/metabase-deploy.git cd metabase-deploy ``` @@ -97,7 +97,7 @@ git push master ### Database Syncs -You may want to ensure that your staging database is synced with production before you deploy a new version. Luckily with Heroku you can restore a backup from one app to another. +You may want to ensure that your staging database is synced with production before you deploy a new version. Luckily with Heroku you can restore a backup from one app to another. For example, assuming your production app is named `awesome-metabase-prod`, this command will create a backup: @@ -117,12 +117,12 @@ Once this is done, restart your staging app and begin testing. ### Pinning Metabase versions -For whatever reason, should you want to pin Metabase to a specific version, you can append the version number to the buildpack URL (as long as that tag exists in the [`metabase-buildpack`](https://github.com/metabase/metabase-buildpack) repository). +For whatever reason, should you want to pin Metabase to a specific version, you can append the version number to the buildpack URL (as long as that tag exists in the [`metabase-buildpack`](https://github.com/metabase/metabase-buildpack) repository). 
If you haven't cloned the `metabase-deploy` repository, this can be done with the Heroku CLI: ```bash -heroku buildpacks:set --index 2 https://github.com/metabase/metabase-buildpack#0.34.1 \ +heroku buildpacks:set --index 2 https://github.com/metabase/metabase-buildpack#0.34.1 \ --app ``` diff --git a/docs/troubleshooting-guide/application-database.md b/docs/troubleshooting-guide/application-database.md deleted file mode 100644 index 9cfa05834a5b..000000000000 --- a/docs/troubleshooting-guide/application-database.md +++ /dev/null @@ -1,54 +0,0 @@ -## Specific Problems: - - -### Metabase fails to start due to database locks - -Sometimes Metabase will fail to complete its startup due to a database lock that was not cleared properly. The error message will look something like: - - liquibase.exception.DatabaseException: liquibase.exception.LockException: Could not acquire change log lock. - -When this happens, go to a terminal where Metabase is installed and run: - - java -jar metabase.jar migrate release-locks - -in the command line to manually clear the locks. Then restart your Metabase instance. - -### Metabase H2 application database gets corrupted - -Because H2 is an on-disk database, it is sensitive to filesystem errors. Sometimes drives get corrupted, or the file doesn't get flushed correctly, which can result in a corrupted database. In these situations, you'll see errors on startup. 
These vary, but one example is -``` -myUser@myIp:~$ java -cp metabase.jar org.h2.tools.RunScript -script whatever.sql -url jdbc:h2:~/metabase.db -Exception in thread "main" org.h2.jdbc.JdbcSQLException: Row not found when trying to delete from index """"".I37: ( /* key:7864 */ X'5256470012572027c82fc5d2bfb855264ab45f8fec4cf48b0620ccad281d2fe4', 165)" [90112-194] - at org.h2.message.DbException.getJdbcSQLException(DbException.java:345) - [etc] -``` - -Not all H2 errors are recoverable (which is why if you're using H2, _please_ have a backup strategy for the application database file). To attempt to recover a corrupted H2 file, try the below. - -``` -java -cp metabase.jar org.h2.tools.Recover -mv metabase.db.mv.db metabase.old.db -touch metabase.db.mv.db -java -cp target/uberjar/metabase.jar org.h2.tools.RunScript -script metabase.db.h2.sql -url jdbc:h2:`pwd`/metabase.db -``` - -NOTE: If you are using a legacy Metabase H2 application database (where the database file is named 'metabase.db.h2.db'), use the below instead. - -``` -java -cp metabase.jar org.h2.tools.Recover -mv metabase.db.h2.db metabase.old.db -touch metabase.db.h2.db -java -cp target/uberjar/metabase.jar org.h2.tools.RunScript -script metabase.db.h2.sql -url jdbc:h2:`pwd`/metabase.db;MV_STORE=FALSE -``` - - -### Metabase fails to connect to H2 Database on Windows 10 - -In some situations the Metabase JAR needs to be unblocked so it has permissions to create local files for the application database. - -On Windows 10, if you see an error message like - - Exception in thread "main" java.lang.AssertionError: Assert failed: Unable to connect to Metabase DB. - -when running the JAR, you can unblock the file by right-clicking, clicking "Properties," and then clicking "Unblock." -See Microsoft's documentation [here](https://blogs.msdn.microsoft.com/delay/p/unblockingdownloadedfile/) for more details on unblocking downloaded files. 
diff --git a/docs/troubleshooting-guide/bugs.md b/docs/troubleshooting-guide/bugs.md index 24ad03bfee58..8e93f93cb93b 100644 --- a/docs/troubleshooting-guide/bugs.md +++ b/docs/troubleshooting-guide/bugs.md @@ -1,29 +1,17 @@ -## Reporting a bug +# Reporting a bug -If you come across something that looks like a bug, start by searching our [Github issues](https://github.com/metabase/metabase/issues) to see if it has already been reported. If it has, please let us know you're experiencing the same issue by reacting with a thumbs up emoji or adding a comment providing additional information. +If you come across something that looks like a bug, please start by searching our [Github issues][metabase-issues] to see if it has already been reported. If it has, please let us know you're experiencing the same issue by reacting with a thumbs up emoji or adding a comment providing additional information. -If the bug has not yet been reported, go ahead and [open a bug report](https://github.com/metabase/metabase/issues/new/choose). We suggest collecting the following information to help us reproduce the issue: +If the bug has not yet been reported, go ahead and [open a bug report][metabase-file-bug]. The following information will help us reproduce your issue: 1. Metabase server logs 2. Javascript console logs -3. Can it be reproduced on the sample dataset? +3. Whether it can be reproduced on the sample dataset 4. Your Metabase version -5. Where Metabase is running (Docker image, AWS Elastic Beanstalk, Heroku, Linux/Ubuntu, etc) -6. Which database is used -7. What browser version +5. Where Metabase is running (Docker image, AWS Elastic Beanstalk, Heroku, Linux/Ubuntu, etc.) +6. Which database you are using +7. What browser version you are using 8. 
Screenshots (if relevant) -## Helpful tidbits - -### Accessing the Metabase server logs - -While you can always look for the logs Metabase leaves on your server file system (or however you collect logs), if you are logged into Metabase with an admin account you can also access them from the Logs tab in the Troubleshooting section of the Admin Panel. To get to the Admin Panel, click the gear icon in the top-right of Metabase. - -### Checking for Javascript console errors - -Metabase will send debugging information and errors to your browser's developer console. To open the developer console, follow the instructions for your web browser of choice: - -- [Chrome](https://developers.google.com/web/tools/chrome-devtools/open#console) -- [Firefox](https://developer.mozilla.org/en-US/docs/Tools/Web_Console/Opening_the_Web_Console) -- [Safari](https://support.apple.com/guide/safari-developer/develop-menu-dev39df999c1/mac) -- [Edge](https://docs.microsoft.com/en-us/microsoft-edge/devtools-guide-chromium) +[metabase-file-bug]: https://github.com/metabase/metabase/issues/new/choose +[metabase-issues]: https://github.com/metabase/metabase/issues diff --git a/docs/troubleshooting-guide/cant-log-in.md b/docs/troubleshooting-guide/cant-log-in.md new file mode 100644 index 000000000000..b7d6dc7f3924 --- /dev/null +++ b/docs/troubleshooting-guide/cant-log-in.md @@ -0,0 +1,114 @@ +# People can't log in to Metabase + +You should be able to log in to Metabase, but: + +- you can't see the login page, or +- your credentials aren't accepted. + +## Do you know who is managing your login? + +**Background:** Metabase can manage accounts itself, or administrators can configure it to let people log in using credentials managed by some other service, such as Google, [SAML-based authentication][saml], or [LDAP][troubleshooting-ldap]. Metabase questions and dashboards can also be embedded in other websites. + +**Scenarios:** + +1. 
If Metabase is managing your account, or if your instance is using LDAP, you will typically log in directly using an email address and password.
+2. If some other service (like Google) is managing your credentials, you will typically see a single button that launches a pop-up dialog when you log in.
+3. If a Metabase question or dashboard is embedded in another website or web application, that site or application determines who you are. It may pass on your identity to Metabase to control what data you are allowed to view---please see [our troubleshooting guide for sandboxing][sandboxing] if you are having trouble with this.
+4. If you are using Metabase Cloud, the password for the Metabase store (where you pay for things) is not automatically the same as the password for your Metabase instance (where you log in to look at data).
+
+If you are an administrator, you can go to **Admin Panel** and select **People**, then search for a user and look for an icon beside their name. If they log in using Google credentials, Metabase displays a Google icon. If they log in using an email address and password stored in Metabase, no icon is shown. Note that the type of user is set when the account is first created: if you create a user in Metabase, but that person then logs in via Google or some other form of SSO, the latter's icon will *not* show up next to their name.
+
+If you are an administrator and want to check SSO settings, go to **Admin Panel**, choose **Settings**, then select the **Authentication** tab. [This FAQ][auth] explains how to configure SSO for various providers.
+
+## Do you need to reset your password?
+
+**Root cause:** You have forgotten your password.
+
+**Steps to take:**
+
+1. As noted above, if you are logging in via Single Sign-On, your password is managed by that service, not by Metabase, so you need to reset your password there.
+2. If you are using the desktop Mac App, click on the **Help** menu item and select `Reset Password`.
+3. 
If you are an administrator and want to reset someone's password, go to **Admin Panel**, select **People**, click on the ellipsis "..." next to the person's account, and select `Reset Password`.
+4. If you are using the web app as a normal user:
+   1. Click the link in the lower-right of the login screen that reads, "I seem to have forgotten my password".
+   2. If your Metabase administrator has [set up email][setting-up-email] you will receive a password reset email.
+   3. If email has not been configured, you will need to contact a Metabase admin to perform a password reset.
+
+## If you are using Metabase Cloud, are you trying to use the correct password?
+
+**Root cause:** You are trying to log in to Metabase Cloud using the password you set for the Metabase store.
+
+**Steps to take:**
+
+1. Check which password you are using.
+2. If you cannot remember the Metabase Cloud admin password, please see the next entry.
+
+## Do you need to reset the admin password?
+
+**Root cause:** You have forgotten the overall admin password for a Metabase instance.
+
+**Steps to take:**
+
+If you are using Metabase Cloud, contact support to have your admin password reset.
+
+If you're the administrator of a Metabase instance and have access to the server console, but have forgotten the password for the admin account, you can get Metabase to send you a password reset token:
+
+1. Stop the running Metabase application.
+2. Restart Metabase with `reset-password email@example.com`, where "email@example.com" is the email associated with the admin account:
+   ```
+   java -jar metabase.jar reset-password email@example.com
+   ```
+3. This will print out a random token like this:
+   ```
+   ...
+   Resetting password for email@example.com...
+
+   OK [[[1_7db2b600-d538-4aeb-b4f7-0cf5b1970d89]]]
+   ```
+4. Start Metabase normally again (*without* the `reset-password` option).
+5. 
Navigate to it in your browser using the path `/auth/reset_password/:token`, where ":token" is the token that was generated from the step above. The full URL should look something like this: + ``` + https://metabase.example.com/auth/reset_password/1_7db2b600-d538-4aeb-b4f7-0cf5b1970d89 + ``` +6. You should now see a page where you can input a new password for the admin account. + +## Are you using the right URL for your Metabase? + +**Root cause:** the Metabase instance you are trying to log in to isn't where you think it is or isn't accessible. + +**Steps to take:** + +1. Check whether you need to include a port number as well as a hostname in the connection URL. For example, Metabase might be at `https://example.com:3000/` instead of `https://example.com/`. + - If you're an administrator, you'll have configured this. + - If you're not, please ask your admin. +2. Check whether your Metabase instance has moved. For example, if you were using a trial instance of Metabase, but you're now in production, the URL might have changed. + +## If Metabase is managing your password, has your account been deactivated? + +**Root cause:** Metabase doesn't delete accounts, but admins can deactivate them, and if your account is deactivated, you can't log in with it. + +**Steps to take:** + +For obvious reasons, regular users can't reactivate deactivated accounts. If you're an administrator and you want to do this for someone else: + +1. Go to **Admin Panel** and select **People**. +2. If no **Deactivated** tab is available, there are no deactivated accounts, so this isn't the problem. +3. If there *is* a **Deactivated** tab, look for the user who isn't able to log in. +4. Click on the recycle loop arrow to reactivate the account. + +## If you're logging in via LDAP, is LDAP configured correctly? + +**Root cause**: The LDAP connection is not configured correctly. + +**Steps to take:** + +1. Go to the **Admin Panel** and choose **Authentication**. +2. 
Make sure that LDAP is enabled +3. Make sure Metabase has the correct host, port, and login credentials for your LDAP server. You can test this by logging into LDAP directly using some other application, such as [Apache Directory Studio][ads]. + +[ads]: https://directory.apache.org/studio/ +[auth]: ../faq/setup/how-do-i-integrate-with-sso.html +[reset-password]: ../faq/using-metabase/how-do-i-reset-my-password.html +[saml]: ..//enterprise-guide/authenticating-with-saml.html +[setting-up-email]: ../administration-guide/02-setting-up-email.html +[troubleshooting-ldap]: ./ldap.html diff --git a/docs/troubleshooting-guide/cant-see-tables.md b/docs/troubleshooting-guide/cant-see-tables.md new file mode 100644 index 000000000000..6ae67852f06a --- /dev/null +++ b/docs/troubleshooting-guide/cant-see-tables.md @@ -0,0 +1,99 @@ +# I can't see my tables + +You have connected Metabase to a database, but: + +- you don't see the tables in the [Data Model][data-model] section of the Admin Panel, +- the tables don't appear in the [Data Browser][data-browser], +- the tables don't show up as possible data sources when you create a query using the Notebook Editor, or +- you can no longer see tables that you used to be able to see. + +If you can see the tables, but some of the rows or columns seem to be missing, please check out the [troubleshooting guide for sandboxing][sandboxing]. + +## Is your browser showing you a cached list of tables? + +**Root cause:** Sometimes browsers will show an old cached list of tables. + +**Steps to take:** Refresh your browser tab and check for your table or tables again. + +## Does the database exist? + +**Root cause:** The database doesn't exist. For example, you may have connected to a test database while doing an evaluation but are now in a production environment. + +**Steps to take:** + +1. Go to Admin > Databases. +2. Check that the database you're trying to query is listed. +3. Click on the database name and examine the settings. 
+
+Exactly what settings you need will depend on your environment. To test that the settings are correct:
+
+1. Try to connect to the database using some other application (e.g., `psql` for PostgreSQL).
+
+If you can't connect to the database with another application, the problem is probably not with Metabase. Please check that the database server is running and that you have the correct host, port, username, password, and other settings.
+
+## Does the table exist?
+
+**Root cause:** The table you think you should be able to see does not exist (e.g., it has a different name than you expect).
+
+**Steps to take:** To test that the table you are trying to query actually exists and that you have permission to access it, use the SQL Editor to create and run a query like:
+
+```
+select * from SOMEWHERE
+```
+
+where `SOMEWHERE` is the table you think you should be able to see. Metabase should display an error message like:
+
+```
+Table "SOMEWHERE" not found
+```
+
+If you see this message, use another application (e.g., `psql` for PostgreSQL) to send the same query to the database. If it also produces a "table not found" message, check the database schema and the spelling of the table name.
+
+Be sure to log in using the same credentials that Metabase uses. A common source of problems is that the Metabase "user" does not have the same privileges as a member of IT staff or a developer, so tables that are visible to the latter using external applications are not visible to Metabase.
+
+## Can the Metabase account access the table?
+
+**Root cause:** The login ID that Metabase uses to query the database doesn't have privileges to view the table.
+
+**Steps to take:** Use the SQL Editor to write and run a simple query like the one shown immediately above:
+
+```
+select * from SOMEWHERE
+```
+
+where `SOMEWHERE` is the table you think you should be able to see. 
If Metabase produces an error message saying the table can't be found, run the same query using another application. Again, make sure to log in using the same credentials that Metabase uses, not your regular account.
+
+## Does the person who cannot see the table have permission to view it?
+
+**Root cause:** Metabase uses a group-based permission model: people belong to groups, and administrators can set permissions so that some groups cannot see all of the tables. (It also allows administrators to control which rows or columns specific people can see---issues with that are covered in the troubleshooting guide for [sandboxing][sandboxing].)
+
+**Steps to take:**
+
+1. Log into Metabase using the ID of the person who cannot see the expected tables.
+2. Confirm that the tables are not visible.
+3. Log out, then log in using the administrator's credentials.
+
+If the administrator's account can see the tables but an individual person cannot:
+
+1. Go to Admin > Permissions and see if any groups have been denied access to the table.
+2. If any groups have been denied access, go to Admin > People and look at the "Groups" column for the person who can't see the expected tables. If they're in a group that doesn't have access to the table, you may need to move them to another group or change table permissions.
+
+## Is Metabase's metadata out of sync with the state of the database?
+
+**Root cause:** In order to display available tables and columns in dropdown menus and previews, Metabase runs a query every hour to find out what tables are available and what columns are in each available table, and stores this information in its application database.
+
+1. If a table has been added or removed since the last time this "sync" operation ran, Metabase's information about the database will be outdated.
+2. In some rare cases Metabase may time out while synchronizing with the database. 
For example, if you're using MongoDB and have very large (hundreds of kilobytes) JSON blobs, the sync operation may not complete in the allowed time. + +**Steps to take:** + +1. Run the "sync" process manually: + 1. Go to Admin Panel > Databases. + 2. Choose the database. + 3. Click on "Sync database schema now". +2. Go to Admin > Troubleshooting > Logs and see if there are any error messages saying that the "sync" operation could not run (e.g., because the network or the database itself was temporarily down). +3. If there are no suspicious error messages, log out of Metabase, close the browser tab, log back into Metabase in a new browser tab, and try to access your table again. + +[data-browser]: /learn/getting-started/data-browser.html +[data-model]: ../administration-guide/03-metadata-editing.html +[sandboxing]: ./sandboxing.html diff --git a/docs/troubleshooting-guide/cant-send-email.md b/docs/troubleshooting-guide/cant-send-email.md new file mode 100644 index 000000000000..fe39dcab5c76 --- /dev/null +++ b/docs/troubleshooting-guide/cant-send-email.md @@ -0,0 +1,41 @@ +# Metabase isn't sending email + +You have told Metabase to send email notifications, but: + +- the notifications aren't arriving. + +## Are the email credentials correct? + +**Root cause:** The host, port, email address, or password may have been set up incorrectly, or the email server's host and port have been set up incorrectly. + +**Steps to take:** + +1. In the Admin Panel, select **Email Settings** and check the settings. +2. If they seem correct, click **Send test email**. +3. Verify that the email is delivered to the test account. +4. If the message is not sent or an error message is displayed in Metabase, try to use the same account credentials in another email program and see if they work. If they do, you may have found a bug---please [report it][bugs]. + +## Is the mail server actually sending the message? 
+ +**Root cause:** Some email delivery services have very specific rules regarding valid "from" addresses, or have test modes that restrict delivery. + +**Steps to take:** + +1. Check that your delivery service allows you to send email to the domain you're trying to get email sent to. (Exactly how to do this depends on the delivery service you're connecting to.) +2. Make sure you've whitelisted the "from" address that you're using for Metabase. +3. Check the mail server's logs for any error messages. +4. If you have access to your email delivery service's outbound queue or a dashboard, check that for errors as well. + +## Is the mail being sent but not arriving? + +**Root cause:** The message is being sent correctly, but isn't being received (at least, not where you expect it to be). + +**Steps to take:** + +1. Check whether email sent to other accounts is arriving, e.g., are colleagues receiving their notifications? +2. If so, check your spam folder, any forwarding rules you have set up, etc. +3. Check whether you're using the same email provider as the people who are receiving their messages. If not, the problem might be with deliverability rules---look into signing your emails with [DomainKeys Identified Mail][dkim] (DKIM). + +[bugs]: ./bugs.html +[dkim]: https://en.wikipedia.org/wiki/DomainKeys_Identified_Mail +[office-365-bug]: https://github.com/metabase/metabase/issues/4272 diff --git a/docs/troubleshooting-guide/datawarehouse.md b/docs/troubleshooting-guide/datawarehouse.md index cb9b08111bc3..d4c245f53be3 100644 --- a/docs/troubleshooting-guide/datawarehouse.md +++ b/docs/troubleshooting-guide/datawarehouse.md @@ -1,41 +1,42 @@ -## Troubleshooting your database connection +# Connecting to data warehouses with Metabase -If you're having trouble connecting to your data warehouse, run through these steps to identify the problem. - -1. Verify that the data warehouse server is running. See [the data warehouse server is down](#the-data-warehouse-server-is-down). 
-2. Try connecting to the data warehouse using another client from a machine you know should have access. See [the data warehouse server is denying connections from your IP address](#the-data-warehouse-server-is-denying-connections-from-your-ip-address). -3. Try connecting to the data warehouse from another client from the machine you're running Metabase on. -4. Add the connection in Metabase. -5. Examine the logs to verify that the sync process started and that no errors were thrown. You can view the logs in the Metabase process, or in the app itself in the Admin section -> Troubleshooting -> Logs. -6. Run a native `SELECT 1` query to verify the connection to the data warehouse. -7. If the sync process has completed, ask a [native question](../users-guide/writing-sql.md) to verify that you are able to use the database. +
+- [The data warehouse server is down](#server-down) +- [The data warehouse server is denying connections from your IP address](#server-denying-connections) +- [Incorrect credentials](#incorrect-credentials) +- [Connection timeout: your question took too long](#connection-timeout-took-too-long) +- [Connections cannot be acquired from the underlying database](#connections-cannot-be-acquired) +
-## Specific Problems +If you're having trouble connecting to your data warehouse, run through these steps to identify the problem. -- [The data warehouse server is down](#the-data-warehouse-server-is-down) -- [The data warehouse server is denying connections from your IP address](#the-data-warehouse-server-is-denying-connections-from-your-ip-address) -- [Incorrect credentials](#incorrect-credentials) -- [Connection timeout: your question took too long](#connection-timeout-your-question-took-too-long) +1. Is the data warehouse server running ([see below](#server-down))? +2. Can you connect to the data warehouse using another client from a machine you know should have access ([see below](#server-denying-connections))? +3. Can you connect to the data warehouse from another client from the machine you're running Metabase on? +4. Have you added the connection in Metabase? +5. Have you examined the logs to verify that the sync process started and that no errors were thrown? (You can view the logs in the Metabase process, or in the app itself by going to the Admin Panel, selecting "Troubleshooting", and then selecting "Logs".) +6. Have you run a native `SELECT 1` query to verify the connection to the data warehouse? +7. If the sync process has completed, can you ask a [native question][native-question] to verify that you are able to use the database? -### The data warehouse server is down +

The data warehouse server is down

-**How to detect this:** As silly as this sounds, occasionally database servers go down. If you're using a hosted database service, go to its console and verify that its status is Green. If you have direct access to a command line interface, log in and make sure that it is up and running and accepting queries. +**How to detect this:** Database servers occasionally go down. If you're using a hosted database service, go to its console and verify its status. If you have direct access to a command-line interface, log in and make sure that it's up and running and accepting queries. -**How to fix this:** It's out of the scope of this troubleshooting guide to get your data warehouse server back up. Check with whomever set it up for you! +**How to fix this:** It's out of the scope of this troubleshooting guide to get your data warehouse server back up---please check with whomever set it up for you. -### The data warehouse server is denying connections from your IP address +

The data warehouse server is denying connections from your IP address

-**How to detect this:** If you can access the server from a bastion host, or another machine, use `nc` on Linux (or your operating system's equivalent) to verify that you can connect to the host on a given port. Different databases use different ports, but an example for a default PostgreSQL configuration (which listens on port 5432) would be: +**How to detect this:** If you can access the server from a bastion host or another machine, use the `nc` command (or your operating system's equivalent) to verify that you can connect to the host on a given port. Different databases use different ports; for a default PostgreSQL configuration (which listens on port 5432), the command would be: ``` -nc -v your-db-host 5432` +nc -v your-db-host 5432 ``` -**How to fix this:** It's out of the scope of this troubleshooting guide to change your network configuration. Talk to whomever is responsible for the network your data warehouse is running on. +**How to fix this:** It's out of the scope of this troubleshooting guide to change your network configuration---please check with whomever is responsible for the network your data warehouse is running on. -### Incorrect credentials +

Incorrect credentials

-**How to detect this:** If you've verified that you can connect to the host and port on the data warehouse, the next step is to check your credentials. Again, connecting to a data warehouse depends on your database server software, but for PostgreSQL, the below uses a command-line interface (`psql`) to connect to your data warehouse. +**How to detect this:** If you've verified that you can connect to the data warehouse's host and port, the next step is to check your credentials. Again, connecting to a data warehouse depends on your database server software; for PostgreSQL, a command like the one shown below will do the job: ``` psql -h HOSTNAME -p PORT -d DATABASENAME -U DATABASEUSER` @@ -45,28 +46,33 @@ If your credentials are incorrect, you should see an error message letting you k **How to fix this:** If the database name or the user/password combination are incorrect, ask the person running your data warehouse for correct credentials. -### Connection timeout: your question took too long - -**How to detect this:** If you see the error message, "Your question took too long," something in your setup timed out. Depending on the specifics of your deployment, this could be a timeout in: +

Connection timeout: your question took too long

-- Your load balancer -- Your reverse proxy server (e.g., Nginx) -- Jetty -- Your database -- Your cloud service: AWS's Elastic Beanstalk, EC2, Heroku, Google App Engine, etc. +**How to detect this:** If you see the error message, "Your question took too long," something in your setup timed out. Depending on the specifics of your deployment, the problem could be in: -**How to fix this:** Fixing this depends on your specific setup. Here are some potentially helpful resources: +- your load balancer; +- your reverse proxy server (e.g., Nginx); +- Jetty; +- your database; or +- your cloud service, such as AWS's Elastic Beanstalk, EC2, Heroku, or Google App Engine. -- [Configuring Jetty connectors](https://www.eclipse.org/jetty/documentation/current/configuring-connectors.html) -- [EC2 Troubleshooting](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/TroubleshootingInstancesConnecting.html) -- [Elastic Load Balancing Connection Timeout Management](https://aws.amazon.com/blogs/aws/elb-idle-timeout-control/) -- [Heroku timeouts](https://devcenter.heroku.com/articles/request-timeout) -- [App Engine: Dealing with DeadlineExceededErrors](https://cloud.google.com/appengine/articles/deadlineexceedederrors) +**How to fix this:** Fixing this depends on your specific setup. These resources may help: -### Error message: "Connections cannot be acquired from the underlying database!" +- [Configuring Jetty connectors][configuring-jetty] +- [EC2 Troubleshooting][ec2-troubleshooting] +- [Elastic Load Balancing Connection Timeout Management][elb-timeout] +- [Heroku timeouts][heroku-timeout] +- [App Engine: Dealing with DeadlineExceededErrors][app-engine-timeout] -**How to detect this:** Metabase fails to connect to your data warehouse and the Metabase server logs include the error message `Connections cannot be acquired from the underlying database!` +

Connections cannot be acquired from the underlying database

-**How to fix this:** Navigate to the options for your data warehouse and locate the Additional JDBC Connection Strings option, then add `trustServerCertificate=true` as an additional string. +**How to detect this:** Metabase fails to connect to your data warehouse and the Metabase server logs include the error message `Connections cannot be acquired from the underlying database! +**How to fix this:** Navigate to the options for your data warehouse and locate the "Additional JDBC Connection Strings" option, then add `trustServerCertificate=true` as an additional string. +[app-engine-timeout]: https://cloud.google.com/appengine/articles/deadlineexceedederrors +[configuring-jetty]: https://www.eclipse.org/jetty/documentation/current/configuring-connectors.html +[ec2-troubleshooting]: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/TroubleshootingInstancesConnecting.html +[elb-timeout]: https://aws.amazon.com/blogs/aws/elb-idle-timeout-control/ +[heroku-timeout]: https://devcenter.heroku.com/articles/request-timeout +[native-question]: ../users-guide/writing-sql.html diff --git a/docs/troubleshooting-guide/docker.md b/docs/troubleshooting-guide/docker.md index 5d2f0fcd54b1..fd25f9e93fcc 100644 --- a/docs/troubleshooting-guide/docker.md +++ b/docs/troubleshooting-guide/docker.md @@ -1,40 +1,59 @@ -While Docker simplifies a lot of aspects of running Metabase, there are a number of potential pitfalls to keep in mind. +# Running Metabase on Docker -If you are having issues with Metabase under Docker, we recommend going through the troubleshooting process below. Then look below for details about the specific issue you've found. +
+- [Metabase container exits without starting the server](#container-exits-without-starting-server) +- [Metabase container is running but the server is not](#container-running-but-server-is-not) +- [Not connecting to a remote application database](#not-connecting-to-remote-app-db) +- [The Metabase server isn't able to connect to a MySQL or PostgreSQL database](#metabase-cant-connect-to-mysql-or-postgresql) +- [The Metabase application database is not being persisted](#app-db-not-being-persisted) +- [The internal port isn't being remapped correctly](#internal-port-not-remapped-correctly) +- [Metabase can't write or read to/from a file or directory](#cant-write-read-file-or-dir) +
-## Troubleshooting Process +Docker simplifies many aspects of running Metabase, but there are some pitfalls to keep in mind. If you have trouble with Metabase under Docker, try going through the troubleshooting process below, then look below for details about the specific issue you've found. -1. Check that the container is running -2. Check that the server is running inside the container -3. Check whether Metabase is using the correct application database -4. Check that you can connect to the Docker host on the Metabase port -5. Check that you can connect to the container from the Docker host -6. Check that you can connect to the server from within the container +1. Is the container running? +2. Is the server running inside the container? +3. Is Metabase using the correct application database? +4. Can you connect to the Docker host on the Metabase port? +5. Can you connect to the container from the Docker host? +6. Can you connect to the server from within the container? -## Specific Problems +You may find these commands useful along the way. To get to the shell in the Metabase container: -### Metabase container exits without starting the server +``` +docker exec -ti CONTAINER_NAME bash +``` -Run `docker ps` to see if the Metabase container is currently running. If it is move on to the next step. +And to get the logs for the Metabase container: -If `docker ps` does not show the running container, then list the stopped containers by running: +``` +docker logs -f CONTAINER_NAME +``` -`docker ps -a | grep metabase/metabase` +

Metabase container exits without starting the server

-And look for the container that exited most recently. Note the container ID. -Look at that container's logs with: +**How to detect this:** Run `docker ps` to see if the Metabase container is currently running. If it is, move on to the next step. -`Docker logs CONTAINER_ID` +If `docker ps` does not show the running container, then list the stopped containers by running: -### Metabase Container is running but the Server is not +``` +docker ps -a | grep metabase/metabase +``` -#### How to detect this: +Look for the container that exited most recently and make a note of the container ID. Look at that container's logs with: -Run `docker ps` to make sure the container is running +``` +Docker logs CONTAINER_ID +``` -The server should be logging to the Docker container logs. Check this by running: +

Metabase container is running but the server is not

-`docker logs CONTAINER_NAME` +**How to detect this:** Run `docker ps` to make sure the container is running. The server should be logging to the Docker container logs. Check this by running: + +``` +docker logs CONTAINER_NAME +``` You should see a line like this at the beginning: @@ -42,37 +61,36 @@ You should see a line like this at the beginning: 05-10 18:11:32 INFO metabase.util :: Loading Metabase... ``` -and eventually: +Further down, you should eventually see a line like: ``` 05-10 18:12:30 INFO metabase.core :: Metabase Initialization COMPLETE ``` -If you see the below lines: +If you see the lines below: ``` 05-15 19:07:11 INFO metabase.core :: Metabase Shutting Down ... 05-15 19:07:11 INFO metabase.core :: Metabase Shutdown COMPLETE ``` -#### How to fix this: +then Metabase has shut itself down. -Check this for errors about connecting to the application database. -Watch the logs to see if Metabase is still being started: +**How to fix this:** Check the Docker container logs for errors about connecting to the application database. Watch the logs to see if Metabase is still being started; the command: -`Docker logs -f CONTAINER_ID` +``` +Docker logs -f CONTAINER_ID +``` will let you see the logs as they are printed. -If the container is being killed before it finished starting it could be a health check timeout in the orchestration service used to start the container, such as Docker Cloud, or Elastic Beanstalk. +If the container is being terminated before it finished starting, the problem could be a health check timeout in the orchestration service used to start the container, such as Docker Cloud or Elastic Beanstalk. -If the container is not being killed from the outside, and is failing to start anyway, this problem is probably not specific to Docker. If you are using a Metabase-supplied image, you should [open a GitHub issue](https://github.com/metabase/metabase/issues/new/choose). 
+If the container is _not_ being terminated from the outside, but is failing to start anyway, this problem is probably not specific to Docker. If you're using a Metabase-supplied image, please [open a GitHub issue](https://github.com/metabase/metabase/issues/new/choose). -### Not connecting to a remote application database +

Not connecting to a remote application database

-#### How to detect this: - -If this is a new Metabase instance, then the database you specified via the environment variables will be empty. If this is an existing Metabase instance with incorrect environment parameters, the server will create a new H2 embedded database to use for application data and you’ll see lines similar to these: +**How to detect this:** If this is a new Metabase instance, then the database you specified via the environment variables will be empty. If this is an existing Metabase instance with incorrect environment parameters, the server will create a new H2 embedded database to use for application data and you’ll see lines similar to these in the log: ``` 05-10 18:11:40 INFO metabase.core :: Setting up and migrating Metabase DB. Please sit tight, this may take a minute... @@ -81,85 +99,64 @@ If this is a new Metabase instance, then the database you specified via the envi 05-10 18:11:40 INFO metabase.db :: Verify Database Connection ... ✅ ``` -#### How to fix this: - -Double check you are passing environments to Docker in the correct way. -You can list the environment variables for a container with this command: - -`docker inspect some-postgres -f '{% raw %}{{ .Config.Env }}{% endraw %}'` - -### The Metabase server isn’t able to connect to a MySQL or PostgreSQL database +**How to fix this:** Check that you are passing environments to Docker correctly. You can list the environment variables for a container with this command: -#### How to detect this: - -The logs for the Docker container return an error message after the “Verifying Database Connection” line. +``` +docker inspect some-postgres -f '{% raw %}{{ .Config.Env }}{% endraw %}' +``` -#### How to fix this: +

The Metabase server isn't able to connect to a MySQL or PostgreSQL database

-Try to connect with `mysql` or `psql` commands with the connection string parameters you are passing in [via the environment variables](../operations-guide/configuring-application-database.md). +**How to detect this:** The logs for the Docker container return an error message after the "Verifying Database Connection" line. -If you can’t connect to the database, the problem is due to either the credentials or connectivity. Verify that the credentials are correct. If you are able to log in with those credentials from another machine then try to make the same connection from the host running the Docker container. +**How to fix this:** Try to connect using the `mysql` or `psql` command with the connection string parameters you are passing in [via the environment variables][configuring-application-database]. If you can't connect to the database, the problem is due to either the credentials or connectivity. To verify that the credentials are correct, log in with those credentials from another machine and then try to make the same connection from the host running the Docker container. One easy way to run this is to use Docker to start a container that has the appropriate client for your database. For Postgres this would look like: -`docker run --name postgres-client --rm -ti --entrypoint /bin/bash postgres` - -Then from within that container try connecting to the database host using the client command in the container such as `psql`. If you are able to connect from another container on the same host, then try making that connection from within the Metabase Docker container itself: - -`docker exec -ti container-name bash` - -And try to connect to the database host using the `nc` command and check if the connection can be opened: +``` +docker run --name postgres-client --rm -ti --entrypoint /bin/bash postgres +``` -`nc -v your-db-host 5432` +From within that container, try connecting to the database host using the client command in the container such as `psql`. 
If you are able to connect from another container on the same host, then try making that connection from within the Metabase Docker container itself:

-This will make it clear if this is a network or authentication problem.

+```
+docker exec -ti container-name bash
+```

-### The Metabase application database is not being persisted

+You can also try to connect to the database host using the `nc` command and check if the connection can be opened:

-#### How to detect this:

+```
+nc -v your-db-host 5432
+```

-This occurs if you get the Setup screen every time you start the application. The most common root cause is not giving the Docker container a persistent filesystem mount to put the application database in.

+These steps will help you determine whether the problem is with the network or with authentication.

-#### How to fix this:

The Metabase application database is not being persisted

-Make sure you are giving the container a [persistent volume](../operations-guide/running-metabase-on-docker.html#mounting-a-mapped-file-storage-volume)

+**How to detect this:** This occurs if you get the Setup screen every time you start the application. The most common cause is not giving the Docker container a persistent filesystem mount to put the application database in.

-### The internal port isn’t being remapped correctly

+**How to fix this:** Make sure you are giving the container a [persistent volume][persistent-volume].

-#### How to detect this:

The internal port isn't being remapped correctly

-Run `docker ps` and look at the port mapping -Run `curl http://localhost:port-number-here/api/health`. This should return a response with a JSON response like: +**How to detect this:** Run `docker ps` and look at the port mapping, then run `curl http://localhost:port-number-here/api/health`. This should return a JSON response that looks like: ``` {"status":"ok"} ``` -#### How to fix this: - -Make sure to include a `-p 3000:3000` or similar remapping in the `docker run` command you execute to start the Metabase container image. - -### Metabase can't write or read to/from a file or directory - -#### How to detect this: +**How to fix this:** Make sure to include `-p 3000:3000` or similar port remapping in the `docker run` command you use to start the Metabase container image. -A message in the logs will clearly indicate an IOError/Permission denied from Java, or errors from SQLite with the `org.sqlite.core.NativeDB._open_utf8` form. +

Metabase can't write or read to/from a file or directory

-#### How to fix this: +**How to detect this:** A message in the logs will clearly indicate an IOError or "Permission denied" from Java, or errors from SQLite containing `org.sqlite.core.NativeDB._open_utf8`. -Ensure that the user who is running Metabase has permission to read and write to the file or directory: +**How to fix this:** Ensure that the user who is running Metabase has permission to read and write to the file or directory: - If you are running Metabase as a JAR file in your local machine or server, check the user who is running the Java process. - If you're running Metabase from the Docker container, make sure you're using the `/metabase.db` directory. -If you're running Metabase from the JAR in any *nix (Unix like) operating system, in order to see which user is running Metabase, you have to open a terminal and type `ps -uA | grep metabase`. - -## Helpful tidbits - -### How to get to the shell in the Metabase container - -`docker exec -ti CONTAINER_NAME bash` - -### How to get the logs for the Metabase container +If you're running Metabase from the JAR in any Unix-like operating system, you can see which user is running Metabase by opening a terminal and typing `ps -uA | grep metabase`. -`docker logs -f CONTAINER_NAME` +[configuring-application-database]: ../operations-guide/configuring-application-database.html +[persistent-volume]: ../operations-guide/running-metabase-on-docker.html#mounting-a-mapped-file-storage-volume diff --git a/docs/troubleshooting-guide/email.md b/docs/troubleshooting-guide/email.md deleted file mode 100644 index aeac1772bbef..000000000000 --- a/docs/troubleshooting-guide/email.md +++ /dev/null @@ -1,31 +0,0 @@ - - -## Troubleshooting Process - -1. Verify the Email Account Credentials - a. 
In the admin panel email settings, click "Send test email", and verify that the email is delivered to the test account - b.If the email is not sent or returns an error, try to use the same account credentials in another program and see if they work. If they do, it might be a bug, please report it at github.com/metabase/metabase/issues/new - -2. Is the email being sent? - a. Check the server logs for any error messages - b. If there are any error messages, they are usually helpful :wink: - c. If you have access to your email delivery service's outbound queue or a dashboard, check that for errors. - d. Some email delivery services have very specific rules regarding valid "from" addresses, make sure you've whitelisted the "from" address you're using in Metabase - e. Some email delivery services have test modes or otherwise restricted delivery. Double check that your delivery service allows you to send email to the domain you're trying to get email sent to. - -3. If the email is being sent, but you're not getting it, is anyone else getting theirs? - a. If so, check your spam folder, any forwarding rules, etc - b. If someone at another email provider is getting emails, this is probably due to deliverability rules, and you should look into signing your emails with DKIM, etc. - -4. For user accounts specifically, did you previously create an account under this email and then delete it? This occasionally results in that email address being "claimed". - -5. Make sure that the HOSTNAME is being set correctly. EC2 instances in particular have those set to the local ip, and some email delivery services such as GMail will error out in this situation. - -## Specific Problems: - -### Specific Problem: - -### Metabase can't send email via Office365 - -We see users report issues with sending email via Office365. We recommend using a different email delivery service if you can. 
-https://github.com/metabase/metabase/issues/4272 diff --git a/docs/troubleshooting-guide/filters.md b/docs/troubleshooting-guide/filters.md new file mode 100644 index 000000000000..90d364e45b96 --- /dev/null +++ b/docs/troubleshooting-guide/filters.md @@ -0,0 +1,64 @@ +# My dashboard filters don't work + +You've tried to add a [filter widget][filter-widget-gloss] to your dashboard, but: + +- the question you want to connect the filter to doesn't show up, or +- the filter doesn't show a dropdown list of possible values when you use it, or +- the filter removes all of the rows from the table no matter what value you set it to. + +If you've created a [linked filter][linked-filter-gloss], please see [this troubleshooting guide][troubleshoot-linked-filters] instead. + +## Is the dashboard filter actually connected to your question? + +**Root cause:** The filter isn't connected to any cards on the dashboard, or connected to the wrong field. + +**Steps to take:** + +1. In dashboard edit mode, click on the gear icon next to the filter. Check that each card you want to wire up to the filter has a column selected. +2. If no columns are available to select on that card, you may need to change the filter type, from say a text filter to a date filter, to connect the filter to the card. +3. Check that the filter widget is connected to the column you want to filter on each relevant card. + +## If the card you're trying to filter is written in SQL, does its SQL query contain a variable? + +**Root cause**: If your SQL question doesn't contain a variable, the filter can't insert the value into the query to filter the results. + +**Steps to take**: + +1. Check that your SQL query contains at least [one variable][sql-variable] for the filter to insert the value. These can be plain variables, or [Field Filters][field-filter], with names enclosed in double curly braces `{% raw %}{{variable_name}}{% endraw %}`, typically in a `WHERE` clause. 
+ +If you built your question in the Query Builder, Metabase knows which columns you're using, and which columns you can connect to different types of filters. So you can add a dashboard filter and refer to columns in the question's results without creating variables explicitly. + +## Are you seeing a different kind of input widget than you expected? + +For example, you want a dropdown but you're seeing a search box or a text input box. + +**Root cause:** Metabase only displays a dropdown list of possible values for a variable if it knows that the field in question is a category rather than (for example) an arbitrary number or arbitrary text. However, if the number of unique categories exceeds 100 values, Metabase will display a search box with autocomplete instead of a dropdown. + +**Steps to take:** + +1. Go to the **Admin Panel** and select the **Data Model** tab. +2. Select the database, schema, table, and field in question. +3. Click the gear-icon to view all the field's settings. +4. Set **Field Type** to "Category" and **Filtering on this field** to "A list of all values." +5. Click the button **Re-scan this field** in the bottom. + +If you created the question in SQL, then you only get a dropdown if the filter is a Field Filter _and_ the Filtering on this field option is set to your preferred input type: A list of all values (dropdown list) _and_ the number of unique values is less than 100. + +## Has someone renamed or deleted columns in the database? + +**Root cause:** Someone has changed the database schema, e.g., renamed or deleted a column in a table. + +**Steps to take:** + +If a filter that used to work no longer seems to, or seems to eliminate all of the rows: + +1. [Re-sync][sync-scan] Metabase with the database (i.e., refresh Metabase's understanding of the database's structure). +2. Compare the names of the fields used in the question with the actual names of the fields in the database. +3. 
Modify the question to match the current database schema. + +[field-filter]: /learn/sql-questions/field-filters.html +[filter-widget-gloss]: /glossary.html#filter_widget +[linked-filter-gloss]: /glossary.html#linked_filter +[sql-variable]: /learn/sql-questions/sql-variables.html +[sync-scan]: ./sync-fingerprint-scan.html +[troubleshoot-linked-filters]: ./linked-filters.html diff --git a/docs/troubleshooting-guide/index.md b/docs/troubleshooting-guide/index.md index c247e8a19063..a3aac9e94b86 100644 --- a/docs/troubleshooting-guide/index.md +++ b/docs/troubleshooting-guide/index.md @@ -1,19 +1,102 @@ -## What are you having trouble with? +# What are you having trouble with? -### [Logging in](loggingin.md) +This page collects resources for getting you unstuck. -### [Running Metabase](running.md) +## Troubleshooting guides -### [Running Metabase on Docker](docker.md) +Problems, their causes, how to detect them, and how to fix them. -### [The Metabase Application Database](application-database.md) +### Using Metabase -### [Connecting to databases and data warehouses with Metabase](datawarehouse.md) +- [People can't log in to Metabase][login]. -### [Incorrect results due to time zones](timezones.md) +- [Saving questions or dashboards][proxies]. -### [Problems with saving questions/dashboards or blank page](proxies.md) +- [My dashboard is slow][slow-dashboard]. -### [LDAP](ldap.md) +- [My dashboard filters don't work][filters]. -### [I think I found a bug](bugs.md) +- [My dashboard's linked filters don't work][linked-filters]. + +### Setup and administration + +- [Running the Metabase JAR][running]. + +- [Running Metabase on Docker][docker]. + +- [Connecting to data warehouses with Metabase][datawarehouse]. + +- [Setting up LDAP][ldap]. + +- [Metabase isn't sending email][not-sending-email]. + +- [Using or migrating from an H2 application database][appdb]. + +- [The dates and times in my questions and charts are wrong][incorrect-times]. 
+
+- [I can't see my tables][cant-see-tables].
+
+- [Managing data sandboxing][sandbox].
+
+- [Fixing missing or out-of-sync tables and columns][sync-fingerprint-scan].
+
+## Think you found a bug?
+
+Let us know by [filing a bug report][bugs].
+
+## Metabase server and console logs
+
+Metabase will log errors, both on the server and in the browser console, depending on where the error occurs, which can help you track down an issue. Administrators will have access to the server logs, and everyone with a browser can open the developer tools to see the console logs.
+
+**Accessing the Metabase server logs**: You can look for the logs that Metabase leaves on the server's file system (or wherever else you collect logs). If you're logged into Metabase with an Admin account, you can also access the logs by clicking on the **gears icon** in the top right of the main nav, selecting **Admin**, clicking on the **Troubleshooting** tab, then viewing the **Logs** tab. Check out [How to read the server logs][server-logs].
+
+**Checking for Javascript console errors:** Metabase will send debugging information and errors to your browser's developer console. To open the developer console, follow the instructions for your web browser:
+
+- [Chrome][chrome]
+- [Firefox][firefox]
+- [Safari][safari]
+- [Edge][edge]
+
+## Metabase tutorials
+
+For tutorials that walk you through how to use Metabase features, check out [Learn Metabase][learn].
+
+## Metabase forum
+
+To see if someone else has run into a similar issue, check out [our forum on Discourse][forum].
+
+## Frequently asked questions
+
+For quick answers to common questions, check out our [Frequently Asked Questions][faq].
+
+## Upgrading Metabase
+
+Metabase adds new features and squashes bugs with each release. [Upgrading to the latest and greatest][upgrade] may resolve your issue. If you're using [Metabase Cloud][cloud], we'll handle the upgrades for you. You can check out the [release notes][releases] to see what's new. 
+ +[appdb]: ./loading-from-h2.html +[bugs]: ./bugs.html +[cant-see-tables]: ./cant-see-tables.html +[chrome]: https://developers.google.com/web/tools/chrome-devtools/open#console +[cloud]: https://www.metabase.com/start/ +[datawarehouse]: ./datawarehouse.html +[docker]: ./docker.html +[edge]: https://docs.microsoft.com/en-us/microsoft-edge/devtools-guide-chromium +[faq]: /faq +[filters]: ./filters.html +[firefox]: https://developer.mozilla.org/en-US/docs/Tools/Web_Console/Opening_the_Web_Console +[forum]: https://discourse.metabase.com/ +[incorrect-times]: ./timezones.html +[ldap]: ./ldap.html +[learn]: https://www.metabase.com/learn +[linked-filters]: ./linked-filters.html +[login]: ./cant-log-in.html +[not-sending-email]: ./cant-send-email.html +[proxies]: ./proxies.html +[releases]: https://github.com/metabase/metabase/releases +[running]: ./running.html +[safari]: https://support.apple.com/guide/safari-developer/develop-menu-dev39df999c1/mac +[server-logs]: ./server-logs.html +[sandbox]: ./sandboxing.html +[slow-dashboard]: ./my-dashboard-is-slow.html +[sync-fingerprint-scan]: ./sync-fingerprint-scan.html +[upgrade]: ../operations-guide/upgrading-metabase.html diff --git a/docs/troubleshooting-guide/installing.md b/docs/troubleshooting-guide/installing.md deleted file mode 100644 index a0db941f4fbf..000000000000 --- a/docs/troubleshooting-guide/installing.md +++ /dev/null @@ -1,11 +0,0 @@ -## Troubleshooting Process -1. - -## Specific Problems: - -### Specific Problem: -xxx -#### How to detect this - -xxx -#### How to fix this - -xxx \ No newline at end of file diff --git a/docs/troubleshooting-guide/ldap.md b/docs/troubleshooting-guide/ldap.md index d9c572f5026b..a917ce1eac4f 100644 --- a/docs/troubleshooting-guide/ldap.md +++ b/docs/troubleshooting-guide/ldap.md @@ -1,6 +1,17 @@ -### LDAP sample configuration +# LDAP + +
+- [LDAP sample configuration](#ldap-sample-configuration) +- [Related software for troubleshooting](#related-software-for-troubleshooting) +- [Current limitations](#current-limitations) +
+ +Metabase can use LDAP for authentication. [This article][ldap-learn] explains how to set it up, and the guide below will help you troubleshoot if anything goes wrong. You may also want to check [our troubleshooting guide for logging in](./loggingin.html). + +

LDAP sample configuration

+ +You can test Metabase with LDAP by using this `docker-compose` definition: -You can test Metabase with LDAP by using this docker-compose definition: ``` version: '3.7' services: @@ -48,18 +59,23 @@ networks: driver: bridge ``` -If you don't pass environment variables to Metabase and you want to configure the environment manually, you can go to Admin->Settings->Authentication-> LDAP configuration and enter the following values: +If you don't pass environment variables to Metabase and you want to configure the environment manually, you can go to the Admin Panel, selectin "Settings", select "Authentication", and then select "LDAP Configuration" and enter the following values: + +- `USERNAME OR DN`: `cn=admin,dc=example,dc=org` +- `PASSWORD`: `adminpassword` +- `USER SEARCH BASE`: `ou=users,dc=example,dc=org` +- `USER FILTER`: `(&(objectClass=inetOrgPerson)(|(uid={login})))` +- `GROUP SEARCH BASE`: `cn=readers` + +For the `USER FILTER`, you can leave the default value, which will look for the user ID in both the `uid` or `email` field. -USERNAME OR DN: `cn=admin,dc=example,dc=org` -PASSWORD: `adminpassword` -USER SEARCH BASE: `ou=users,dc=example,dc=org` -USER FILTER: `(&(objectClass=inetOrgPerson)(|(uid={login})))` // you can leave the default value which will look for the user id both in the uid or email field -GROUP SEARCH BASE: `cn=readers` + -### Related software for troubleshooting +If you run into an issue, check that you can login to your LDAP directory and issue queries using software like [Apache Directory Studio][apache-directory-studio]. It will let you see the whole LDAP tree and view the logs of your LDAP application to see queries run. -If you run into an issue, check that you can login and use your LDAP directory with software like [Apache Directory Studio](https://directory.apache.org/studio/). You can use Directory Studio to see the whole LDAP tree and view the logs of your LDAP application to see the queries run. +

Current limitations

-### Current limitations +- When using Metabase Enterprise with a MySQL database and LDAP enabled, make sure that you disable synchronization of binary fields from your LDAP directory by using the `MB_LDAP_SYNC_USER_ATTRIBUTES_BLACKLIST` environment variable. If you do not, you may hit the 60K field size limitation of the text field in MySQL, which will prevent you from creating users or those users from logging in. -- When using Metabase Enterprise with a MySQL database and LDAP enabled, make sure that you disable the sync of binary fields from your LDAP directory by using the `MB_LDAP_SYNC_USER_ATTRIBUTES_BLACKLIST` environment variable, as you may hit the 60K field size limitation of the text field in MySQL, which will prevent the creation or log-in of your users. +[apache-directory-studio]: https://directory.apache.org/studio/ +[ldap-learn]: /learn/permissions/ldap-auth-access-control.html diff --git a/docs/troubleshooting-guide/linked-filters.md b/docs/troubleshooting-guide/linked-filters.md new file mode 100644 index 000000000000..2bcb5b5693b2 --- /dev/null +++ b/docs/troubleshooting-guide/linked-filters.md @@ -0,0 +1,60 @@ +# My linked filters don't work + +You have created a [linked filter][linked-filter-gloss] so that (for example) if a dashboard contains both a "State" and a "City" filter, the "City" filter only shows cities in the state selected by the "State" filter. However: + +- your cards are showing "No result" when you apply the linked filter, +- your linked filter seems to have no effect, or +- your linked filter widget does not display a dropdown of filtered values. + +If you are having problems with a regular [filter widget][filter-widget-gloss], please see [this guide](./filters.html). In order to fix problems with linked filters, you need a clear understanding of how they work: + +## Do you understand the directionality of linked filters? 
+
+**Root cause:** Linked filters are one of the more complex features of Metabase, and many problems stem from misunderstanding their operation.
+
+**Steps to take:** Check that you understand the points below, and that your linked filter is set up with them in mind.
+
+1. A filter isn't part of a specific question. Instead, a filter is added to a dashboard and its value is used to fill in variables in questions.
+
+2. In order for Metabase to display a dropdown list of possible filter values, it must know that the column corresponds to a category. This happens automatically if the question is created from tables via the Notebook Editor, since Metabase has knowledge about the table and columns from synchronization.
+
+3. If the question that contains the variable is written in SQL, on the other hand, the author of the question must have selected "Field Filter". Also, the field referenced must be set as a category in the Data Model in order for Metabase to show a dropdown list of values.
+
+## Are the filters linked in the correct direction?
+
+**Root cause:** The most common cause is that the filters have been linked in the wrong direction. If you want the values shown by Filter B to be restricted by the setting of Filter A, you have to change the settings for Filter B, not Filter A---i.e., the downstream filter has the setting, not the upstream filter.
+
+**Steps to take:**
+
+1. Remove the existing linkage and create a new one in the opposite direction.
+
+## Do some rows actually satisfy the full filter condition?
+
+**Root cause:** There aren't any rows that satisfy all the conditions in a linked filter. Continuing with the city and state example, if you manually enter the name of a city that isn't in the selected state, no record will satisfy both conditions.
+
+**Steps to take:**
+
+1. Create a question that only uses the first filter and check that it produces some rows. (If it does not, adding a second filter isn't going to make any rows appear.)
+2. 
Create a question that you think should produce the same result as the combination of linked filter settings that isn't producing any data. If it produces the result you expect, check for typing mistakes and that you are using [the correct type of join][join-types]. + +## Do all rows that pass the first test also pass the second? + +**Root cause:** In some cases all of the rows that satisfy the first filter's condition also satisfy the second filter's condition, so the second filter has no effect. + +**Steps to take:** + +1. Create a question that includes the first filter condition directly (i.e., in the question rather than using a variable), then add the second filter's condition. If the result set does not change, the problem is in the logic rather than in the filters. + +## Does the linked filter widget display a dropdown of filtered values? + +**Root cause:** In order for a linked filter widget to display the correct subset of values as a dropdown, an explicit [foreign key][foreign-key-gloss] definition must be set up---linking the filters does not by itself tell Metabase about the relationship. + +**Steps to take:** + +1. Check that Metabase's data model for your database includes the foreign key relationship. 
+ +[filter-widget-gloss]: /glossary.html#filter_widget +[foreign-key-gloss]: /glossary.html#foreign_key +[join-types]: /learn/sql-questions/sql-join-types.html +[learn-linking]: /learn/dashboards/linking-filters.html +[linked-filter-gloss]: /glossary.html#linked_filter diff --git a/docs/troubleshooting-guide/loading-from-h2.md b/docs/troubleshooting-guide/loading-from-h2.md new file mode 100644 index 000000000000..d17fd4c57126 --- /dev/null +++ b/docs/troubleshooting-guide/loading-from-h2.md @@ -0,0 +1,138 @@ +--- +redirect_from: + - ./loading-from-h2.html +--- + +# Using or migrating from an H2 application database + +You have installed Metabase, but: + +- You're trying to migrate the application database from H2 to another database and something has gone wrong, +- You're trying to downgrade rather than upgrade, +- Metabase logs a `liquibase` error message when you try to run it, +- Metabase logs another error message that mentions `H2` or `h2` while it is running, or +- You're on Windows 10 and get a warning about file permissions. + +## Are you currently using H2 as your application database? + +**Root cause:** Metabase stores information about users, questions, and so on in a database of its own called the "application database", or "app database" for short. By default Metabase uses [H2][what-is-h2] for the app database, but we don't recommended it for production---because it's an on-disk database, it's sensitive to filesystem errors, such as a drive being corrupted or a file not being flushed properly. + +**Steps to take:** + +1. To check what you're using as the app database, go to **Admin Panel**, open the **Troubleshooting** tab, scroll down to "Diagnostic Info", and look for the `application-database` key in the JSON it displays. +2. See [Migrating from H2][migrate] for instructions on how to migrate to a more robust app database. + +## Are you trying to migrate the application database from H2 to something else? 
+ +**Root cause:** You are trying to [migrate][migrate] the app database from H2 to a production database such as PostgreSQL or MySQL/MariaDB using the `load-from-h2` command, but this has failed because the database filename is incorrect with an error message like: + +``` +Command failed with exception: Unsupported database file version or invalid file header in file +``` + +**Steps to take:** + +1. Create a copy of the exported H2 database (see [Backing up Metabase Application Data][backup]). _Do not proceed until you have done this_ in case something goes wrong. + +2. Check that the H2 database file you exported is named `metabase.db.mv.db`. + +3. H2 automatically adds `.mv.db` extension to the database path you specify on the command line, so make sure the path to the DB file you pass to the command does _not_ include the `.mv.db` extension. For example, if you've exported an application database, and you want to load the data from that H2 database into a PostgreSQL database using `load-from-h2`, your command will look something like: + + ``` + export MB_DB_TYPE=postgres + export MB_DB_DBNAME=metabase + export MB_DB_PORT=5432 + export MB_DB_USER= + export MB_DB_PASS= + export MB_DB_HOST=localhost + java -jar metabase.jar load-from-h2 /path/to/metabase.db # do not include .mv.db + ``` + +If you're using [Metabase Enterprise Edition][enterprise], you can use [serialization][serialization-docs] to snapshot your application database. Serialization is useful when you want to [preload questions and dashboards][serialization-learn] in a new Metabase instance. + +## Are you trying to downgrade? + +**Root cause:** Metabase does not support downgrading (i.e., reverting to an early version of the application). + +**Steps to take:** + +1. Shut down Metabase. +2. Restore the backup copy of the app database you made before trying to upgrade or downgrade. +3. Restore the JAR file or container of the older version you want to revert to. +4. Restart Metabase. 
+ +## Is the app database locked? + +**Root cause:** Sometimes Metabase fails to start up because an app database lock did not clear properly during a previous run. The error message looks something like: + +``` +liquibase.exception.DatabaseException: liquibase.exception.LockException: Could not acquire change log lock. +``` + +**Steps to take:** + +1. Open a shell on the server where Metabase is installed and manually clear the locks by running: + + ``` + java -jar metabase.jar migrate release-locks + ``` + +2. Once this command completes, restart your Metabase instance normally (_without_ the `migrate release-locks` flag). + +## Is the app database corrupted? + +**Root cause:** H2 is less reliable than production-quality database management systems, and sometimes the database itself becomes corrupted. This can result in loss of data in the app database, but can _not_ damage data in the databases that Metabase is connected. + +**Steps to take:** Error messages can vary depending on how the app database was corrupted, but in most cases the log message will mention `h2`. A typical command and message are: + +``` +myUser@myIp:~$ java -cp metabase.jar org.h2.tools.RunScript -script whatever.sql -url jdbc:h2:~/metabase.db +Exception in thread "main" org.h2.jdbc.JdbcSQLException: Row not found when trying to delete from index """"".I37: ( /* key:7864 */ X'5256470012572027c82fc5d2bfb855264ab45f8fec4cf48b0620ccad281d2fe4', 165)" [90112-194] + at org.h2.message.DbException.getJdbcSQLException(DbException.java:345) + [etc] +``` + +**How to fix this:** not all H2 errors are recoverable (which is why if you're using H2, _please_ have a backup strategy for the application database file). + +If you are running a recent version and using H2, the app database is stored in `metabase.db.mv.db`. 
- Open a shell on the server where the Metabase instance is running and attempt to recover the corrupted H2 file by running the following four commands: + +``` +java -cp metabase.jar org.h2.tools.Recover + +mv metabase.db.mv.db metabase-old.db.mv.db + +touch metabase.db.mv.db + +java -cp target/uberjar/metabase.jar org.h2.tools.RunScript -script metabase.db.h2.sql -url jdbc:h2:`pwd`/metabase.db +``` + +## Are you running Metabase with H2 on Windows 10? + +**Root cause:** In some situations on Windows 10, the Metabase JAR needs to have permissions to create local files for the application database. When running the JAR, you'll see an error message like this: + +``` +Exception in thread "main" java.lang.AssertionError: Assert failed: Unable to connect to Metabase DB. +``` + +**Steps to take:** + +1. Right-click on the Metabase JAR file (_not_ the app database file). +2. Select "Properties". +3. Select "Unblock." + +## Is the application database taking too long to load? + +**Root cause:** You're using H2 as your app database, and the app database is so large that it can't be loaded in less than 5 seconds (which is the default timeout value). You'll see the message "Timeout" appear in the console when you try to start Metabase. + +**Steps to take:** + +1. Use a production-quality database such as PostgreSQL for the app database (preferred). +2. Go to the **Admin Panel** and increase the timeout setting for the app database. +3. Move Metabase to a faster server (in particular, a server with faster disks). 
+ +[backup]: ../operations-guide/backing-up-metabase-application-data.md +[enterprise]: /enterprise/ +[migrate]: ../operations-guide/migrating-from-h2.md +[serialization-docs]: ../enterprise-guide/serialization.md +[serialization-learn]: /learn/administration/serialization.html +[what-is-h2]: ../faq/setup/what-is-h2.md diff --git a/docs/troubleshooting-guide/loggingin.md b/docs/troubleshooting-guide/loggingin.md index 9352ce4f0093..94baa9926fd0 100644 --- a/docs/troubleshooting-guide/loggingin.md +++ b/docs/troubleshooting-guide/loggingin.md @@ -1,40 +1,29 @@ -## Troubleshooting Process +# Logging in -1. Try to log in with a local account -2. Try to log in with a Google Auth SSO account -3. Example JavaScript and Server logs if you are not able to log in. +
+- [Forgotten password](#forgotten-password) +- [Invalid Google Auth token](#invalid-google-auth-token) +
-## Specific Problems: -### Forgotten Password +People can log in to Metabase in several different ways, each of which may require different background knowledge or a different line of investigation to fix if there are problems. If you are having problems, try going through the troubleshooting process below: -[This FAQ](../faq/using-metabase/how-do-i-reset-my-password.md) will tell you what to do in the event of a forgotten password. +1. Try to log in with a local account. +2. Try to log in with a Google Auth SSO account. +3. Check JavaScript and server logs. -### Invalid Google Auth Token: +You may also want to check [our troubleshooting guide for LDAP](./ldap.html). -Sometimes your token from Google will expire. +

Forgotten password

-#### How to detect this: +[This FAQ][reset-password] will tell you what to do if someone has forgotten their password. -Open up the JavaScript console. Try to log in with Google Auth, see if there are any error messages in the JavaScript console indicating an invalid account. +

Invalid Google Auth token

-Also open up your server logs, and see if there are any errors related to authentication. If there are, try recreating the token. +When you sign in with Google Auth, it creates a token to prove that you have authenticated. If this token becomes invalid for any reason (such as a change in configuration or a timeout) then you won't be able to log in with it. -#### How to fix this: +**How to detect this:** Open the JavaScript console in your browser. Try to log in with Google Auth and see if there are any error messages in the JavaScript console indicating an invalid account. You can also open your server logs and see if there are any errors related to authentication. If there are, try recreating the token. -Remove the old token from the Google Auth SSO tab in the Admin Panel and create a new one. If the root cause was an invalid auth token, this should fix the problem. +**How to fix this:** Remove the old token from the Google Auth SSO tab in the Admin Panel and create a new one. If the root cause was an invalid auth token, this should fix the problem. -## Helpful tidbits - -### Accessing the Metabase server logs - -While you can always look for the logs Metabase leaves on your server file system (or however you collect logs), if you are logged into Metabase with an admin account you can also access them from the Logs tab in the Troubleshooting section of the Admin Panel. To get to the Admin Panel, click the gear icon in the top-right of Metabase. - -### Checking for Javascript console errors - -Metabase will send debugging information and errors to your browser's developer console. 
To open the developer console, follow the instructions for your web browser of choice: - -- [Chrome](https://developers.google.com/web/tools/chrome-devtools/open#console) -- [Firefox](https://developer.mozilla.org/en-US/docs/Tools/Web_Console/Opening_the_Web_Console) -- [Safari](https://support.apple.com/guide/safari-developer/develop-menu-dev39df999c1/mac) -- [Edge](https://docs.microsoft.com/en-us/microsoft-edge/devtools-guide-chromium) +[reset-password]: ../faq/using-metabase/how-do-i-reset-my-password.html diff --git a/docs/troubleshooting-guide/my-dashboard-is-slow.md b/docs/troubleshooting-guide/my-dashboard-is-slow.md new file mode 100644 index 000000000000..473bd6788b2f --- /dev/null +++ b/docs/troubleshooting-guide/my-dashboard-is-slow.md @@ -0,0 +1,128 @@ +# My dashboard is slow + +You have created a dashboard that shows the right things but: + +- it takes longer to load than you think it should, or +- some cards load quickly but others take much longer, but +- the dashboard and/or the slow cards do load eventually. + +Our articles on [Metabase at scale][metabase-at-scale] and [making dashboards faster][faster-dashboards] will help set the stage for the steps below. + +## Did it used to be fast enough, but has recently slowed down? + +**Root cause:** Metabase is not an island, complete unto itself, so if the dashboards and the cards it shows haven't changed, the slowdown is probably due to a change in the environment. + +**Steps to take:** + +1. Check if the volume of data being analyzed has grown recently---for example, are some of your tables much larger than they used to be? If so, you may need to move your database server to a more powerful machine. +2. Check if more people are accessing the data warehouse, or if the same set of users are running more queries. (You can find this out by looking at the database server's logs.) 
If this is the case, you can either move the data warehouse to a more powerful machine or institute quotas to manage the volume of access. +3. Check if queries from Metabase are being queued for execution, i.e., whether they are having to wait their turn to execute. If so, you may be able to raise the priority of Metabase queries (though this will necessarily come at the cost of slowing others down). + +## Do you have too many cards in your dashboard? + +**Root cause:** When Metabase displays a dashboard, it re-runs all of the questions. We do our best to do this concurrently, but a dashboard with a hundred cards is going to be slower than one with a single question. And if your dashboard contains filters, then each time someone changes a filter setting, all of the cards that depend on it have to re-execute. + +**Steps to take:** + +1. Move cards from this dashboard into other dashboards until this dashboard's performance is acceptable. +2. Use [custom destinations][custom-destinations] to link cards in the main dashboard to the other dashboards. + +Note: this is a good way to design dashboards even when there aren't performance issues. A dashboard with a hundred cards isn't just slow, it's also hard to understand. Breaking things up improves comprehension as well as performance. Please read [this article][faster-dashboards] for tips on making dashboards more performant, and [this one][bi-best-practices] for designing good dashboards in general. + +## Is the database overloaded by other traffic? + +**Root cause:** Metabase is usually not the only application using your database, and you may not be the only person using Metabase. If someone else has opened a dashboard that launches a couple of dozen long-running queries, everyone else may then have to wait until database connections become free. + +**Steps to take:** + +1. 
Checking the performance logs of the database server or the machine it's running on often reveals that the real problem is caused by some third-party application. +2. The next steps depend on what those other applications are, how frequently they are making queries, whether the database can be replicated or the load can be moved onto other systems, and so on. If some of the applications involved are primarily being used for batch processing (e.g., daily or weekly reports), you can also check when those jobs are scheduled to ensure that they don't overlap. + +Note: you may also see your database being overloaded if you're using the same database for Metabase's app database and for your own data. We strongly recommend that you don't do this in a production system or if you have more than a handful of users. If you are doing this, set up a second database for Metabase's own use. + +## Are some of your queries intrinsically slow? + +**Root cause:** Joining half a dozen tables, each with a few million rows, simply takes a lot of time. While we do our best to create fully-formed SQL queries from graphical questions, SQL snippets, and questions that use other questions as starting points, it's a hard problem---particularly across as many databases as we support. + +**Steps to take:** + +1. Run each question in the dashboard on its own to see which ones are slow. +2. Look at the size of the result set for each question that is slow. If it contains hundreds of columns or many thousands of rows, see if you can modify the question to return a smaller result set. +3. Run the same SQL that Metabase is running but using a different tool. For example, if you have created a question using the Notebook Editor and you are using Postgres as your database, you can view the SQL, copy it, and run it from the command line using `psql`. (If you have written the question in SQL you can just copy and paste it.) 
If the query runs noticeably faster this way than it does when you run it through Metabase, the problem is almost certainly one of the ones described above. +4. Run the query with a JDBC-based tool like [DBeaver][dbeaver], which is also Java-based and uses most of the same database drivers as Metabase. Using this, and running the test query from the same machine that Metabase is running on, can help you determine if the problem is a network bottleneck. +5. If the query runs slowly when sent by another tool, see if you can write a SQL query that calculates the same result as the question you have built in Metabase, but does so more quickly. + +## Is Metabase generating inefficient SQL? + +**Root cause:** Metabase saves questions created graphically in Metabase Query Language (MBQL), then translates MBQL into queries for particular back-end databases. It creates the most efficient queries it can, but for the sake of portability, it doesn't take advantage of every database's idiosyncrasies, so sometimes a GUI question will be slower than the equivalent hand-written SQL. + +**Steps to take:** + +1. Check if you have the most recent version of Metabase: we fix problems as they're reported, and updating Metabase may make your problem go away. +2. You can use your SQL in place of the code we generate, and [make its result available][organizing-sql] to people who prefer the Notebook Editor as a starting point for their questions. +3. Please [file a bug report][bugs] to help us figure out how to generate better SQL. + +## Do you have the right database schema? + +**Root cause:** If the database schema is poorly designed, questions cannot run quickly. + +**Steps to take:** The difference between OLTP and OLAP, and how to design database schemas to support them, are out of scope for this troubleshooting guide. 
In brief, you must look at the data schema of the data warehouse, or ask the database administrator whether the database is designed for online transaction processing (OLTP) or online analytical processing (OLAP). Metabase is an OLAP application; if the database schema is designed for OLTP, you may need to create views that reorganize the data. + +Similarly, you probably don't need indexes for simple tables with a few tens of thousands of rows, but you almost certainly *do* if you have a few million rows. All of this is very dependent on the underlying database: Redshift can easily handle millions of rows with thousands of columns, but MySQL or PostgreSQL may require a star schema designed for OLAP to deliver the performance you need. + +## Is Metabase not caching the answer to your question? + +**Root cause:** By default caching is disabled so that we always re-run every question. However, if your data is only being updated every few seconds or minutes, you will improve performance by enabling caching. Note that: + +1. Ad hoc queries and exports are *not* cached, so doing a lot of either can impact performance. +2. Since cached values are stored in the application database they will still be there if Metabase restarts. +3. Each question (and any filter combination) is its own query, so if different users are viewing the same question with different filters, each will have to load once before it's cached. This is particularly obvious with [data sandboxing][data-sandboxing]: filtering the data based on the user's identity means that each user's question is slightly different. + +**Steps to take:** + +1. Go to Admin Panel > Settings > Caching to see if caching is enabled. +2. Determine whether the question *can* be cached. We hash the query string, so (for example) if results are being filtered by a user ID, every person who views the dashboard will be sending a slightly different question to the database, and the results will not be cached. +3. 
[This guide][admin-caching] explains how to change the minimum query duration (we cache anything that takes longer than that to run) and the maximum cache size for each query result. You may need to experiment with these values over several days to find the best balance. If the problem appears to be caused by a high proportion of sandboxed queries, check that the cache is large enough to store all of their results. + +## Are values repeatedly being converted on the fly? + +**Root cause:** Low performance can be caused by incorrect typing of columns, e.g., by storing a numeric value as a string. When this happens, the query converts values on the fly each time it runs. (Even a handwritten query will be slow if it has to do this conversion every time.) + +**Steps to take:** + +1. Go to Admin > Data Model and look at the raw data types of the columns being used in your question. If (for example) a column is stored as a string when the question needs a number or a timestamp, you may need to convert the column in place. +2. Alternatively, you can create a new table with the converted values in the right type. This takes more storage, and needs to be updated as new data arrives, but will not affect any other applications that are using the same database. + +## Is the dashboard fast when you view it in Metabase but slow when you've embedded it? + +**Root cause:** You can embed the charts and dashboards you create in Metabase in other websites in [several ways][embedding]. If people have to authenticate in order to view the dashboard in that other website, then you can pass their credentials to Metabase and use them to filter data. If they don't have to authenticate, though, then you can't do that, which means you may return a larger result set than you expect. 
+ +**Steps to take:** This is almost always a result of a design error---if people are only allowed to see a subset of the data when they're in Metabase, they shouldn't be able to see more (or all) of the data when the same question is displayed in an external web page. Please read [the introduction to embedding][embedding] to see how you can pass credentials from your site to Metabase. + +## Is Metabase and/or the data warehouse running on under-powered hardware? + +**Root cause:** If you run Metabase or the underlying data warehouse on a ten-year-old machine with only 1 GByte of RAM, it may not be able to keep up with your demands. + +**Steps to take:** + +1. Check the performance logs for the server where the database is running to see if it is hitting CPU or memory limits. +2. Check the performance logs for the server where Metabase is running. +3. If either is a bottleneck, upgrade to a more powerful server or one with more memory. + +## Does Metabase appear to freeze when you save a question that has not yet been run? + +**Root cause:** If you save a question that has never been executed, Metabase runs the question while saving it, which can make the UI look frozen. + +**Steps to take:** This is [a bug][freeze-bug] and we are working to fix it. Until it's corrected, the workaround is to run the question before saving it. However, it's very likely that the root cause is one of the more common problems described above. 
+ +[admin-caching]: ../administration-guide/14-caching.html +[bi-best-practices]: /learn/dashboards/bi-dashboard-best-practices.html +[bugs]: ./bugs.html +[custom-destinations]: /learn/dashboards/custom-destinations.html +[data-sandboxing]: /learn/permissions/data-sandboxing-row-permissions.html +[dbeaver]: https://dbeaver.io/ +[embedding]: /learn/embedding/embedding-charts-and-dashboards.html +[faster-dashboards]: /learn/administration/making-dashboards-faster.html +[freeze-bug]: https://github.com/metabase/metabase/issues/14957 +[metabase-at-scale]: /learn/administration/metabase-at-scale.html +[organizing-sql]: /learn/sql-questions/organizing-sql.html diff --git a/docs/troubleshooting-guide/proxies.md b/docs/troubleshooting-guide/proxies.md index dc6cb91246b9..ac64aca80b80 100644 --- a/docs/troubleshooting-guide/proxies.md +++ b/docs/troubleshooting-guide/proxies.md @@ -1,33 +1,35 @@ -If you're experiencing problems where attempting to save a question or dashboard sometimes fails, or Metabase only loads a blank page, this might be caused by the use of a proxy. +# Can't save questions or dashboards, or getting a blank page -A proxy could include other functions like a web application firewall (WAF), content optimization, or cache. +
+- [Saving questions or dashboards fails](#saving-questions-or-dashboards-fails) +- [Seeing a blank page instead of the Metabase interface](#seeing-blank-page) +
-Examples of proxies that are known to cause issues with Metabase: +If attempting to save a question or dashboard sometimes fails, or Metabase only loads a blank page, the problem might be the use of a proxy. A proxy could include other functions like a web application firewall (WAF), content optimization, or cache. Examples of proxies that are known to cause issues with Metabase include: - Cloudflare's Rocket Loader and WAF - Azure's WAF -- PageSpeed module for e.g., Apache +- PageSpeed module for Apache - Some anti-virus browser extensions or add-ons -## Specific Problems +

Saving questions or dashboards fails

-### Saving questions/dashboards fails - -If saving questions/dashboards fails with the save button changing to "Save Failed," or perhaps with the error, "Sorry you do not have permission to see that," this might be caused by either a WAF like Cloudflare or Azure. +If saving questions or dashboards fails and the save button displays "Save Failed," or if you get the error, "Sorry you do not have permission to see that," the problem might be with a WAF like Cloudflare or Azure. - When the save fails, check the Console tab of your browser's Developer Tools for any errors. -- You should also check the Network tab in the Developer Tools to view the network request. It will usually fail with error code 403, indicating the error is coming from the WAF and not Metabase. +- You should also check the Network tab in the Developer Tools in your browser to view the network request. It will usually fail with error code 403, indicating the error is coming from the WAF and not Metabase. + Clicking on the request will show more information, and looking at the headers will usually indicate where it originated from. Some WAFs have dynamic protection, which means that the problem might only occur after an upgrade of Metabase, and might go away after a few days. The solution is to disable the WAF for Metabase. Some services will show which rules were triggered, so it might be enough to disable those rules. -### Seeing a blank page instead of the Metabase interface +

Seeing a blank page instead of the Metabase interface

-If Metabase just loads a blank page instead of the interface, then it is usually caused by content optimization like PageSpeed or Cloudflare's Rocket Loader. +If Metabase displays a blank page instead of its interface, the problem is usually with content optimization like PageSpeed or Cloudflare's Rocket Loader. - Check the Console tab of your browser's Developer Tools for any errors involving Content Security Policy (CSP). -- See if Metabase has been able to deliver the HTML code by right clicking the blank page and selecting "View page source." It might look like gibberish, but it should say `Metabase` near line 25. +- See if Metabase has been able to deliver the HTML code by right clicking on the blank page and selecting "View page source." It might look like gibberish, but it should say `Metabase` near line 25. -The solution is to disable the content optimization for Metabase. +The solution is to disable content optimization for Metabase. diff --git a/docs/troubleshooting-guide/running.md b/docs/troubleshooting-guide/running.md index bbdac6f4f1a5..b9b5c90a6017 100644 --- a/docs/troubleshooting-guide/running.md +++ b/docs/troubleshooting-guide/running.md @@ -1,32 +1,47 @@ +# Running Metabase -## Specific Problems: +
+- [Metabase fails to start due to Heap Space OutOfMemoryErrors](#heap-space-outofmemoryerrors) +- [Diagnosing memory issues causing OutOfMemoryErrors](#diagnosing-outofmemoryerrors) +- [Metabase cannot read or write from a file or folder (IOError)](#cannot-read-write-ioerror) +
-### Metabase fails to start due to Heap Space OutOfMemoryErrors +Metabase runs on the Java Virtual Machine (JVM), and depending on how it's configured, it may use the server's filesystem to store some information. Problems with either the JVM or the filesystem can therefore prevent Metabase from running. -Normally, the JVM can figure out how much RAM is available on the system and automatically set a sensible upper bound for heap memory usage. On certain shared hosting -environments, however, this doesn't always work perfectly. If Metabase fails to start with an error message like +

Metabase fails to start due to Heap Space OutOfMemoryErrors

- java.lang.OutOfMemoryError: Java heap space +The JVM can normally figure out how much RAM is available on the system and automatically set a sensible upper bound for heap memory usage. On certain shared hosting environments, however, this doesn't always work as desired. The usual symptom of this is an error message like: -You'll just need to set a JVM option to let it know explicitly how much memory it should use for the heap space: +``` +java.lang.OutOfMemoryError: Java heap space +``` - java -Xmx2g -jar metabase.jar +If you are seeing this, you need to set a JVM option to let Java know explicitly how much memory it should use for the heap. For example, your Java runtime might use the `-X` flag to do this: -Adjust this number as appropriate for your shared hosting instance. Make sure to set the number lower than the total amount of RAM available on your instance, because Metabase isn't the only process that'll be running. Generally, leaving 1-2 GB of RAM for these other processes should be enough; for example, you might set `-Xmx` to `1g` for an instance with 2 GB of RAM, `2g` for one with 4 GB of RAM, `6g` for an instance with 8 GB of RAM, and so forth. You may need to experiment with these settings a bit to find the right number. +``` +java -Xmx2g -jar metabase.jar +``` -As above, you can use the environment variable `JAVA_OPTS` to set JVM args instead of passing them directly to `java`. This is useful when running the Docker image, -for example. +Adjust the memory allocation upward until Metabase seems happy, but make sure to keep the number lower than the total amount of RAM available on your machine, because Metabase won't be the only process running. Leaving 1--2 GB of RAM for other processes is generally enough, so you might set `-Xmx` to `1g` on a machine with 2 GB of RAM, `2g` on one with 4 GB of RAM, and so on. You may need to experiment with these settings to find one that makes Metabase and everything else play nicely together. 
- docker run -d -p 3000:3000 -e "JAVA_OPTS=-Xmx2g" metabase/metabase +You can also use the environment variable `JAVA_OPTS` to set JVM args instead of passing them directly to `java`. This is particularly useful when running the Docker image: -### Diagnosing memory issues causing OutOfMemoryErrors +``` +docker run -d -p 3000:3000 -e "JAVA_OPTS=-Xmx2g" metabase/metabase +``` -If the Metabase instance starts and runs for a significant amount of time before running out of memory, there might be an event (i.e. a large query) triggering the `OutOfMemoryError`. One way to help diagnose where the memory is being used is to enable heap dumps when an OutOfMemoryError is triggered. To enable this, you need to add two flags to the `java` invocation: +

Diagnosing memory issues causing OutOfMemoryErrors

- java -Xmx2g -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/path/to/a/directory -jar metabase-jar +If the Metabase instance starts and runs for a significant amount of time before running out of memory, there might be a specific event, such as a large query, triggering the `OutOfMemoryError`. One way to diagnose where the memory is being used is to enable heap dumps when an `OutOfMemoryError` is triggered. To enable this, you need to add two flags to the `java` invocation: -The `-XX:HeapDumpPath` flag is optional, with the current directory being the default. When an `OutOfMemoryError` occurs, it will dump an `hprof` file to the directory specified. These can be large (i.e. the size of the `-Xmx` argument) so ensure your disk has enough space. These `hprof` files can be read with many different tools, such as `jhat` included with the JDK or the [Eclipse Memory Analyzer Tool](https://www.eclipse.org/mat/). +``` +java -Xmx2g -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/path/to/a/directory -jar metabase.jar +``` -### Metabase cannot read or write from a file or folder (IOError) +The `-XX:HeapDumpPath` flag specifies where to put the dump---the current directory is the default. When an `OutOfMemoryError` occurs, it will dump an `hprof` file to the directory specified. These can be very large (i.e., the size of the `-Xmx` argument) so ensure your disk has enough space. These `hprof` files can be read with many different tools, such as `jhat` (which is included with the JDK) or the [Eclipse Memory Analyzer Tool][eclipse-memory-analyzer]. -If you find an error regarding file permissions, like Metabase being unable to read a SQLite database or a custom GeoJSON, check out the section "Metabase can't read to/from a file or directory" in our [Docker troubleshooting guide](docker.md). 

Metabase cannot read or write from a file or folder (IOError)

+ +If you see an error regarding file permissions, like Metabase being unable to read a SQLite database or a custom GeoJSON map file, check out the section "Metabase can't read to/from a file or directory" in our [Docker troubleshooting guide](./docker.html). + +[eclipse-memory-analyzer]: https://www.eclipse.org/mat/ diff --git a/docs/troubleshooting-guide/sandboxing.md b/docs/troubleshooting-guide/sandboxing.md new file mode 100644 index 000000000000..0eef2c1b6d6b --- /dev/null +++ b/docs/troubleshooting-guide/sandboxing.md @@ -0,0 +1,172 @@ +# Troubleshooting sandbox access to rows and columns + +[Sandboxing data][sandboxing-your-data] gives some people access to only a subset of the data. (The term comes from the practice of putting children in a sandbox to play safely.) To implement sandboxing, Metabase runs a query that filters rows and/or selects a subset of columns from a table based on [the person's permissions][permissions]; the person's query then runs on the initial query's result (i.e., it runs on the sandboxed data). + +These articles will help you understand how sandboxing works: + +- [Data Sandboxing: setting row-level permissions][row-permissions]. +- [Advanced data sandboxing: limiting access to columns][column-permissions]. + +If you can't see tables at all, check out [I can't see my tables][cant-see-tables]. + +## People can't see **rows** in a table they _should_ be able to see + +### Is a sandbox filtering rows by a user attribute? + +**Root cause:** A sandbox is using user attribute to filter rows. + +**Steps to take:** + +This is expected behavior: using a user attribute to filter rows for a sandboxed table is how sandboxing works. But if you _don't_ want Metabase to filter those rows, you'll need to either: + +- Remove the sandbox (which would grant full access to all rows to everyone with access to that table). Go to **Admin** > **Permissions**, and change the access level for the table. 
+- Add the person to a group (or create a group) with unrestricted access to the table. Check out [Guide to data permissions][data-permissions]. + +## People can see **rows** they're _not_ supposed to see + +There are several reasons people could be seeing rows that they're not supposed to see. + +### Are those people also in groups with permission to view the entire table? + +**Root cause:** People are in groups with permissions to view the table, and therefore can see all rows, not just the sandboxed rows. + +**Steps to take:** + +For the person in question, check to see which groups they belong to. Do any of the groups have access to the table you're trying to sandbox? If so, remove them from that group. Remember that everyone is a member of the "All users" group, which is why we recommend you revoke permissions from the all users group, and create new groups to selectively apply permissions to your data sources. + +### Is the question available via Signed Embedding or Public Sharing? + +**Root cause**: The question is public. [Public questions][public-sharing], even those that use [Signed Embedding][signed-embedding], can't be sandboxed. If someone doesn't have to log into Metabase to view the question, Metabase doesn't have user attributes or group information available for filtering the data, so all results will be shown. + +**Steps to take**: + +You should _avoid_ public sharing when you are sandboxing data. See [public sharing][public-sharing]. + +### Is the question written in SQL? + +**Root cause**: People with SQL access to a database cannot be sandboxed. They have as much access to the database as the user account used to connect Metabase to the database. Even if you hide tables in Metabase, someone with SQL access to a database would still be able to query those tables. Which is also to say that SQL questions cannot be sandboxed. 
Sandboxing exclusively applies to questions composed in the query builder (even though you can use a SQL question to create a sandbox, e.g., to create a result set of a table that excludes some columns). + +**Steps to take:** + +- Don't try to sandbox a question written in SQL, because you can't. + +- If you want to sandbox access, avoid adding the person to a group with SQL access to that table (or any other more permissive access to that table, for that matter). + +- If you want to give them SQL access, but still limit what the person can see, you'll need to set up permissions in your database, and connect that database via the user account with that restricted access. You can connect the same database to Metabase multiple times, each with different levels of access, and expose different connections to different groups. But again, you won't be able to sandbox the data from a person with SQL access. + +### Is the question retrieving data from a non-SQL data source? + +**Root cause:** Data sandboxes do not support non-SQL databases. + +**Steps to take:** + +There is not much you can do here: if you need to sandbox data, [you can't use these databases][unsupported-databases]. + +### If using Single Sign-on (SSO), are user attributes correct? + +**Root cause**: If people are logging in with SSO, but the expected attributes aren't being saved and made available, sandboxing will deny access. + +**Steps to take**: + +Our docs on [Authenticating with SAML][authenticating-with-saml] and [Authenticating with JWT][jwt-auth] explain how to use your identity provider to pass user attributes to Metabase; those user attributes can then be used to sandbox data. + +## People can see **columns** they're _not_ supposed to see + +### Did the administrator forget to set up a sandbox? + +**Root cause:** The administrator didn't restrict access to the underlying table when setting up sandboxing. + +**Steps to take**: + +1. 
Go into **Admin Panel** > **Permissions** for the table in question. +2. Check that the sandbox exists, and that the question used to sandbox the table excludes the columns you don't want people to see. + +### Does the question used to set up the sandbox include the columns? + +**Root cause:** The question used to create the sandbox includes the columns they're not supposed to see. + +**Steps to take**: + +Make sure that you're using a SQL question to create the sandbox, and that you're not including columns you should be excluding. + +If you build a question using the query builder (i.e., use a simple or custom question), you may unintentionally pull in additional columns. You can check exactly which columns are included by viewing the question in the Notebook Editor and clicking on the **View the SQL** button. But again: if you use SQL questions to sandbox the data, this problem goes away. + +## Is the person in a group with unrestricted access to view the table? + +**Root cause:** You've sandboxed the table with the question, but the person is also in a group with unrestricted access to the table. If a person is in multiple groups, they'll get the most permissive access to a data source across all of their groups. + +**Steps to take**: + +Remove the person from all groups with unrestricted access to the sandboxed table. If they need some permissions from those other groups, you'll need to create a new group with a new set of permissions that only has sandboxed access to the table in question. + +## People can't see **columns** they _should_ be able to see + +### Do they only have sandboxed access to the table? + +**Root cause:** They only have access to a sandboxed version of the table, where only some columns are shown. + +**Steps to take**: + +Add these people to a group (or create a new group) that has permissions to view the table. + +### Has an administrator hidden fields in the table? + +**Root cause:** An administrator has hidden fields in the table. 
+ +**Steps to take:** + +Go to **Admin** > **Data model** and find the table. Check to make sure that the fields you want to make visible are not hidden. + +### Is a field remapped to display info from a restricted table? + +**Root cause:** If a table which the person _does_ have sandboxed access to has a field that uses remapping to display information from another table which the person does _not_ have sandboxed access to, they won't be able to see the table. For example, if you have remapped an ID field to display a product's name instead, but the person does not have access to the product table, they won't be able to see the column. + +**Steps to take:** + +1. Go to **Admin Panel** > **Data model** for the fields in question. +2. If the value is remapped from a restricted table, change it so that Metabase will use the original value from the table. See [Metadata editing][data-model] for more information. + +### Is the question available via signed embedding? + +**Root cause**: [Signed embedding][signed-embedding] will show all results by default. While it's possible to control filtering with [locked parameters][locked-parameters], signed embedding depends only on the token generated by the including page, not whether someone is logged into Metabase. + +**Steps to take**: + +Since someone must log in so that Metabase can apply sandboxed views to that person, avoid using signed embedding when you want to restrict row or column access to a table. + +## People can't see data they're supposed to be able to see + +Someone is supposed to be able to view some of the values in a table in their queries, but is denied access or gets an empty set of results where there should be data. + +**Root cause**: The administrator restricted access to the table. Administrators usually restrict access to tables as part of sandboxing; if the restrictions are too tight by mistake (e.g., "no access") then people might not be able to see any data at all. + +**Steps to take:** + +1. 
Check the access level for the groups by going to **Admin Panel** and viewing **Permissions** for the table in question. +2. If the person isn't in a group with access to that table, add them to a group that does, or create a new group with access to that table and add them to that new group. + +## Is the person who can't see the sandboxed data in multiple groups? + +**Root cause:** We only allow one sandbox per table: if someone is a member of two or more groups with different permissions, every rule for figuring out whether access should be allowed or not is confusing. We therefore only allow one rule. + +**Steps to take:** + +The administrator can [create a new group][groups] to capture precisely who's allowed access to what. + +[authenticating-with-saml]: ../enterprise-guide/authenticating-with-saml.html +[locked-parameters]: /learn/embedding/embedding-charts-and-dashboards.html#hide-or-lock-parameters-to-restrict-what-data-is-shown +[column-permissions]: /learn/permissions/data-sandboxing-column-permissions.html +[data-model]: ../administration-guide/03-metadata-editing.html +[data-permissions]: /learn/permissions/data-permissions.html +[groups]: ../administration-guide/05-setting-permissions.html#group +[jwt-auth]: ../enterprise-guide/authenticating-with-jwt.html +[permissions]: /learn/permissions/data-permissions.html +[prepared-statement]: /glossary.html#prepared_statement +[public-sharing]: ../administration-guide/12-public-links.html +[row-permissions]: /learn/permissions/data-sandboxing-row-permissions.html +[sandboxing-your-data]: ../enterprise-guide/data-sandboxes.html +[signed-embedding]: /learn/embedding/embedding-charts-and-dashboards.html#enable-embedding-in-other-applications +[cant-see-tables]: cant-see-tables.html +[sandbox-limitations]: ../enterprise-guide/data-sandboxes.html#current-limitations +[unsupported-databases]: ../enterprise-guide/data-sandboxes.html#data-sandboxes-do-not-support-non-sql-databases diff --git 
a/docs/troubleshooting-guide/server-logs.md b/docs/troubleshooting-guide/server-logs.md new file mode 100644 index 000000000000..42b81997625d --- /dev/null +++ b/docs/troubleshooting-guide/server-logs.md @@ -0,0 +1,28 @@ + +# How to read the server logs + +Here's an example log from running a query: + +``` +2021-07-07 15:53:18,560 DEBUG middleware.log :: POST /api/dataset 202 [ASYNC: completed] 46.9 ms (17 DB calls) App DB connections: 1/10 Jetty threads: 3/50 (4 idle, 0 queued) (72 total active threads) Queries in flight: 0 (0 queued); h2 DB 4 connections: 0/1 (0 threads blocked) +``` + +Let's unpack the log: + +- **Time of log:** `2021-07-07 15:53:18,560`. +- **Log level:** `DEBUG`. There are different types of log levels. To learn more, check out [Metabase logs][log-level]. +- **Namespace:** `middleware.log`. You can tweak your logging level to get more or less information from this namespace. +- **Method:** `POST`. The HTTP method verb, like POST, PUT, GET, DELETE. +- **Path:** `/api/dataset`. The handling URL. Note that URL parameters aren't included, which can make debugging certain issues a little tricky. +- **Code:** `202`. The HTTP status code. +- **ASYNC:** `[ASYNC: completed]`. Whether Metabase could deliver the results to the browser. If Metabase couldn't deliver the results, for example if someone starts a query and closes their browser before the query finishes, the ASYNC status will say "cancelled". +- **Response time:** `46.9 ms`. The time Metabase takes to handle the request (from when Metabase receives the request until it's returned results back to the browser). +- **Database calls:** `(17 DB calls)`. The number of query statements used, which in addition to calls to the queried data source(s), includes calls to the Metabase application database. +- **Application database connections:** `App DB connections: 1/10`. The number of active connections, and the available pool of connections. 
+- **Jetty threads:** `Jetty threads: 3/50 (4 idle, 0 queued)`. Lists the number of active threads, and the total pool of threads available. The `(4 idle, 0 queued)` are the spare hot threads, and the number of threads queued. If you find you're maxing out the number of threads in your pool, check out [Metabase at scale][scale]. +- **Java threads:** `(72 total active threads)`. The total number of threads Metabase is using. +- **Queries in flight:** `Queries in flight: 0 (0 queued)`. The number of active and queued queries across all database sources connected to Metabase. We recommend checking the **Database info** below for troubleshooting issues with the database related to the request. +- **Database info**: `h2 DB 4 connections: 0/1 (0 threads blocked)`. Shows database type, database ID, connections active/pool (and queue). This info is specific to the database related to the request (in this case a `POST` request), and not to the overall queries in flight. + +[log-level]: ../operations-guide/log-configuration.html +[scale]: /learn/administration/metabase-at-scale.html \ No newline at end of file diff --git a/docs/troubleshooting-guide/sync-fingerprint-scan.md b/docs/troubleshooting-guide/sync-fingerprint-scan.md new file mode 100644 index 000000000000..3f034641a7e2 --- /dev/null +++ b/docs/troubleshooting-guide/sync-fingerprint-scan.md @@ -0,0 +1,88 @@ +# Synchronizing with the database + +
+- [Metabase can't sync, fingerprint, or scan](#cant-sync-fingerprint-scan) +- [Metabase isn't showing all of the values I expect to see](#not-showing-all-values) +- [I cannot force Metabase to sync or scan using the API](#cant-force-with-api) +- [Sync and scan take a very long time to run](#sync-scan-long-time) +
+ +Metabase needs to know what's in your database in order to show tables and fields, populate dropdown menus, and suggest good visualizations, but loading all the data would be very slow (or simply impossible if you have a lot of data). It therefore does three things: + +1. Metabase periodically asks the database what tables are available, then asks which columns are available for each table. We call this *syncing*, and it happens [hourly or daily][sync-frequency] depending on how you've configured it. It's very fast with most relational databases, but can be slower with MongoDB and some [community-built database drivers][community-db-drivers]. + +2. Metabase *fingerprints* the column the first time it synchronizes. Fingerprinting fetches the first 10,000 rows from each column and uses that data to guesstimate how many unique values each column has, what the minimum and maximum values are for numeric and timestamp columns, and so on. Metabase only fingerprints each column once, unless the administrator explicitly tells it to fingerprint the column again, or in the rare event that a new release of Metabase changes the fingerprinting logic. + +3. A *scan* is similar to fingerprinting, but is done every 24 hours (unless it's configured to run less often or disabled). Scanning looks at the first 5000 distinct records ordered ascending, when a field is set to "A list of all values" in the Data Model, which is used to display options in dropdowns. If the textual result of scanning a column is more than 10 kilobytes long, for example, we display a search box instead of a dropdown. + +

Metabase can't sync, fingerprint, or scan

+ +If the credentials Metabase is using to connect to the database don't give it privileges to read the tables, the first sign will often be a failure to sync, which would then also stop fingerprint and scan. + +**How to detect this:** You can't see any of the tables in the database, or columns that have just been added to your data source don't show up in Metabase. + +**How to fix this:** [This guide][troubleshooting-db-connection] explains how to troubleshoot database connections. The relevant steps for solving this problem are: + +1. Sometimes browsers will show an old cached list of tables or columns. Refreshing the page will update the cache. +2. If you've just set up a new database in Metabase, the sync process might still be running---it's normally fast, but it can sometimes take a while. You can follow its progress in Admin > Troubleshooting > Logs. +3. If you've just added a table or a column, Metabase might not have synced yet. You can manually run the sync process by going to the Admin Panel, selecting "Databases", choosing your database, and clicking on "Sync database schema now". +4. To see if the problem is caused by lack of database privileges, try running a query like the one below for each table you think you should be able to access: + +``` +SELECT * +FROM table +LIMIT 1 +``` + +Note that we only get the first 10,000 documents when scanning a MongoDB collection, so if you're not seeing some new fields, those fields might not exist in the documents we looked at. Please see [this discussion][metabase-mongo-missing] for more details. + +

Metabase isn't showing all of the values I expect to see

+ +**How to detect this:** + +1. The UI isn't displaying some of the values you expect to see in a dropdown menu. +2. The UI is showing a search box for selecting values where you expect a dropdown menu. + +**How to fix this:** + +1. Go to the Admin Panel and select the **Data Model** tab. +2. Select the database, schema, table, and field in question. +3. Click the gear-icon to view all the field's settings. +4. Set **Field Type** to "Category" and **Filtering on this field** to "A list of all values." +5. Click the button **Re-scan this field** in the bottom. + +

I cannot force Metabase to sync or scan using the API

+ +Metabase syncs and scans regularly, but if the database administrator has just changed the database schema, or if a lot of data is added automatically at specific times, you may want to write a script that uses the [Metabase API][api-learn] to force sync or scan to take place right away. [Our API][metabase-api] provides two ways to do this: + +1. Using an endpoint with a session token: `/api/database/:id/sync_schema` or `api/database/:id/rescan_values`. These do the same things as going to the database in the Admin Panel and choosing **Sync database schema now** or **Re-scan field values now** respectively. In this case you have to authenticate with a user ID and pass a session token in the header of your request. + +2. Using an endpoint with an API key: `/api/notify/db/:id`. This endpoint was made to notify Metabase to sync after an [ETL operation][etl] finishes. In this case you must pass an API key by defining the `MB_API_KEY` environment variable. + +**How to detect this:** Your script fails to run. + +**How to fix this:** + +1. Make sure you are able to sync and scan manually via the Admin Panel. +2. Make sure you're using the correct URL to send the request to Metabase. +3. Check the error message returned from Metabase. +4. Check the credentials you're using to authenticate and make sure they identify your script as a user with administrative privileges. + +

Sync and scan take a very long time to run

+ +**How to detect this:** Sync and scan take a long time to complete. + +**How to fix this:** +1. For sync, delays are usually caused by a large database with hundreds of schemas, thousands of tables and with hundreds of columns in each table. If you only need a subset of those tables or columns in Metabase, then restricting the privileges used to connect to the database will make sure that Metabase can only sync a limited subset of the database. +2. Scanning normally takes longer than sync, but you can reduce the number of fields Metabase will scan by changing the number of fields that have the **Filtering on this field** option set to "A list of all values". Setting fields to either "Search box" or "Plain input box" will exclude those fields from scans. + +You can "fix" this by disabling scan entirely by going to the database in the Admin Panel and telling Metabase, "This is a large database," and then going to the Scheduling tab. However, sync is necessary: without it, Metabase won't know what tables exist or what columns they contain. + +[api-learn]: /learn/administration/metabase-api.html +[bugs]: ./bugs.html +[community-db-drivers]: ../developers-guide-drivers.html +[etl]: /glossary.html#etl +[metabase-api]: ../api-documentation.html +[metabase-mongo-missing]: ../administration-guide/databases/mongodb.html#i-added-fields-to-my-database-but-dont-see-them-in-metabase +[sync-frequency]: ../administration-guide/01-managing-databases.html#choose-when-metabase-syncs-and-scans +[troubleshooting-db-connection]: ./datawarehouse.html diff --git a/docs/troubleshooting-guide/timezones.md b/docs/troubleshooting-guide/timezones.md index 8f01e794f839..e65759a5ac55 100644 --- a/docs/troubleshooting-guide/timezones.md +++ b/docs/troubleshooting-guide/timezones.md @@ -1,67 +1,78 @@ -## Overview -The source of "wrong" numbers in charts or reports is often due to an underlying time zone issue. 
This type of issue is extremely common, both in Metabase and in many other analytics tools and services. The best way to avoid surprising time zone behavior is by selecting the "Report Time Zone" setting in the General settings tab of the Admin Panel. The Report Time Zone ensures that the time zone of query results matches the time zone used by the database for its date calculations. A Report Time Zone is currently supported on the following databases: +# The dates and times in my questions and charts are wrong -- Druid -- MySQL -- Oracle -- PostgreSQL -- Presto -- Vertica +You are doing calculations with dates and times, or displaying them in charts, but: -If you're using a database that doesn't support a Report Time Zone, it's best to ensure that the Metabase instance's time zone matches the time zone of the database. The Metabase instance's time zone is the Java Virtual Machine's time zone, typically set via a `-Duser.timezone<..>` parameter or the `JAVA_TIMEZONE` environment variable. How the time zone is set will depend on how you launch Metabase. Note that the Metabase instance's time zone doesn't impact any databases that use a Report Time Zone. +- the values appear to be wrong, or +- summary values are wrong. +## Is the problem due to time zones? -## Troubleshooting Process +**Root cause:** Dates and times are stored using different time zones, but some or all of those time zones aren't taken into account when doing calculations (i.e., the problem is inconsistent data). -When you suspect a you have a time zone issue, you should collect a bit of information about your overall system. +**Steps to take:** -1. What is the time zone of the data you think is being displayed improperly? (I.e., in the database itself.) -2. Are you using an explicit time zone setting on each timestamp, or are the timestamps being stored without a timestamp? (E.g., `Dec 1, 2019 00:00:00Z00` is an explicitly timestamped value, but `Dec 1, 2019` has an implied time zone.) -2. 
What time zone is the database server set to? -3. What time zone is the server that is running Metabase set to? -4. What is your Reporting Time zone setting? -5. What is your browser time zone setting? +To fix this problem you'll need answers to these questions: -With this information collected, you can dig into the actual "mistakes" you are seeing. Most often these occur in an aggregation. It is useful to simplify the aggregation as much as you can while still seeing the "mistake." For example, if you are looking at a "Net Negative Churn by Quarter" report that is based on an underlying table consisting of orders, see if the "count of orders by Quarter" has similarly incorrect behavior. If so, troubleshoot the second, simpler question. +1. What is the correct time zone of the data you think is being displayed improperly (i.e., what's the right answer)? +2. Is there an explicit time zone setting on every timestamp, or are some or all timestamps being stored without a time zone? For example, `Dec 1, 2019 00:00:00Z00` includes the time zone (shown after the `Z`), but `Dec 1, 2019` doesn't. +3. What time zone is the database server using? +4. What time zone is Metabase using? -Once you have simplified a question as much as possible, you can try to understand exactly what time zone conversion is causing the underlying problem. In the below, we assume that you are looking at a time series with daily values. If your error is happening with weeks, or other time granularities, perform the same sequence of steps but translating "day" to your specific granularity. +Once you have these answers, look for cases like these: + +1. Your question or chart is comparing or sorting values with inconsistent or missing time zones. For example, if a flight's departure and arrival times are reported in local time, it can appear to arrive before it has left. +2. 
Your question is aggregating timestamps with different time zones: for example, the "daily" totals for your website's traffic include more than 24 hours worth of data because you are using the local dates from East Asia, Europe, and the Americas. + +Once you think you have identified a problem, drill down to understand exactly what time zone conversion is causing the underlying problem. For example, suppose you're looking at a time series with daily values; if your error is happening with weekly totals, you can: 1. Pick a specific day where you know the number is incorrect. 2. Click on the data point in a chart, or a cell in a result table, and select "View these X." -3. Open this question in two other tabs in your browser, but with the date filters changed such that one tab has the rows in the underlying table from the _previous_ day, and the other table has the rows in the underlying table from the _next_ day. -4. Check that the date field being used to group the result in the underlying display is correct. If it is different from what you have stored in the database, or what you have in another tool, then the timestamp is being transformed across the board into something incorrect. This is often the case when you are using a date or time lacking an explicit time zone. -5. If the underlying timestamps are correct (which is often the case if you are using dates or times with explicit time zones), it is likely that the individual times are being grouped into days in a different time zone than the one you want. -6. To find out which time zone they are being transformed into, tweak the times on the date filters on the question you are looking at by moving the start time and start date backwards by an hour until you either get the correct number or you have gone back by 12 hours. +3. Open this question in two other tabs in your browser. 
Change the date filters so that one tab has the rows in the underlying table from the _previous_ day, and the other table has the rows in the underlying table from the _next_ day. +4. Check that the date field being used to group the result in the underlying display is correct. If it is different from what you have stored in the database, or what you have in another tool, then the timestamp is being transformed incorrectly across the board. This often happens when you use a date or time lacking an explicit time zone. +5. If the underlying timestamps are correct (which they should be if they have explicit time zones), the individual times are probably being grouped into days in a different time zone than the one you want. +6. To find out which time zone they are being transformed to, tweak the times on the date filters on the question you are looking at by moving the start time and start date backwards by an hour until you either get the correct number or you have gone back by 12 hours. (If any of your time zones include India, Newfoundland, or another jurisdiction with a half-step time zone, you may need to do this in half-hour increments.) 7. If that doesn't work, try moving the start and end times forward by an hour until you either get the correct number or you've gone forward by 12 hours. -8. Additionally, if any of your time zones include India, you will need to do this by half hour increments as well. -9. If by this point you have reached a correct number, that means your time zone was converted by the number of hours forward or backwards you manually set the filter. If not, then the problem might not be a direct time zone issue. +8. If by this point you have the correct value, it means your time zone was converted by the number of hours forward or backwards you manually set the filter. If that's the case, check whether the offset you've come up with matches either the time zone of the data warehouse or the timezone of Metabase itself. 
+ +## Is the Report Time Zone set incorrectly? + +**Root cause:** Wrong numbers in questions or charts can be caused by a mis-match in the time zone being used by Metabase and the time zone being used by the data warehouse. + +**Steps to take:** + +1. Go to the Admin Panel, select the **Localization** tab, and check the **Report Time Zone** setting, which controls the timezone Metabase uses when connecting to the database. This setting is currently supported on: + - Druid + - MySQL + - Oracle + - PostgreSQL + - Presto + - Vertica +2. If you're using a database that doesn't support a Report Time Zone, ensure that Metabase's time zone matches that of the database. Metabase's time zone is the Java Virtual Machine's time zone, typically set via a `-Duser.timezone<..>` parameter or the `JAVA_TIMEZONE` environment variable; exactly how it is set will depend on how you launch Metabase. Note that Metabase's time zone doesn't impact any databases that use a Report Time Zone. + +## Are SQL queries not respecting the Reporting Time Zone setting? + +**Root cause:** We don't currently apply a reporting time zone to the results of SQL queries. -Now that you have the time zone adjustment, look at the list of time zone questions in the first set of steps and think about where this could have occurred. +**Steps to take:** -For example, let's say have a PST server time zone, and a GMT reporting time zone. If you had to manually go back 9 hours to get correct numbers, that suggests that the conversion is not happening for some reason -- this suggests you are using timestamps without a time zone. +1. Set a reporting time zone explicitly in your SQL query. -You can see a number of common problems below. If none of them apply, please [open a bug report](https://github.com/metabase/metabase/issues/new/choose) with the above information (time zones, and the results of the second troubleshooting process) as well as your Metabase, OS, and web browser versions. 
+## Are dates without an explicit time zone being converted to another day? -## Specific Problems: +**Root cause:** You are grouping by a date (rather than by a time) that doesn't have a time zone attached to it. -### SQL queries are not respecting the Reporting Time Zone setting -#### How to detect this: -You are not able to click on a cell in a result table or a chart. +**Steps to take:** -#### How to fix this: -We do not currently apply a reporting time zone to the results of SQL queries, so you should set one manually. +1. Look at every time field your question uses in the [Data Model Reference][data-model] and see if any of them are simply a "Date" field. +2. If so, make sure the server time zone reflects the reporting time zone, because when a query is run on Metabase, the server applies the configured time zone to that date. -### Dates without an explicit time zone are being converted to another day -#### How to detect this: -This occurs when you are grouping by a date (vs. a time) that does not have a time zone attached to it. Look at every time field your question uses in the Data Model Reference, and see if any of them are simply a "Date" field. +## Are you mixing explicit and implicit time zones? -#### How to fix this: -You will need to make sure the server time zone reflects the reporting time zone, because when a query is run on Metabase the server applies the time zone to which it is set to that date. +**Root cause:** You're comparing or doing arithmetic on two dates where one has an explicit time zone and one doesn't. +** Steps to take:** -### Mixing explicit and implicit time zones -#### How to detect this: -This often happens if you compare or perform arithmetic on two dates where one has an explicit time zone and one does not. This typically involves a question that uses multiple fields (e.g., when you filter on one timestamp and group by another). Check the time zones of each of the dates or times you are using in your question. +1. 
This typically happens with a question that uses multiple fields: for example, you're filtering on one timestamp and grouping by another. Check the time zones of each of the dates or times you are using in your question. +2. You''ll need to explicitly set the time zone for any value that doesn't have an explicit time zone. This will need to be done either in a SQL query or by transforming the data in your database to ensure both timestamps have time zones. -#### How to fix this: -You will need to explicitly cast the time zone that does not have an explicit time zone. This will need to be done either in a SQL query or by transforming the data in your database to ensure both timestamps have time zones. +[data-model]: ../users-guide/12-data-model-reference.html diff --git a/docs/users-guide/01-what-is-metabase.md b/docs/users-guide/01-what-is-metabase.md index 1a5060db0e9d..2569ce330ce8 100644 --- a/docs/users-guide/01-what-is-metabase.md +++ b/docs/users-guide/01-what-is-metabase.md @@ -46,9 +46,9 @@ To make a dashboard or pulse, click the plus (+) icon in the top-right of the ma ### Use search to quickly find things -![Search results](images/sharing-answers/search-results.gif) +![Search results](images/basic-exploration/search-results.gif) -The search bar at the top of the screen helps you find tables, dashboards, collections, saved questions, metrics, segments, and pulses in an instant. +The search bar at the top of the screen helps you find tables, dashboards, collections, saved questions, metrics, segments, and pulses in an instant. ## A primer on databases diff --git a/docs/users-guide/03-basic-exploration.md b/docs/users-guide/03-basic-exploration.md index 5c08786457aa..ed44f41cfccf 100644 --- a/docs/users-guide/03-basic-exploration.md +++ b/docs/users-guide/03-basic-exploration.md @@ -14,7 +14,7 @@ To learn more, see [Exploring data with Metabase's data browser](https://www.met #### Exploring collections -Collections in Metabase are a lot like folders. 
They're where all your team's dashboards and charts are kept. To explore a collection, just click on one in the **Our analytics** section of the home page, or click on `Browse all items` to see everything. +[Collections][collections] in Metabase are a lot like folders. They're where all your team's dashboards and charts are kept. To explore a collection, just click on one in the **Our analytics** section of the home page, or click on `Browse all items` to see everything. ![A collection](./images/collection-detail.png) @@ -24,7 +24,7 @@ Collections have a list of any other items that are saved within them, and you c #### Exploring dashboards -Dashboards are a set of questions and text cards that you want to be able to refer back to regularly. [Learn more about dashboards](07-dashboards.md). +[Dashboards][dashboards] are a set of questions and text cards that you want to be able to refer back to regularly. If you click on a part of a chart, such as a bar in a bar chart, or a dot on a line chart, you'll see a the **Action menu**, with actions you can take to dive deeper into that result, branch off from it in a different direction, or create an [X-ray](14-x-rays.md) to see an automatic exploration of the data. ![Drill through](images/drill-through/drill-through.png) @@ -57,8 +57,21 @@ When you're looking at the detail view of a question, you can use all the same a One of our personal favorite ways to explore is with the **Distribution** option. This will show you how many rows there are in a given table, grouped by the column you clicked on. So if you have a Users table, if you click on an Age column and select Distribution, you'll see a bar chart with the count of users you have in each age bracket. +### Search + +![Search results](./images/basic-exploration/search-results.gif) + +Use the search bar to find dashboards, questions, collections, and pulses. You can select from the typeahead's dropdown results, or hit enter to view a search results page. 
You can also activate the search bar from anywhere by pressing the `/` key. + +Searches take into account items’ titles, descriptions, and other metadata — you can even search the contents of your SQL queries. For example, you can search for things like `SELECT escape_pod FROM mothership` and find that one question you worked on six months ago. The results will display an item’s description, which collection it’s saved in, what kind of object it is, and whether it’s pinned. Note that you'll only ever see items in collections you have permission to view. + +![Search results](./images/basic-exploration/search-results.png) + --- ## Next: Asking custom questions So what do you do if you can't find an existing dashboard or question that's exactly what you're looking for? Let's learn about [asking our own new questions](04-asking-questions.md). + +[collections]: collections.md +[dashboards]: 07-dashboards.md \ No newline at end of file diff --git a/docs/users-guide/04-asking-questions.md b/docs/users-guide/04-asking-questions.md index c7a883041b11..298dc57de824 100644 --- a/docs/users-guide/04-asking-questions.md +++ b/docs/users-guide/04-asking-questions.md @@ -111,23 +111,17 @@ Click on a record's ID number (or primary key) to see more information about a g ![Record details](./images/notebook/record-details.png) -## Downloading Your Results - -You can download or export the results of a question by clicking on the Download arrow in the lower right of the screen. Results can be downloaded into .csv, .xlsx, or .json files. The maximum download size is 1 million rows. - -![Download Button](./images/download-button.png) - ## Starting new explorations from saved questions -Each time you start modifying a saved question, Metabase will create a new question for you. It'll give the new question a placeholder title, and let you know which question you started from. +Each time you start modifying a saved question, Metabase will create a new question for you. 
It'll give the new question a placeholder title, and let you know which question you started from. ![Starting from a saved question](./images/notebook/started-from-saved-question.png) So feel free to play around with any saved question, as you won't have any effect on the existing question. When you hit **Save** on the question, you can choose either to save as a new question (the default), or you can overwrite the existing question you started from. -You can also explicitly **Duplicate this question** from the edit menu (the pencil icon). +## Editing and sharing questions -![Duplicate a question](./images/notebook/duplicate-question.png) +Check out [sharing answers](06-sharing-answers.md). --- diff --git a/docs/users-guide/05-visualizing-results.md b/docs/users-guide/05-visualizing-results.md index e0771f8afb59..2e81299260b0 100644 --- a/docs/users-guide/05-visualizing-results.md +++ b/docs/users-guide/05-visualizing-results.md @@ -21,7 +21,7 @@ In Metabase, an answer to a question can be visualized in a number of ways: - [Funnel](#funnel) - [Map](#maps) -To change how the answer to your question is displayed, click on the **Visualization** button in the bottom-right of the screen to open the visualization sidebar. +To change how the answer to your question is displayed, click on the **Visualization** button in the bottom-left of the screen to open the visualization sidebar. ![Visualization options](images/VisualizeChoices.png) @@ -31,7 +31,7 @@ Once a question returns results, you can save the question, download the results ## Visualization types and options -Each visualization type has its own advanced options. Click the **Settings** button next to the Visualization button to see your options. The options panel also automatically opens up whenever you pick a new visualization type. +Each visualization type has its own advanced options. Click the **Settings** button next to the Visualization button to see your options. 
The options panel also automatically opens up whenever you pick a new visualization type. Not sure which visualization type to use? Check out [Which chart should you use?](https://www.metabase.com/learn/basics/visualizing-data/guide.html). @@ -148,7 +148,7 @@ This auto-pivoting is distinct from the pivot table visualization, which we cove ### Pivot table -Pivot tables allow you swap rows and columns, group data, and include subtotals in your table. You can group one or more metrics by one or more dimensions. +Pivot tables allow you to swap rows and columns, group data, and include subtotals in your table. You can group one or more metrics by one or more dimensions. Pivot tables are not currently available for the following databases in Metabase: @@ -163,7 +163,7 @@ In the settings for the Pivot Table visualization, you can assign fields to one - Fields to use for the table **rows** - Fields to use for the table **columns** -- Fields to use for the table **values** +- Fields to use for the table **values** Let's say we ask the following question in the notebook editor: @@ -175,7 +175,7 @@ From the `Orders` table, we've summarized by the count of orders and the average We've assigned the fields `User → State` and `Created At` to table rows, and assigned the `Product -> Category` field to generate our columns: Doohickey, Gadget, and so on. We can drag and drop dimensions between the row and column buckets, and add aggregations to the table values bucket. For example, if we assign a field to the columns bucket, Metabase will pivot that field and render each unique value of that field as a column heading. -You can put multiple fields in the "rows" and "columns" buckets, but note that the order of the fields changes how Metabase displays the table: each additional field will nest within the previous field. 
+You can put multiple fields in the "rows" and "columns" buckets, but note that the order of the fields changes how Metabase displays the table: each additional field will nest within the previous field. Where it makes sense, Metabase will automatically include subtotals for grouped rows. For example, as in the image above, because we've grouped our rows first by `State`, then by `Created At`, Metabase will list each year for each `State`, and aggregate the metric(s) for that subgroup. For orders placed in Wisconsin, Metabase would sum the count of orders for each category, and find the average annual order total in each product category in Wisconsin. @@ -327,6 +327,6 @@ Learn more about [visualizing data with maps](https://www.metabase.com/learn/bas --- -## Next: Sharing and organizing questions +## Next: saving and editing your questions -Now let's learn about [sharing and organizing your saved questions](06-sharing-answers.md). +Now let's learn about [saving and editing your questions](06-sharing-answers.md). diff --git a/docs/users-guide/06-sharing-answers.md b/docs/users-guide/06-sharing-answers.md index 006fc2b00eb0..29d17565aa7e 100644 --- a/docs/users-guide/06-sharing-answers.md +++ b/docs/users-guide/06-sharing-answers.md @@ -1,77 +1,80 @@ -## Sharing and organizing your questions and answers +# Saving and editing your questions -### How to save a question +## How to save a question -Whenever you’ve arrived at an answer that you want to save for later, click the **SAVE** button in the top right of the screen. This will also save the visualization option you’ve chosen for your answer. +Whenever you’ve arrived at an answer that you want to save for later, click the **Save** button in the top right of the screen. This will also save the visualization option you’ve chosen for your answer. 
![Save button](images/sharing-answers/save-button.png) -A pop-up box will appear, prompting you to give your question a name and description, and to pick which collection to save it in. Note that your administrator might have set things up so that you're only allowed to save questions in certain collections, but you can always save things in your Personal Collection. After saving your question, you'll be asked if you want to add it to a new or existing dashboard. +A pop-up box will appear, prompting you to give your question a name and description, and to pick which [collection](#collection) to save it in. Note that your administrator might have set things up so that you're only allowed to [save questions in certain collection](#collection-permissions), but you can always save things in your Personal Collection. After saving your question, you'll be asked if you want to add it to a new or existing dashboard. -Now, whenever you want to refer to your question again you can find it by searching for it in the search bar at the top of Metabase, or by navigating to the collection where you saved it. To edit your question, go to it and click the pencil icon in the top-right. +Now, whenever you want to refer to your question again you can find it by searching for it in the search bar at the top of Metabase, or by navigating to the collection where you saved it. -### Sharing questions with public links +## Downloading Your Results -If your Metabase administrator has enabled [public sharing](../administration-guide/12-public-links.md) on a saved question or dashboard, you can go to that question or dashboard and click on the sharing icon to find its public links. Public links can be viewed by anyone, even if they don't have access to Metabase. You can also use the public embedding code to embed your question or dashboard in a simple web page or blog post. 
- -![Share icon](images/sharing-answers/share-icon.png) - -### Organizing and finding your saved questions +You can export the results of a question by clicking on the __Download arrow__ (a down arrow in a cloud) in the lower right of the screen. Results can be downloaded into .csv, .xlsx, or .json files. The maximum download size is 1 million rows. Exported .xlsx files preserve the formatting defined in the question: date and currency formats are kept throughout, as well as column ordering and visibility. File names for the exported question will include a slug of the question title, so you can easily distinguish files when exporting multiple questions. -After your team has been using Metabase for a while, you’ll probably end up with lots of saved questions. Metabase has several ways to help you organize things and find what you’re looking for. +## Editing your question -![Our analytics](images/sharing-answers/our-analytics-page.png) +Once you save your question, a down arrow will appear to the right of the question's title. Clicking on the down arrow will bring up the **Question detail sidebar**, which gives you some options: -#### Collections +![Question detail sidebar](images/sharing-answers/question-details-sidebar.png) -Collections are the main way to organize questions, as well as dashboards and pulses. [Administrators can give you different kinds of access](../administration-guide/06-collections.md) to each collection: +- **Edit details** (Pencil icon). Change the title of the question, and add some description for context. Adding a description will also make the question easier to find using the search bar. You can also select more options to [cache the results of the question](#caching-results). +- **Add to dashboard** (Dashboard icon with plus symbol). See [dashboards][dashboards]. +- **Move** (Document icon with right arrow). Relocate the question to a different [collection](#collections). +- **Duplicate** (Square with little square). 
Create a copy of the question. Keep in mind that whenever you start editing a saved question, Metabase will create a copy of the question. You can either save your edits as a new question, or overwrite the original saved question. +- **Archive** (Folder with down arrow). See [Archiving items](#archiving-items). -- **View access:** you can see the collection and its contents, but you can't modify anything or put anything new into the collection. -- **Curate access:** you can edit, move, or archive the collection and its contents. You can also move or save new things in it and create new collections inside of it, and can also pin items in the collection to the top of the screen. Only administrators can edit permissions for collections, however. -- **No access:** you can't see the collection or its contents. If you have access to a dashboard, but it contains questions that are saved in a collection you don't have access to, those questions will show a permissions notification instead of the chart or table. +### Caching results -#### Your personal collection +{% include plans-blockquote.html %} -In addition to the collections you and your teammates have made, you'll also always have your own personal collection that only you and administrators can see. To find it, click on the "browse all items" button on the homepage and click on "my personal collection" in the list of collections. +If your results don't change frequently, you may want to cache your results, that is: store your results in Metabase so that the next time you visit the question, Metabase can retrieve the stored results rather than query the database again. For example, if your data only updates once a day, there's no point in querying the database more than once a day, as the data won't have changed. Returning cached results can be significantly faster, as the database won't have to redo the work to answer your query. 
-You can use your personal collection as a scratch space to put experiments and explorations that you don't think would be particularly interesting to the rest of your team, or as a work-in-progress space where you can work on things and then move them to a shared place once they're ready. +To cache results, click on the down arrow next to the question's title to open the __Question detail sidebar__, then click on the __Pencil icon__ to __Edit details__. In the Modal that pops up, in the bottom left, select __More options__. There you'll be able to tell Metabase how long it should cache the question's results. This caching will only apply to this specific question; admins can [configure database-wide caching settings][caching] in the __Admin panel__. -#### Pinned items +Admins can still set global caching, but setting a cache duration on a specific question will override that global setting–useful for when a particular question has a different natural cadence. -![Pins](images/sharing-answers/pinned-items.png) +### Question moderation -In each collection, you can pin important or useful dashboards or questions to make them stick to the top of the screen. Pinned items will also be displayed as large cards to make them stand out well. If you have Curate permissions for a collection, you can pin and un-pin things, and drag and drop pins to change their order. +{% include plans-blockquote.html %} -Any dashboards that are pinned in the main "Our analytics" collection will also show up on the homepage. +Administrators can **Verify** a question by clicking on the **Verify checkmark** in the **Moderation** section of the **Question detail sidebar**. Verifying a question is a simple way for an administrator to signal that they've reviewed the question and deemed it to be trustworthy. That is: the question is filtering the right columns, or summarizing the right metrics, and querying records from the right tables. 
-#### Search +Once verified, the question will have a verified icon next to the question's title. -![Search results](./images/sharing-answers/search-results.gif) +![Verified icon](images/sharing-answers/verified-icon.png) -Use the search bar to find dashboards, questions, collections, and pulses. You can select from the typeahead's dropdown results, or hit enter to view a search results page. You can also activate the search bar from anywhere by pressing the `/` key. +Verified questions are also more likely to show up higher in search suggestions and search results. -Searches take into account items’ titles, descriptions, and other metadata — you can even search the contents of your SQL queries. For example, you can search for things like `SELECT escape_pod FROM mothership` and find that one question you worked on six months ago. The results will display an item’s description, which collection it’s saved in, what kind of object it is, and whether it’s pinned. Note that you'll only ever see items in collections you have permission to view. +If someone modifies a verified question, the question will lose its verified status, and an administrator will need to review and verify the question again to restore its verified status. -![Search results](./images/sharing-answers/search-results.png) +### Question history -#### Moving +You can see the history of a question, including edits and verifications, in the **History** section of the **Question detail sidebar**. -To move a question, dashboard, or pulse into a collection, or from one collection to another, just click and drag it onto the collection where you want it to go. You can also click on the `…` menu to the right of the question and pick the Move action. If you're trying to move several things at once, click on the items' icons to select them, then click the Move action that pops up at the bottom of the screen. 
+Below each edit entry in the timeline, you can click on **Revert** to reinstate the question at the time of the edit. -![Selecting questions](images/sharing-answers/question-checkbox.png) +## Sharing questions with public links -Note that you have to have Curate permission for the collection that you're moving a question into _and_ the collection you're moving the question out of. +If your Metabase administrator has enabled [public sharing](../administration-guide/12-public-links.md) on a saved question or dashboard, you can go to that question or dashboard and click on the sharing icon to find its public links. Public links can be viewed by anyone, even if they don't have access to Metabase. You can also use the public embedding code to embed your question or dashboard in a simple web page or blog post. -#### Archiving +![Share icon](images/sharing-answers/share-icon.png) -Sometimes questions outlive their usefulness and need to be sent to Question Heaven. To archive a question or dashboard, just click on the `…` menu that appears on the far right when you hover over a question and pick the Archive action. You'll only see that option if you have "curate" permission for the current collection. You can also archive multiple items at once, the same way as you move multiple items. Note that archiving a question removes it from all dashboards or Pulses where it appears, so be careful! +To share a question, click on the arrow pointing up and to the right in the bottom right of the question. -You can also archive _collections_ if you have curate permissions for the collection you're trying to archive, the collection _it's_ inside of, as well as any and all collections inside of _it_. Archiving a collection archives all of its contents as well. 
+## Setting up alerts -If you have second thoughts and want to bring an archived item back, you can see all your archived questions from the archive; click the menu icon in the top-right of any collection page to get to the archive. To unarchive a question, hover over it and click the unarchive icon that appears on the far right. +You can set up questions to run periodically and notify you if the results are interesting. Check out [Alerts][alerts]. --- -## Next: creating dashboards +## Next: collections + +Next, we'll learn about how to organize our questions in [collections][collections]. -Next, we'll learn about [creating dashboards and adding questions to them](07-dashboards.md). +[alerts]: 15-alerts.md +[caching]: ../administration-guide/14-caching.md +[collections]: collections.md +[dashboards]: 07-dashboards.md diff --git a/docs/users-guide/07-dashboards.md b/docs/users-guide/07-dashboards.md index d8c4cf6f926d..fa4f40dbddb5 100644 --- a/docs/users-guide/07-dashboards.md +++ b/docs/users-guide/07-dashboards.md @@ -1,26 +1,33 @@ -## Dashboards +# Dashboards ![Interactive dashboard](images/dashboards/interactive-dashboard.png) -### What is a dashboard? +Quick links: -**Dashboards** group questions and present them on a single page. You can think of dashboards as shareable reports that feature a set of related questions. +- [Dashboard filters](08-dashboard-filters.md) +- [Dashboard subscriptions](dashboard-subscriptions.md) +- [Make your dashboards interactive](interactive-dashboards.md) +- [Learn how to build great dashboards](https://www.metabase.com/learn/dashboards/index.html) -A dashboard comprises a set of cards arranged on a grid. These cards can be questions - such as tables, charts, or maps - or cards can be text boxes. +## What is a dashboard? -You can add filter widgets to dashboards that filter data across multiple questions, and customize what happens when people click on a chart or a table. 
+**Dashboards** group questions and present them on a single page. You can think of dashboards as shareable reports that feature a set of related questions. You can set up [subscriptions to dashboards](dashboard-subscriptions.md) via email or Slack to receive the exported results of the dashboard's questions. + +A dashboard comprises a set of cards arranged on a grid. These cards can be questions - such as [tables, charts, or maps](05-visualizing-results.md) - or they can be [text boxes](/learn/dashboards/markdown.html). + +You can add [filter widgets to dashboards](08-dashboard-filters.md) that filter data identically across multiple questions, and [customize what happens when people click on a chart or a table](interactive-dashboards.md). You can make as many dashboards as you want. Go nuts. -### How to create a dashboard +## How to create a dashboard -In the top right of the screen, click the **+** icon to open the **Create** menu, and select **New Dashboard**. Give your new dashboard a name and a description, choose which [collection](06-sharing-answers.md#collections) the dashboard should go in, then click **Create**, and Metabase will take you to your shiny new dashboard. +In the top right of the screen, click the **+** icon to open the **Create** menu, and select **New Dashboard**. Give your new dashboard a name and a description, choose which [collections](collections.md) the dashboard should go in, then click **Create**, and Metabase will take you to your shiny new dashboard. ![Create Dashboard](images/dashboards/DashboardCreate.png) If you don't want to build a dashboard from scratch, or want to experiment by making changes to an existing dashboard without affecting the original, you can **duplicate** an existing dashboard. From an existing dashboard, click on the **...** menu in the upper right, and select **Duplicate**. 
-### Adding saved questions to a dashboard +## Adding saved questions to a dashboard There are two ways to add questions to a dashboard: from the dashboard, or from the question you want to add. @@ -32,7 +39,7 @@ Once you add a question to your dashboard, it’ll look something like this: ![First Dashboard](images/dashboards/FirstDashboard.png) -### Adding headings or descriptions with text cards +## Adding headings or descriptions with text cards Another neat thing you can do is add text cards to your dashboards. Text cards allow you to include descriptions, explanations, notes, or even images and GIFs to your dashboards. You can also use text cards to create separations between sections of charts in your dashboards, or include links to other dashboards, questions, or websites. @@ -54,7 +61,7 @@ Click the **eye** icon to see what your formatted Markdown will look like when y To learn more, see [Fun with Markdown in your dashboards](https://www.metabase.com/blog/markdown-in-dashboards/index.html). -### Arranging cards +## Arranging cards Each question on a dashboard is in its own card that you can move around or resize as you see fit. Just click the **pencil** icon in the top right of a dashboard to enter the dashboard's editing interface. @@ -68,13 +75,13 @@ Once you're in edit mode, you'll see a grid appear. You can move and resize the Metabase will automatically update a question's display to make sure your data looks great at any size you choose. -### Changing a question's visualization settings +## Changing a question's visualization settings You can change a question's visualization settings on a dashboard (to add a goal line, for example,) without affecting the original question. Click on the **pencil** icon to enter dashboard edit mode, hover over the question you want to edit, and click on the palette icon to edit the question's visualization's settings. ![Visualization settings](images/dashboards/visualization-settings.png). 
-### Finding dashboards +## Finding dashboards You can search for any dashboard (or question, collection, or pulse) by its title in the big search box at the top of Metabase. @@ -82,7 +89,7 @@ After a while, your team might accumulate a lot of dashboards. To make it easier ![Pinning a dashboard in a collection](images/dashboards/pinning-dashboard.png) -### Fullscreen dashboards +## Fullscreen dashboards After you've made your ideal dashboard, you may want to put the dashboard on a TV to help keep your team up to date throughout the day. @@ -92,7 +99,7 @@ Once you've entered fullscreen mode, you can also switch the dashboard into "Nig ![Night mode](images/dashboards/DashboardNightMode.png) -### Auto refresh +## Auto refresh If your data updates frequently, you can set up your dashboard to refresh automatically by clicking on the **clock** icon. @@ -104,6 +111,14 @@ Enabling auto refresh will re-run all the queries on the dashboard at the interv Combining fullscreen mode and auto refresh is a great way to keep your team in sync with your data throughout the day. +## Caching dashboards + +{% include plans-blockquote.html %} + +If your results don't change frequently, you may want to cache your results, that is: store your results in Metabase so that the next time you visit the dashboard, Metabase can retrieve the stored results rather than query the database(s) again. For example, if your data only updates once a day, there's no point in querying the database more than once a day, as the data won't have changed. Returning cached results can be significantly faster, as the database won't have to redo the work to answer your query. + +You can set cache duration for a dashboard by clicking on the _..._ > __Edit dashboard details__ > __More options__. 
+ ## Sharing dashboards with public links If your Metabase administrator has enabled [public sharing](../administration-guide/12-public-links.md) on a saved question or dashboard, you can go to that question or dashboard and click on the **sharing** icon to find its public links. @@ -112,7 +127,7 @@ If your Metabase administrator has enabled [public sharing](../administration-gu Public links can be viewed by anyone, even if they don't have access to Metabase. You can also use the public embedding code to embed your question or dashboard in a simple web page or blog post. Check out examples of simple apps with embedded dashboards in our [embedding-reference-apps repository](https://github.com/metabase/embedding-reference-apps). To learn more about [embedding](../administration-guide/13-embedding.md), check out our article on [How to use Metabase to deliver analytics to your customers](https://www.metabase.com/blog/external-facing-analytics/index.html), as well as an article on how to combine branding, Single Sign-On, full app embedding, and data sandboxing to deliver [multi-tenant, self-service analytics](https://www.metabase.com/blog/embedding/index.html). -### Configuring a dashboard through its URL +## Configuring a dashboard through its URL You can amend the URL of a dashboard to automatically enter fullscreen, enable night mode, or auto-refresh the dashboard. Customizing the dashboard's URL allows you to configure the dashboard - even when you do not have any input access to the device where the dashboard will be displayed, like scripted screens, for example. @@ -124,10 +139,10 @@ The part that says `refresh=60` sets the dashboard to automatically refresh ever There is one important limitation with the `fullscreen` option: for security reasons, many browsers require user interaction to initiate fullscreen. In those browsers, using the `fullscreen` option will enable the fullscreen UI in Metabase, but it won't expand the browser content to fill the screen. 
To ensure the dashboard occupies the entire screen, either activate fullscreen by clicking the button in the UI, or use the `fullscreen` URL option and launch the browser in fullscreen or kiosk mode. -### Archiving a dashboard +## Archiving a dashboard Archiving a dashboard removes the dashboard from searches and collections. Archiving a dashboard does not archive the individual saved questions on it — it just archives the dashboard. - + To archive a dashboard, click the **pencil** icon to enter edit mode, then click the **...** menu, and select **Archive**. To view all archived items, click the **menu** icon in the top-right of any collection page. You can **unarchive** a dashboard by clicking the icon of the box with the upward arrow next to that dashboard. @@ -138,7 +153,7 @@ To make a great dashboard, you first need to decide what you want the dashboard Some tips: -- **Emphasize the most important questions**. To draw people’s attention to what matters most, place the most important saved question cards near the top of the dashboard, and/or make them bigger than the other cards, +- **Emphasize the most important questions**. To draw people’s attention to what matters most, place the most important saved question cards near the top of the dashboard, and/or make them bigger than the other cards, - **Keep dashboards focused**. If you have more than 10 cards on a dashboard, think about breaking the dashboard into two separate ones. You don't want to overwhelm people with too much information, and each dashboard should revolve around one theme or topic. Remember — you can make as many dashboards as you want, so you don’t have to cram everything into just one. - **Add filters to your dashboard**. [Adding filters](08-dashboard-filters.md) to dashboards makes them more useful. 
For example, instead of your dashboard being full of questions that are restricted to a specific time span, you can make more general questions and use dashboard filters to change the time span you're looking at. - **Make your dashboards interactive.** [Customize what happens when users click on a chart or table in your dashboard](interactive-dashboards.md). diff --git a/docs/users-guide/08-dashboard-filters.md b/docs/users-guide/08-dashboard-filters.md index 42fd2a930827..9edd5fab72c1 100644 --- a/docs/users-guide/08-dashboard-filters.md +++ b/docs/users-guide/08-dashboard-filters.md @@ -51,7 +51,7 @@ The ID filter provides a simple input box where you can type the ID of a user, o The Other Categories filter is a flexible filter type that will let you create either a dropdown menu or an input box to filter on any category field in your cards. -**Note:** If you're trying to filter Native/SQL questions, you'll need to [add a bit of additional markup to your query](13-sql-parameters.md) in order to use a dashboard filter with that question. For an in-depth article on this, check out [Adding filters to dashboards with SQL questions](https://www.metabase.com/blog/dashboard-filters/index.html). +**Note:** If you're trying to filter Native/SQL questions, you'll need to [add a bit of additional markup to your query](13-sql-parameters.md) in order to use a dashboard filter with that question. For an in-depth article on this, check out [Adding filters to dashboards with SQL questions](https://www.metabase.com/learn/dashboards/filters.html). ### Example filter diff --git a/docs/users-guide/collections.md b/docs/users-guide/collections.md new file mode 100644 index 000000000000..f19ac9e2aa5a --- /dev/null +++ b/docs/users-guide/collections.md @@ -0,0 +1,59 @@ +## Collections + + After your team has been using Metabase for a while, you’ll probably end up with lots of saved questions. 
+ +![Our analytics](images/collections/our-analytics-page.png) + +Collections are the main way to organize questions, as well as dashboards and pulses. You can think of them like folders or directories. You can nest collections in other collections, and move collections around. One thing to note is that a single item, like a question or dashboard, can only be in one collection at a time (excluding parent collections). + +### Collection types + +- **Regular collections**. They're just basic collections. You can put stuff in them. +- **Official collections**. These are special collections, in that they have a badge to let people know that the items in this collection are the ones people should be looking at (or whatever "official" means to you). Questions and dashboards in official collections are also more likely to show up at the top of search results. + +![Official collections](images/collections/official-collection.png) + +### Collection permissions + +[Administrators can give you different kinds of access](../administration-guide/06-collections.md) to each collection: + +- **View access:** you can see the collection and its contents, but you can't modify anything or put anything new into the collection. +- **Curate access:** you can edit, move, or archive the collection and its contents. You can also move or save new things in it and create new collections inside of it, and can also pin items in the collection to the top of the screen. Only administrators can edit permissions for collections, however. +- **No access:** you can't see the collection or its contents. If you have access to a dashboard, but it contains questions that are saved in a collection you don't have access to, those questions will show a permissions notification instead of the chart or table. + +### Your personal collection + +In addition to the collections you and your teammates have made, you'll also always have your own personal collection that only you and administrators can see. 
To find it, click on the "browse all items" button on the homepage and click on "my personal collection" in the list of collections. + +You can use your personal collection as a scratch space to put experiments and explorations that you don't think would be particularly interesting to the rest of your team, or as a work-in-progress space where you can work on things and then move them to a shared place once they're ready. + +### Pinned items + +![Pins](images/collections/pinned-items.png) + +In each collection, you can pin important or useful dashboards or questions to make them stick to the top of the screen. Pinned items will also be displayed as large cards to make them stand out well. If you have Curate permissions for a collection, you can pin and un-pin things, and drag and drop pins to change their order. + +Any dashboards that are pinned in the main "Our analytics" collection will also show up on the homepage. + +### Moving items from collection to collection + +To move a question, dashboard, or pulse into a collection, or from one collection to another, just click and drag it onto the collection where you want it to go. You can also click on the `…` menu to the right of the question and pick the Move action. If you're trying to move several things at once, click on the items' icons to select them, then click the Move action that pops up at the bottom of the screen. + +![Selecting questions](images/collections/question-checkbox.png) + +Note that you have to have Curate permission for the collection that you're moving a question into _and_ the collection you're moving the question out of. + +### Archiving items + +Sometimes questions outlive their usefulness and need to be sent to Question Heaven. To archive a question or dashboard, just click on the `…` menu that appears on the far right when you hover over a question and pick the Archive action. You'll only see that option if you have "curate" permission for the current collection. 
You can also archive multiple items at once, the same way as you move multiple items. Note that archiving a question removes it from all dashboards or Pulses where it appears, so be careful! + +You can also archive _collections_ if you have curate permissions for the collection you're trying to archive, the collection _it's_ inside of, as well as any and all collections inside of _it_. Archiving a collection archives all of its contents as well. + +If you have second thoughts and want to bring an archived item back, you can see all your archived questions from the archive; click the menu icon in the top-right of any collection page to get to the archive. To unarchive a question, hover over it and click the unarchive icon that appears on the far right. + +## Next: creating dashboards + +Next, we'll learn about [creating dashboards and adding questions to them][dashboards]. + +[dashboards]: 07-dashboards.md + diff --git a/docs/users-guide/custom-questions.md b/docs/users-guide/custom-questions.md index 67c92e141a43..511e7d11e4ad 100644 --- a/docs/users-guide/custom-questions.md +++ b/docs/users-guide/custom-questions.md @@ -1,8 +1,8 @@ -## Creating custom questions with the notebook editor +# Creating custom questions with the notebook editor If you have a question that's a bit more involved than a [simple question](04-asking-questions.md), you can create a custom question using the notebook editor. You can get there by clicking the Ask a Question button in the top nav bar and selecting Custom Question. If you started from a Simple question or a saved question, you can get back to the custom question notebook editor by clicking the icon in the top-right of the screen. -### The parts of the notebook +## The parts of the notebook ![The notebook](./images/notebook/notebook-ui.png) @@ -10,7 +10,7 @@ The notebook is made up of a sequence of individual steps. 
Under each step you'l ![Previewing results](./images/notebook/preview-table.png) -#### Picking your starting data +### Picking your starting data This first step is required, and is where you pick the data that you want to base your question on. In most cases you'll pick one of the tables in your database, but you can also choose a previously saved question's result as the starting point for your new question. What this means in practice is that you can do things like use complex SQL queries to create new tables that can be used as starting data in a question just like any other table in your database. @@ -24,7 +24,7 @@ There are some kinds of saved questions that can't be used as source data: - questions that use `Cumulative Sum` or `Cumulative Count` aggregations - questions that have columns that are named the same or similar thing, like `Count` and `Count 2` -#### Filtering +### Filtering ![Filtering](./images/notebook/filter-step.png) @@ -38,7 +38,7 @@ You can add subsequent filter steps after every Summarize step. This lets you do If you have a more complex filter you're trying to express, you can pick "Custom Expression" from the add-filter menu create a filter expression. You can use comparison operators like greater than (>) or less than (<), as well as spreadsheet-like functions. For example, `[Subtotal] > 100 OR median([Age]) < 40`. [Learn more about writing expressions](./expressions.md) -#### Summarizing +### Summarizing ![Summarizing](./images/notebook/summarize-step.png) @@ -48,13 +48,13 @@ If you summarize and add a grouping you can then summarize _again_. You can also ![Multiple summarize steps](./images/notebook/multiple-summarize-steps.png) -**Custom expressions** +### Custom expressions ![Custom expression](./images/expressions/aggregation-expression.png) Custom expressions allow you to use spreadsheet-like functions and simple arithmetic within or between aggregation functions. 
For example, you could do `Average(sqrt[FieldX]) + Sum([FieldY])` or `Max(floor([FieldX] - [FieldY]))`, where `FieldX` and `FieldY` are fields in the currently selected table. [Learn more about writing expressions](./expressions.md) -#### Creating custom columns +### Creating custom columns ![Custom column](./images/expressions/custom-column.png) @@ -62,17 +62,17 @@ Custom columns are helpful when you need to create a new column based on a calcu You can use the following math operators in your formulas: `+`, `–`, `*` (multiplication), and `/` (division), along with a whole host of spreadsheet-like functions. You can also use parentheses to clarify the order of operations. You can [learn more about writing expressions here](./expressions.md). -#### Sorting results +### Sorting results ![Sorting](./images/notebook/sort-step.png) The sorting step lets you pick one or more columns to sort your results by. For each column you pick, you can also choose whether to sort ascending or descending; just click the arrow to change from ascending (up arrow) to descending (down arrow). -#### Setting a row limit +### Setting a row limit The row limit step lets you limit how many rows you want from the previous results. When used in conjunction with sorting, this can let you do things like create a top-10 list, by first sorting by one of the columns in your result, then adding a row limit of 10. Unlike other steps, the row limit step can only be added at the end of your question. -#### Joining data +### Joining data ![Joining](./images/notebook/join-step.png) @@ -97,7 +97,7 @@ Here are the basic types of joins: **A left outer join example:** If Table A is Orders and Table B is Customers, and assuming you do a join where the `customer_id` column in Orders is equal to the `ID` column in Customers, when you do a left outer join your results will be a full list of all your orders, and each order row will also display the columns of the customer who placed that order. 
Since a single customer can place many orders, a given customer's information might be repeated many times for different order rows. If there isn't a corresponding customer for a given order, the order's information will be shown, but the customer columns will just be blank for that row. -##### Multiple stages of joins +#### Multiple stages of joins In many cases you might have tables A, B, and C, where A and B have a connection, and B and C have a connection, but A and C don't. If you want to join A to B to C, all you have to do is add multiple join steps. Click on Join Data, join table A to table B, then click the Join Data step below that completed join block to add a second join step, and join the results of your last join to table C. @@ -105,6 +105,12 @@ In many cases you might have tables A, B, and C, where A and B have a connection See [Joins in Metabase](https://www.metabase.com/blog/joining-tables/index.html) to learn more. +#### Joining on multiple conditions + +Your joins can also include multiple conditions to refine your results. Metabase will combine multiple conditions using the `AND` operator. + +![Joining tables on multiple columns](./images/notebook/joining-on-multiple-columns.png) + ### Viewing the SQL that powers your question Under the hood, all Metabase questions are SQL (gasp!). If you're curious to see the SQL that will get run when you ask your question, you can click the little console icon in the top-right of the notebook editor. In the modal that opens up, you'll also be given the option to start a new query in the SQL editor, using this generated SQL as a starting point. It's a nice little shortcut to have Metabase write some boilerplate SQL for you, but then allows you to tweak and customize the query. 
diff --git a/docs/users-guide/images/sharing-answers/search-results.gif b/docs/users-guide/images/basic-exploration/search-results.gif similarity index 100% rename from docs/users-guide/images/sharing-answers/search-results.gif rename to docs/users-guide/images/basic-exploration/search-results.gif diff --git a/docs/users-guide/images/sharing-answers/search-results.png b/docs/users-guide/images/basic-exploration/search-results.png similarity index 100% rename from docs/users-guide/images/sharing-answers/search-results.png rename to docs/users-guide/images/basic-exploration/search-results.png diff --git a/docs/users-guide/images/collections/official-collection.png b/docs/users-guide/images/collections/official-collection.png new file mode 100644 index 000000000000..4203643e894d Binary files /dev/null and b/docs/users-guide/images/collections/official-collection.png differ diff --git a/docs/users-guide/images/sharing-answers/our-analytics-page.png b/docs/users-guide/images/collections/our-analytics-page.png similarity index 100% rename from docs/users-guide/images/sharing-answers/our-analytics-page.png rename to docs/users-guide/images/collections/our-analytics-page.png diff --git a/docs/users-guide/images/sharing-answers/pinned-items.png b/docs/users-guide/images/collections/pinned-items.png similarity index 100% rename from docs/users-guide/images/sharing-answers/pinned-items.png rename to docs/users-guide/images/collections/pinned-items.png diff --git a/docs/users-guide/images/sharing-answers/question-checkbox.png b/docs/users-guide/images/collections/question-checkbox.png similarity index 100% rename from docs/users-guide/images/sharing-answers/question-checkbox.png rename to docs/users-guide/images/collections/question-checkbox.png diff --git a/docs/users-guide/images/download-button.png b/docs/users-guide/images/download-button.png deleted file mode 100644 index 20e449cf7ecf..000000000000 Binary files a/docs/users-guide/images/download-button.png and /dev/null 
differ diff --git a/docs/users-guide/images/notebook/duplicate-question.png b/docs/users-guide/images/notebook/duplicate-question.png deleted file mode 100644 index 3328760eefe3..000000000000 Binary files a/docs/users-guide/images/notebook/duplicate-question.png and /dev/null differ diff --git a/docs/users-guide/images/notebook/join-a-b-c.png b/docs/users-guide/images/notebook/join-a-b-c.png index 728273c982a9..b34d61f7173e 100644 Binary files a/docs/users-guide/images/notebook/join-a-b-c.png and b/docs/users-guide/images/notebook/join-a-b-c.png differ diff --git a/docs/users-guide/images/notebook/joining-on-multiple-columns.png b/docs/users-guide/images/notebook/joining-on-multiple-columns.png new file mode 100644 index 000000000000..217c0838a9b9 Binary files /dev/null and b/docs/users-guide/images/notebook/joining-on-multiple-columns.png differ diff --git a/docs/users-guide/images/notebook/notebook-ui.png b/docs/users-guide/images/notebook/notebook-ui.png index 03a94b8fb0d5..19547c84ce1a 100644 Binary files a/docs/users-guide/images/notebook/notebook-ui.png and b/docs/users-guide/images/notebook/notebook-ui.png differ diff --git a/docs/users-guide/images/sharing-answers/question-details-sidebar.png b/docs/users-guide/images/sharing-answers/question-details-sidebar.png new file mode 100644 index 000000000000..da8eccc711bf Binary files /dev/null and b/docs/users-guide/images/sharing-answers/question-details-sidebar.png differ diff --git a/docs/users-guide/images/sharing-answers/verified-icon.png b/docs/users-guide/images/sharing-answers/verified-icon.png new file mode 100644 index 000000000000..7ab54270f31a Binary files /dev/null and b/docs/users-guide/images/sharing-answers/verified-icon.png differ diff --git a/docs/users-guide/start.md b/docs/users-guide/start.md index f5a3368cdb02..5a75cb6c2092 100644 --- a/docs/users-guide/start.md +++ b/docs/users-guide/start.md @@ -17,6 +17,7 @@ **Sharing results** - [Sharing and organizing your saved 
questions](06-sharing-answers.md) +- [Collections](collections.md) - [Creating dashboards](07-dashboards.md) - [Adding filters to dashboards](08-dashboard-filters.md) - [Making dashboards interactive](interactive-dashboards.md) diff --git a/docs/users-guide/writing-sql.md b/docs/users-guide/writing-sql.md index 664e3cd03f45..5562107c964c 100644 --- a/docs/users-guide/writing-sql.md +++ b/docs/users-guide/writing-sql.md @@ -37,8 +37,7 @@ You can use [SQL snippets](sql-snippets.md) to save, reuse, and share SQL code a ### Learn more -- [Best practices for writing SQL queries](https://www.metabase.com/blog/sql-best-practices/index.html). - +- [Best practices for writing SQL queries](https://www.metabase.com/learn/sql-questions/sql-best-practices.html) --- diff --git a/enterprise/README.md b/enterprise/README.md index 272bec096ade..6295a227d0ce 100644 --- a/enterprise/README.md +++ b/enterprise/README.md @@ -19,28 +19,15 @@ MB_EDITION=ee yarn build-hot ### Back-end -You need to add the `:ee` profile to the leiningen command to run Metabase Enterprise Edition. +You need to add the `:ee` alias to the Clojure CLI command to run Metabase Enterprise Edition. ```clj -lein with-profile +ee run -``` - -```clj -lein with-profile +ee uberjar -``` - -```clj -lein with-profile +ee repl -``` - -In Emacs/CIDER you can customize the `lein repl` command used to start the REPL by passing a prefix argument, e.g. - -```emacs-lisp -C-u M-x cider-jack-in -``` +# Start a local Metabase server that includes EE sources +clojure -M:ee:run -or, programatically: +# start a REPL that includes EE sources. +clojure -A:ee -```emacs-lisp -(cider-jack-in '(4)) +# start a REPL that includes EE sources & test namespaces. 
+clojure -A:dev:ee:ee-dev ``` diff --git a/enterprise/backend/README.md b/enterprise/backend/README.md new file mode 100644 index 000000000000..ae772308474d --- /dev/null +++ b/enterprise/backend/README.md @@ -0,0 +1,45 @@ +### EE Code Structure Notes + +EE namespaces work like this. + +EE namespace = take the equivalent OSS namespace and replace `metabase.` with `metabase-enterprise.<feature>.` where +`<feature>` is the premium token feature that one must have to use this feature. + +For example, Sandboxing-related API endpoints for Tables go in `metabase-enterprise.sandboxes.api.table` and +Sandboxing-related models (e.g. GTAP) go in `metabase-enterprise.sandboxes.models`. Sandboxing-specific code for +existing models follows this same pattern, e.g. Sandboxing-specific code for Tables goes in +`metabase-enterprise.sandboxes.models.table`. + +Groups of API routes should be defined in namespaces like we do in OSS, for example +`metabase-enterprise.content-management.api.review` for ModerationReview-related endpoints. All endpoints for a +specific feature are combined into a single `routes` handler in a `metabase-enterprise.<feature>.api.routes` namespace +similar to how OSS routes are combined in `metabase.api.routes`. Finally, all EE routes are combined into a single +handler in `metabase-enterprise.api.routes`; this handler is included in `metabase.api.routes/routes` if EE code is +available. + +Please keep these rules in mind when adding new EE namespaces. In general, new namespaces **SHOULD NOT** be added +directly under `metabase-enterprise` unless they apply to the Enterprise codebase as a whole; put them under the +appropriate `metabase-enterprise.<feature>` directory instead. + +### Naming EE API routes + +To make things consistent EE-only API routes should follow the same pattern and be given route names that correspond
For example, an `:advanced-config`-only +route to delete User subscriptions should be named something like + +``` +DELETE /api/ee/advanced-config/user/:id/subscriptions +``` + +rather than + +``` +DELETE /api/user/:id/subscriptions +``` + +Not all EE endpoints follow this pattern yet, but they should; please feel free to fix stuff as you come across it if +I don't get to it first. + +### Questions :interrobang: + +Ping me (`@cam`) if you have any questions. diff --git a/enterprise/backend/src/metabase_enterprise/advanced_config/models/pulse_channel.clj b/enterprise/backend/src/metabase_enterprise/advanced_config/models/pulse_channel.clj new file mode 100644 index 000000000000..58f544822b31 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/advanced_config/models/pulse_channel.clj @@ -0,0 +1,43 @@ +(ns metabase-enterprise.advanced-config.models.pulse-channel + (:require [clojure.string :as str] + [metabase.models.setting :as setting :refer [defsetting]] + [metabase.public-settings.premium-features :as premium-features] + [metabase.util :as u] + [metabase.util.i18n :refer [deferred-tru tru]])) + +(defsetting subscription-allowed-domains + (deferred-tru "Allowed email address domain(s) for new DashboardSubscriptions and Alerts. Does not affect existing subscriptions.") + :visibility :public + ;; this is a comma-separated string but we're not using `:csv` because it gets serialized to an array which makes it + ;; inconvenient to use on the frontend. + :type :string) + +(defn- allowed-domains-set + "Parse [[subscription-allowed-domains]] into a set. `nil` if the Setting is not set or empty." + [] + (some-> (subscription-allowed-domains) + (str/split #",") + set + not-empty)) + +(defn validate-email-domains + "Check that `email-addresses` associated with a [[metabase.models.pulse-channel]] are allowed based on the value of + the [[subscription-allowed-domains]] Setting, if set. 
This function no-ops if `subscription-allowed-domains` is + unset or if we do not have a premium token with the `:advanced-config` feature. + + This function is called by [[metabase.models.pulse-channel/validate-email-domains]] when Pulses are created and + updated." + [email-addresses] + (when (premium-features/enable-advanced-config?) + (when-let [allowed-domains (allowed-domains-set)] + (doseq [email email-addresses + :let [domain (u/email->domain email)]] + (assert (u/email? email) + (tru "Invalid email address: {0}" (pr-str email))) + (when-not (contains? allowed-domains domain) + (throw (ex-info (tru "You cannot create new subscriptions for the domain {0}. Allowed domains are: {1}" + (pr-str domain) + (str/join ", " allowed-domains)) + {:email email + :allowed-domains allowed-domains + :status-code 403}))))))) diff --git a/enterprise/backend/src/metabase_enterprise/api/routes.clj b/enterprise/backend/src/metabase_enterprise/api/routes.clj new file mode 100644 index 000000000000..836fde5ad86d --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/api/routes.clj @@ -0,0 +1,26 @@ +(ns metabase-enterprise.api.routes + "API routes that are only available when running Metabase® Enterprise Edition™. Even though these routes are available, + not all routes might work unless we have a valid premium features token to enable those features. + + These routes should generally live under prefixes like `/api/ee/<feature>/` -- see the + `enterprise/backend/README.md` for more details."
+ (:require [compojure.core :as compojure] + [metabase-enterprise.api.routes.common :as ee.api.common] + [metabase-enterprise.audit-app.api.routes :as audit-app] + [metabase-enterprise.content-management.api.routes :as content-management] + [metabase-enterprise.sandbox.api.routes :as sandbox])) + +(compojure/defroutes ^{:doc "API routes only available when running Metabase® Enterprise Edition™."} routes + ;; The following routes are NAUGHTY and do not follow the naming convention (i.e., they do not start with + ;; `/ee/<feature>/`). + ;; + ;; TODO -- Please fix them! + content-management/routes + sandbox/routes + ;; The following routes are NICE and do follow the `/ee/<feature>/` naming convention. Please add new routes here + ;; and follow the convention. + (compojure/context + "/ee" [] + (compojure/context + "/audit-app" [] + (ee.api.common/+require-premium-feature :audit-app audit-app/routes)))) diff --git a/enterprise/backend/src/metabase_enterprise/api/routes/common.clj b/enterprise/backend/src/metabase_enterprise/api/routes/common.clj new file mode 100644 index 000000000000..436478e56633 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/api/routes/common.clj @@ -0,0 +1,43 @@ +(ns metabase-enterprise.api.routes.common + "Shared stuff used by various EE-only API routes." + (:require [metabase.public-settings.premium-features :as premium-features] + [metabase.util.i18n :refer [tru]])) + +(defn +require-premium-feature + "Wraps Ring `handler`. Check that we have a premium token with `feature` (a keyword; see [[metabase.public-settings.premium-features]] for a + list of current known features) or return a 402 if it is not. + + (context \"/whatever\" [] (+require-premium-feature :sandboxes whatever/routes)) + + Very important! Make sure you only wrap handlers inside [[compojure.core/context]] forms with this middleware (as in + example above). Otherwise it can end up causing requests the handler would not have handled anyway to fail.
+ Use [[when-premium-feature]] instead if you want the handler to apply if we have the premium feature but pass-thru + if we do not." + [feature handler] + (fn [request respond raise] + (if-not (premium-features/has-feature? feature) + (respond {:body (tru "This API endpoint is only enabled if you have a premium token with the {0} feature." + feature) + ;; 402 Payment Required + :status 402}) + (handler request respond raise)))) + +(defn ^:deprecated +when-premium-feature + "Wraps Ring `handler`. Only applies handler if we have a premium token with `feature`; if not, passes thru to the next + handler. + + (+when-premium-feature :sandboxes (+auth table/routes)) + + This is typically used to _replace_ OSS versions of API endpoints with special implementations that live in EE-land. + If the endpoint **only** exists in EE you should use [[+require-premium-feature]] instead which will give the API + user a useful error message if the endpoint is not available because they do not have the token feature in + question, rather than a generic 'endpoint does not exist' 404 error. + + In general, it's probably better NOT to swap out API endpoints, because it's not obvious at all that it happened, + and it makes it hard for us to nicely structure our contexts in [[metabase-enterprise.api.routes/routes]]. So only + do this if there's absolutely no other way (which is probably not the case)." + [feature handler] + (fn [request respond raise] + (if-not (premium-features/has-feature? 
feature) + (respond nil) + (handler request respond raise)))) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/database_detail.clj b/enterprise/backend/src/metabase_enterprise/audit/pages/database_detail.clj deleted file mode 100644 index bed468bd5979..000000000000 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/database_detail.clj +++ /dev/null @@ -1,35 +0,0 @@ -(ns metabase-enterprise.audit.pages.database-detail - (:require [metabase-enterprise.audit.pages.common :as common] - [metabase.util.schema :as su] - [ring.util.codec :as codec] - [schema.core :as s])) - -(s/defn ^:internal-query-fn audit-log - [database-id :- su/IntGreaterThanZero] - {:metadata [[:started_at {:display_name "Viewed on", :base_type :type/DateTime}] - [:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :query}] - [:query_hash {:display_name "Query Hash", :base_type :type/Text}] - [:query {:display_name "Query", :base_type :type/Text, :remapped_from :card_id}] - [:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :user}] - [:user {:display_name "Queried by", :base_type :type/Text, :remapped_from :user_id}] - [:schema {:display_name "Schema", :base_type :type/Text}] - [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table}] - [:table {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}]] - :results (common/reducible-query - {:select [:qe.started_at - [:card.id :card_id] - [:qe.hash :query_hash] - [(common/card-name-or-ad-hoc :card) :query] - [:u.id :user_id] - [(common/user-full-name :u) :user] - :t.schema - [:t.id :table_id] - [:t.name :table]] - :from [[:query_execution :qe]] - :where [:= :qe.database_id database-id] - :join [[:metabase_database :db] [:= :db.id :qe.database_id] - [:core_user :u] [:= :qe.executor_id :u.id]] - :left-join [[:report_card :card] [:= :qe.card_id :card.id] - [:metabase_table :t] [:= :card.table_id :t.id]] - :order-by [[:qe.started_at 
:desc]]}) - :xform (map #(update (vec %) 2 codec/base64-encode))}) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/queries.clj b/enterprise/backend/src/metabase_enterprise/audit/pages/queries.clj deleted file mode 100644 index 9c4344ae2288..000000000000 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/queries.clj +++ /dev/null @@ -1,134 +0,0 @@ -(ns metabase-enterprise.audit.pages.queries - (:require [metabase-enterprise.audit.pages.common :as common] - [metabase-enterprise.audit.pages.common.cards :as cards] - [metabase.util.honeysql-extensions :as hx] - [schema.core :as s])) - -(defn ^:internal-query-fn ^:deprecated views-and-avg-execution-time-by-day - "Query that returns data for a two-series timeseries chart with number of queries ran and average query running time - broken out by day." - [] - {:metadata [[:day {:display_name "Date", :base_type :type/Date}] - [:views {:display_name "Views", :base_type :type/Integer}] - [:avg_running_time {:display_name "Avg. Running Time (ms)", :base_type :type/Decimal}]] - :results (common/reducible-query - {:select [[(hx/cast :date :started_at) :day] - [:%count.* :views] - [:%avg.running_time :avg_running_time]] - :from [:query_execution] - :group-by [(hx/cast :date :started_at)] - :order-by [[(hx/cast :date :started_at) :asc]]})}) - -(defn ^:internal-query-fn most-popular - "Query that returns the 10 most-popular Cards based on number of query executions, in descending order." 
- [] - {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] - [:card_name {:display_name "Card", :base_type :type/Title, :remapped_from :card_id}] - [:executions {:display_name "Executions", :base_type :type/Integer}]] - :results (common/reducible-query - {:select [[:c.id :card_id] - [:c.name :card_name] - [:%count.* :executions]] - :from [[:query_execution :qe]] - :join [[:report_card :c] [:= :qe.card_id :c.id]] - :group-by [:c.id] - :order-by [[:executions :desc]] - :limit 10})}) - -(defn ^:internal-query-fn ^:deprecated slowest - "Query that returns the 10 slowest-running Cards based on average query execution time, in descending order." - [] - {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] - [:card_name {:display_name "Card", :base_type :type/Title, :remapped_from :card_id}] - [:avg_running_time {:display_name "Avg. Running Time (ms)", :base_type :type/Decimal}]] - :results (common/reducible-query - {:select [[:c.id :card_id] - [:c.name :card_name] - [:%avg.running_time :avg_running_time]] - :from [[:query_execution :qe]] - :join [[:report_card :c] [:= :qe.card_id :c.id]] - :group-by [:c.id] - :order-by [[:avg_running_time :desc]] - :limit 10})}) - -(s/defn ^:internal-query-fn table - "A list of all questions. - - Three possible argument lists. All arguments are always nullable. - - [] : - Dump them all, sort by name ascending - - - [questionFilter] : - Dump all filtered by the questionFilter string, sort by name ascending. - questionFilter filters on the `name` column in `cards` table. - - - [questionFilter, collectionFilter, sortColumn, sortDirection] : - Dump all filtered by both questionFilter and collectionFilter, - sort by the given column and sort direction. - questionFilter filters on the `name` column in `cards` table. - collectionFilter filters on the `name` column in `collections` table. - - Sort column is given over in keyword form to honeysql. 
Default `card.name` - - Sort direction can be `asc` or `desc`, ascending and descending respectively. Default `asc`. - - All inputs have to be strings because that's how the magic middleware - that turns these functions into clojure-backed 'datasets' works." - ([] - (table nil nil nil nil)) - ([questionFilter :- (s/maybe s/Str)] - (table questionFilter nil nil nil)) - ([questionFilter :- (s/maybe s/Str) - collectionFilter :- (s/maybe s/Str) - sortColumn :- (s/maybe s/Str) - sortDirection :- (s/maybe (s/enum "asc" "desc"))] - {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] - [:card_name {:display_name "Name", :base_type :type/Name, :remapped_from :card_id}] - [:collection_id {:display_name "Collection ID", :base_type :type/Integer, :remapped_to :collection_name}] - [:collection_name {:display_name "Collection", :base_type :type/Text, :remapped_from :collection_id}] - [:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] - [:database_name {:display_name "Database", :base_type :type/Text, :remapped_from :database_id}] - [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table_name}] - [:table_name {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}] - [:user_id {:display_name "Created By ID", :base_type :type/Integer, :remapped_to :user_name}] - [:user_name {:display_name "Created By", :base_type :type/Text, :remapped_from :user_id}] - [:public_link {:display_name "Public Link", :base_type :type/URL}] - [:cache_ttl {:display_name "Cache Duration", :base_type :type/Number}] - [:avg_exec_time {:display_name "Average Runtime (ms)", :base_type :type/Integer}] - [:total_runtime {:display_name "Total Runtime (ms)", :base_type :type/Number}] - [:query_runs {:display_name "Query Runs", :base_type :type/Integer}] - ] - :results (common/reducible-query - (-> - {:with [cards/avg-exec-time-45 - cards/total-exec-time-45 - 
cards/query-runs-45] - :select [[:card.id :card_id] - [:card.name :card_name] - :collection_id - [:coll.name :collection_name] - :card.database_id - [:db.name :database_name] - :card.table_id - [:t.name :table_name] - [:card.creator_id :user_id] - [(common/user-full-name :u) :user_name] - [(common/card-public-url :card.public_uuid) :public_link] - :card.cache_ttl - [:avg_exec_time.avg_running_time_ms :avg_exec_time] - [:total_runtime.total_running_time_ms :total_runtime] - [:query_runs.count :query_runs]] - :from [[:report_card :card]] - :left-join [[:collection :coll] [:= :card.collection_id :coll.id] - [:metabase_database :db] [:= :card.database_id :db.id] - [:metabase_table :t] [:= :card.table_id :t.id] - [:core_user :u] [:= :card.creator_id :u.id] - :avg_exec_time [:= :card.id :avg_exec_time.card_id] - :total_runtime [:= :card.id :total_runtime.card_id] - :query_runs [:= :card.id :query_runs.card_id]] - :where [:= :card.archived false]} - (common/add-search-clause questionFilter :card.name) - (common/add-search-clause collectionFilter :coll.name) - (common/add-sort-clause - (or sortColumn "card.name") - (or sortDirection "asc"))))})) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/query_detail.clj b/enterprise/backend/src/metabase_enterprise/audit/pages/query_detail.clj deleted file mode 100644 index 46e190e068c4..000000000000 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/query_detail.clj +++ /dev/null @@ -1,19 +0,0 @@ -(ns metabase-enterprise.audit.pages.query-detail - "Queries to show details about a (presumably ad-hoc) query." - (:require [cheshire.core :as json] - [metabase-enterprise.audit.pages.common :as common] - [metabase.util.schema :as su] - [ring.util.codec :as codec] - [schema.core :as s])) - -(s/defn ^:internal-query-fn details - [query-hash :- su/NonBlankString] - {:metadata [[:query {:display_name "Query", :base_type :type/Dictionary}] - [:average_execution_time {:display_name "Avg. Exec. 
Time (ms)", :base_type :type/Number}]] - :results (common/reducible-query - {:select [:query - :average_execution_time] - :from [:query] - :where [:= :query_hash (codec/base64-decode query-hash)] - :limit 1}) - :xform (map #(update (vec %) 0 json/parse-string))}) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/question_detail.clj b/enterprise/backend/src/metabase_enterprise/audit/pages/question_detail.clj deleted file mode 100644 index db1427cbcc42..000000000000 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/question_detail.clj +++ /dev/null @@ -1,32 +0,0 @@ -(ns metabase-enterprise.audit.pages.question-detail - "Detail page for a single Card (Question)." - (:require [metabase-enterprise.audit.pages.common :as common] - [metabase-enterprise.audit.pages.common.card-and-dashboard-detail :as card-and-dash-detail] - [metabase.models.card :refer [Card]] - [metabase.util.schema :as su] - [schema.core :as s])) - -(s/defn ^:internal-query-fn views-by-time - "Get views of a Card broken out by a time `unit`, e.g. `day` or `day-of-week`." - [card-id :- su/IntGreaterThanZero, datetime-unit :- common/DateTimeUnitStr] - (card-and-dash-detail/views-by-time "card" card-id datetime-unit)) - -(s/defn ^:internal-query-fn cached-views-by-time - "Get cached views of a Card broken out by a time `unit`, e.g. `day` or `day-of-week`." - [card-id :- su/IntGreaterThanZero, datetime-unit :- common/DateTimeUnitStr] - (card-and-dash-detail/cached-views-by-time card-id datetime-unit)) - -(s/defn ^:internal-query-fn revision-history - "Get the revision history for a Card." - [card-id :- su/IntGreaterThanZero] - (card-and-dash-detail/revision-history Card card-id)) - -(s/defn ^:internal-query-fn audit-log - "Get a view log for a Card." 
- [card-id :- su/IntGreaterThanZero] - (card-and-dash-detail/audit-log "card" card-id)) - -(s/defn ^:internal-query-fn avg-execution-time-by-time - "Average execution time broken out by period" - [card-id :- su/IntGreaterThanZero, datetime-unit :- common/DateTimeUnitStr] - (card-and-dash-detail/avg-execution-time-by-time card-id datetime-unit)) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/api/routes.clj b/enterprise/backend/src/metabase_enterprise/audit_app/api/routes.clj new file mode 100644 index 000000000000..575cc6ae8ff8 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/api/routes.clj @@ -0,0 +1,9 @@ +(ns metabase-enterprise.audit-app.api.routes + "API endpoints that are only enabled if we have a premium token with the `:audit-app` feature. These live under + `/api/ee/audit-app/`. Feature-flagging for these routes happens in [[metabase-enterprise.api.routes/routes]]." + (:require [compojure.core :as compojure] + [metabase-enterprise.audit-app.api.user :as user] + [metabase.api.routes.common :refer [+auth]])) + +(compojure/defroutes ^{:doc "Ring routes for mt API endpoints."} routes + (compojure/context "/user" [] (+auth user/routes))) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/api/user.clj b/enterprise/backend/src/metabase_enterprise/audit_app/api/user.clj new file mode 100644 index 000000000000..a47ffda1bff8 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/api/user.clj @@ -0,0 +1,22 @@ +(ns metabase-enterprise.audit-app.api.user + "`/api/ee/audit-app/user` endpoints. These only work if you have a premium token with the `:audit-app` feature." 
+ (:require [compojure.core :refer [DELETE]] + [metabase.api.common :as api] + [metabase.api.user :as api.user] + [metabase.models.pulse :refer [Pulse]] + [metabase.models.pulse-channel-recipient :refer [PulseChannelRecipient]] + [toucan.db :as db])) + +(api/defendpoint DELETE "/:id/subscriptions" + "Delete all Alert and DashboardSubscription subscriptions for a User (i.e., so they will no longer receive them). + Archive all Alerts and DashboardSubscriptions created by the User. Only allowed for admins or for the current user." + [id] + (api.user/check-self-or-superuser id) + ;; delete all `PulseChannelRecipient` rows for this User, which means they will no longer receive any + ;; Alerts/DashboardSubscriptions + (db/delete! PulseChannelRecipient :user_id id) + ;; archive anything they created. + (db/update-where! Pulse {:creator_id id, :archived false} :archived true) + api/generic-204-no-content) + +(api/define-routes) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/interface.clj b/enterprise/backend/src/metabase_enterprise/audit_app/interface.clj new file mode 100644 index 000000000000..dbc53513c2c0 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/interface.clj @@ -0,0 +1,35 @@ +(ns metabase-enterprise.audit-app.interface + (:require [metabase.plugins.classloader :as classloader] + [metabase.util.i18n :refer [tru]] + [metabase.util.schema :as su] + [schema.core :as s])) + +(def ResultsMetadata + "Schema for the expected format for `:metadata` returned by an internal query function." + (su/non-empty + [[(s/one su/KeywordOrString "field name") + (s/one {:base_type su/FieldType, :display_name su/NonBlankString, s/Keyword s/Any} + "field metadata")]])) + +(defmulti internal-query + "Define a new internal query type. Conventionally `query-type` should be a namespaced keyword with the namespace in + which the method is defined. 
See docstring + for [[metabase-enterprise.audit-app.query-processor.middleware.handle-audit-queries]] for a description of what this + method should return." + {:arglists '([query-type & args])} + (fn [query-type & _] + (keyword query-type))) + +(defmethod internal-query :default + [query-type & _] + (throw (ex-info (str (tru "Unable to run internal query function: cannot resolve {0}" query-type)) + {:status-code 400}))) + +(defn resolve-internal-query + "Invoke the internal query with `query-type` (invokes the corresponding implementation of [[internal-query]])." + [query-type & args] + (let [query-type (keyword query-type) + ns-str (namespace query-type)] + (when ns-str + (classloader/require (symbol ns-str))) + (apply internal-query query-type args))) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/alerts.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/alerts.clj new file mode 100644 index 000000000000..7c92047e8502 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/alerts.clj @@ -0,0 +1,58 @@ +(ns metabase-enterprise.audit-app.pages.alerts + (:require [clojure.string :as str] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.pulses :as common.pulses])) + +(def ^:private table-metadata + (into + [[:card_id {:display_name "Question ID", :base_type :type/Integer, :remapped_to :card_name}] + [:card_name {:display_name "Question Name" :base_type :type/Text, :remapped_from :card_id}]] + common.pulses/table-metadata)) + +(def ^:private table-query-columns + (into + [:card_id + :card_name] + common.pulses/table-query-columns)) + +(defn- table-query [card-name] + (-> common.pulses/table-query + (update :select (partial into + [[:card.id :card_id] + [:card.name :card_name]])) + (update :left-join into [:pulse_card [:= :pulse.id :pulse_card.pulse_id] + [:report_card :card] [:= :pulse_card.card_id 
:card.id]]) + (update :where (fn [where] + (into + where + (filter some?) + ;; make sure the pulse_card actually exists. + [[:not= :pulse_card.card_id nil] + [:= :pulse.dashboard_id nil] + ;; if `pulse.alert_condition` is non-NULL then the Pulse is an Alert + [:not= :pulse.alert_condition nil] + (when-not (str/blank? card-name) + [:like :%lower.card.name (str \% (str/lower-case card-name) \%)])]))) + (assoc :order-by [[:%lower.card.name :asc] + ;; Newest first. ID instead of `created_at` because the column is currently only + ;; second-resolution for MySQL which busts our tests + [:channel.id :desc]]))) + +(def ^:private ^{:arglists '([row-map])} row-map->vec + (apply juxt (map first table-metadata))) + +(defn- post-process-row [row] + (-> (zipmap table-query-columns row) + common.pulses/post-process-row-map + row-map->vec)) + +;; with optional param `card-name`, only show subscriptions matching card name. +(defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + + ([_ card-name] + {:metadata table-metadata + :results (common/reducible-query (table-query card-name)) + :xform (map post-process-row)})) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/common.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common.clj similarity index 89% rename from enterprise/backend/src/metabase_enterprise/audit/pages/common.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/common.clj index 486fd248f5cb..04e74c7b7dfb 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/common.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common.clj @@ -1,4 +1,4 @@ -(ns metabase-enterprise.audit.pages.common +(ns metabase-enterprise.audit-app.pages.common "Shared functions used by audit internal queries across different namespaces." 
(:require [clojure.core.async :as a] [clojure.core.memoize :as memoize] @@ -10,7 +10,7 @@ [honeysql.helpers :as h] [java-time :as t] [medley.core :as m] - [metabase-enterprise.audit.query-processor.middleware.handle-audit-queries :as qp.middleware.audit] + [metabase-enterprise.audit-app.query-processor.middleware.handle-audit-queries :as qp.middleware.audit] [metabase.db :as mdb] [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute] [metabase.driver.sql-jdbc.sync :as sql-jdbc.sync] @@ -19,6 +19,7 @@ [metabase.query-processor.timezone :as qp.tz] [metabase.util :as u] [metabase.util.honeysql-extensions :as hx] + [metabase.util.i18n :refer [tru]] [metabase.util.urls :as urls] [schema.core :as s] [toucan.db :as db])) @@ -91,12 +92,20 @@ (fn [] (timezone (mdb/db-type) (db/connection))))) +(defn- compile-honeysql [driver honeysql-query] + (try + (let [honeysql-query (cond-> honeysql-query + ;; MySQL 5.x does not support CTEs, so convert them to subselects instead + (= driver :mysql) CTEs->subselects)] + (db/honeysql->sql (add-default-params honeysql-query))) + (catch Throwable e + (throw (ex-info (tru "Error compiling audit query: {0}" (ex-message e)) + {:driver driver, :honeysql-query honeysql-query} + e))))) + (defn- reduce-results* [honeysql-query context rff init] (let [driver (mdb/db-type) - honeysql-query (cond-> honeysql-query - ;; MySQL 5.x does not support CTEs, so convert them to subselects instead - (= driver :mysql) CTEs->subselects) - [sql & params] (db/honeysql->sql (add-default-params honeysql-query)) + [sql & params] (compile-honeysql driver honeysql-query) canceled-chan (context/canceled-chan context)] ;; MySQL driver normalizies timestamps. Setting `*results-timezone-id-override*` is a shortcut ;; instead of mocking up a chunk of regular QP pipeline. 
@@ -109,11 +118,17 @@ cols (sql-jdbc.execute/column-metadata driver rsmeta) metadata {:cols cols} rf (rff metadata)] - (reduce rf init (sql-jdbc.execute/reducible-rows driver rs rsmeta canceled-chan)))) (catch InterruptedException e (a/>!! canceled-chan :cancel) - (throw e)))))) + (throw e)) + (catch Throwable e + (throw (ex-info (tru "Error running audit query: {0}" (ex-message e)) + {:driver driver + :honeysql-query honeysql-query + :sql sql + :params params} + e))))))) (defn reducible-query "Return a function with the signature @@ -131,7 +146,7 @@ (defn query "Run a internal audit query, automatically including limits and offsets for paging. This function returns results directly as a series of maps (the 'legacy results' format as described in - `metabase-enterprise.audit.query-processor.middleware.handle-audit-queries.internal-queries`)" + `metabase-enterprise.audit-app.query-processor.middleware.handle-audit-queries.internal-queries`)" [honeysql-query] (let [context {:canceled-chan (a/promise-chan)} rff (fn [{:keys [cols]}] @@ -205,7 +220,7 @@ (defn lowercase-field "Lowercase a SQL field, to enter into honeysql query" [field] - (keyword (str "%lower." 
(name field)))) + (hsql/call :lower field)) (defn add-45-days-clause "Add an appropriate `WHERE` clause to limit query to 45 days" diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/common/card_and_dashboard_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/card_and_dashboard_detail.clj similarity index 94% rename from enterprise/backend/src/metabase_enterprise/audit/pages/common/card_and_dashboard_detail.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/common/card_and_dashboard_detail.clj index c06f7f5b84c4..894851fa9c9a 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/common/card_and_dashboard_detail.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/card_and_dashboard_detail.clj @@ -1,7 +1,7 @@ -(ns metabase-enterprise.audit.pages.common.card-and-dashboard-detail +(ns metabase-enterprise.audit-app.pages.common.card-and-dashboard-detail "Common queries used by both Card (Question) and Dashboard detail pages." 
(:require [honeysql.core :as hsql] - [metabase-enterprise.audit.pages.common :as common] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.models.card :refer [Card]] [metabase.models.dashboard :refer [Dashboard]] [metabase.models.revision :as revision] @@ -95,11 +95,13 @@ [model :- ModelName, model-id :- su/IntGreaterThanZero] {:metadata [[:when {:display_name "When", :base_type :type/DateTime}] [:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :who}] - [:who {:display_name "Who", :base_type :type/Name, :remapped_from :user_id}]] + [:who {:display_name "Who", :base_type :type/Name, :remapped_from :user_id}] + [:what {:display_name "What", :base_type :type/Text}]] :results (common/reducible-query {:select [[:vl.timestamp :when] :vl.user_id - [(common/user-full-name :u) :who]] + [(common/user-full-name :u) :who] + [:vl.metadata :what]] :from [[:view_log :vl]] :join [[:core_user :u] [:= :vl.user_id :u.id]] :where [:and diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/common/cards.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/cards.clj similarity index 51% rename from enterprise/backend/src/metabase_enterprise/audit/pages/common/cards.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/common/cards.clj index 31c2a2390483..fbfa1d21a0f7 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/common/cards.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/cards.clj @@ -1,5 +1,6 @@ -(ns metabase-enterprise.audit.pages.common.cards - (:require [metabase-enterprise.audit.pages.common :as common] +(ns metabase-enterprise.audit-app.pages.common.cards + (:require [metabase-enterprise.audit-app.pages.common :as common] + [metabase.db.connection :as mdb.connection] [metabase.util.honeysql-extensions :as hx])) (def avg-exec-time @@ -25,6 +26,23 @@ :group-by [:card_id]} (common/add-45-days-clause :started_at))]) +(def latest-qe + 
"HoneySQL for a CTE to get latest QueryExecution for a Card." + [:latest_qe {:select [:query_execution.card_id :error :query_execution.started_at] + :from [:query_execution] + :join [[{:select [:card_id [:%max.started_at :started_at]] + :from [:query_execution] + :group-by [:card_id]} :inner_qe] + [:= :query_execution.started_at :inner_qe.started_at]] + :limit 1}]) + +(def query-runs + "HoneySQL for a CTE to include the total number of queries for each Card forever." + [:query_runs {:select [:card_id + [:%count.* :count]] + :from [:query_execution] + :group-by [:card_id]}]) + (def query-runs-45 "HoneySQL for a CTE to include the total number of queries for each Card for 45 days." [:query_runs (-> {:select [:card_id @@ -33,6 +51,21 @@ :group-by [:card_id]} (common/add-45-days-clause :started_at))]) +(def dashboards-count + "HoneySQL for a CTE to enumerate the dashboards for a Card." + [:dash_card {:select [:card_id [:%count.* :count]] + :from [:report_dashboardcard] + :group-by [:card_id]}]) + +(def dashboards-ids + "HoneySQL for a CTE to enumerate the dashboards for a Card. We get the actual ID's" + [:dash_card {:select [:card_id [(common/group-concat (hx/cast + (if (= (mdb.connection/db-type) :mysql) :char :text) + :report_dashboard.name) "|") :name_str]] + :from [:report_dashboardcard] + :join [:report_dashboard [:= :report_dashboardcard.dashboard_id :report_dashboard.id]] + :group-by [:card_id]}]) + (def views "HoneySQL for a CTE to include the total view count for each Card." 
[:card_views {:select [[:model_id :card_id] diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/common/dashboards.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/dashboards.clj similarity index 93% rename from enterprise/backend/src/metabase_enterprise/audit/pages/common/dashboards.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/common/dashboards.clj index 1a1da7a51d26..3445908f2b92 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/common/dashboards.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/dashboards.clj @@ -1,7 +1,7 @@ -(ns metabase-enterprise.audit.pages.common.dashboards +(ns metabase-enterprise.audit-app.pages.common.dashboards (:require [honeysql.core :as hsql] [honeysql.helpers :as h] - [metabase-enterprise.audit.pages.common :as common] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.honeysql-extensions :as hx] [metabase.util.urls :as urls])) @@ -13,6 +13,7 @@ [:saved_by_id {:display_name "Saved by User ID", :base_type :type/Text, :remapped_to :saved_by}] [:saved_by {:display_name "Saved by", :base_type :type/Text, :remapped_from :saved_by_id}] [:saved_on {:display_name "Saved on", :base_type :type/DateTime}] + [:cache_ttl {:display_name "Cache Duration", :base_type :type/Integer}] [:last_edited_on {:display_name "Last edited on", :base_type :type/DateTime}] [:cards {:display_name "Cards", :base_type :type/Integer}] [:public_link {:display_name "Public Link", :base_type :type/URL}] @@ -44,6 +45,7 @@ [:u.id :saved_by_id] [(common/user-full-name :u) :saved_by] [:d.created_at :saved_on] + [:d.cache_ttl :cache_ttl] [:d.updated_at :last_edited_on] [:cc.card_count :cards] [(hsql/call :case diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/pulses.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/pulses.clj new file mode 100644 index 000000000000..3b8d59fce65b --- /dev/null
+++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/common/pulses.clj @@ -0,0 +1,130 @@ +(ns metabase-enterprise.audit-app.pages.common.pulses + "Shared code for [[metabase-enterprise.audit-app.pages.dashboard-subscriptions]] + and [[metabase-enterprise.audit-app.pages.alerts]]." + (:require [cheshire.core :as json] + [clojure.tools.logging :as log] + [metabase.models.collection :as collection] + [metabase.util.cron :as u.cron] + [metabase.util.honeysql-extensions :as hx] + [metabase.util.i18n :refer [trs tru]])) + +(def table-metadata + "Common Metadata for the columns returned by both the [[metabase-enterprise.audit-app.pages.dashboard-subscriptions]] + and [[metabase-enterprise.audit-app.pages.alerts]] audit queries. (These respective queries also return their own + additional columns.)" + [[:pulse_id {:display_name "Pulse ID", :base_type :type/Integer}] + [:recipients {:display_name "Recipients", :base_type :type/Integer}] + [:subscription_type {:display_name "Type", :base_type :type/Text}] + [:collection_id {:display_name "Collection ID", :base_type :type/Integer, :remapped_to :collection_name}] + [:collection_name {:display_name "Collection", :base_type :type/Text, :remapped_from :collection_id}] + [:frequency {:display_name "Frequency", :base_type :type/Text}] + [:creator_id {:display_name "Created By ID", :base_type :type/Integer, :remapped_to :creator_name}] + [:creator_name {:display_name "Created By", :base_type :type/Text, :remapped_from :creator_id}] + [:created_at {:display_name "Created At", :base_type :type/DateTimeWithTZ}] + [:num_filters {:display_name "Filters", :base_type :type/Integer}]]) + +(def table-query-columns + "Keyword names of columns returned by the queries by both + the [[metabase-enterprise.audit-app.pages.dashboard-subscriptions]] and [[metabase-enterprise.audit-app.pages.alerts]] audit + queries." 
+ [:pulse_id + :num_user_recipients + :channel_id + :channel_details + :subscription_type + :collection_id + :collection_name + :schedule_type + :schedule_hour + :schedule_day + :schedule_frame + :creator_id + :creator_name + :created_at + :pulse_parameters]) + +(def table-query + "Common HoneySQL base query for both the [[metabase-enterprise.audit-app.pages.dashboard-subscriptions]] + and [[metabase-enterprise.audit-app.pages.alerts]] audit queries. (The respective implementations tweak this query and + add additional columns, filters, and order-by clauses.)" + {:with [[:user_recipients {:select [[:recipient.pulse_channel_id :channel_id] + [:%count.* :count]] + :from [[:pulse_channel_recipient :recipient]] + :group-by [:channel_id]}]] + :select [[:pulse.id :pulse_id] + [:user_recipients.count :num_user_recipients] + [:channel.id :channel_id] + [:channel.details :channel_details] + [:channel.channel_type :subscription_type] + [:collection.id :collection_id] + [:collection.name :collection_name] + :channel.schedule_type + :channel.schedule_hour + :channel.schedule_day + :channel.schedule_frame + [:creator.id :creator_id] + [(hx/concat :creator.first_name (hx/literal " ") :creator.last_name) :creator_name] + [:channel.created_at :created_at] + [:pulse.parameters :pulse_parameters]] + :from [[:pulse_channel :channel]] + :left-join [:pulse [:= :channel.pulse_id :pulse.id] + :collection [:= :pulse.collection_id :collection.id] + [:core_user :creator] [:= :pulse.creator_id :creator.id] + :user_recipients [:= :channel.id :user_recipients.channel_id]] + :where [:and + [:not= :pulse.archived true] + [:= :channel.enabled true]]}) + +(defn- describe-frequency [row] + (-> (select-keys row [:schedule_type :schedule_hour :schedule_day :schedule_frame]) + u.cron/schedule-map->cron-string + u.cron/describe-cron-string)) + +(defn- describe-recipients + "Return the number of recipients for email `PulseChannel`s. 
Includes both User recipients (represented by + `PulseChannelRecipient` rows) and plain email recipients (stored directly in the `PulseChannel` `:details`). Returns + `nil` for Slack channels." + [{channel-id :channel_id + subscription-type :subscription_type + channel-details :channel_details + num-recipients :num_user_recipients}] + (let [details (json/parse-string channel-details true)] + (when (= (keyword subscription-type) :email) + ((fnil + 0 0) num-recipients (count (:emails details)))))) + +(defn- pulse-parameter-count [{pulse-parameters :pulse_parameters}] + (if-let [params (try + (some-> pulse-parameters (json/parse-string true)) + (catch Throwable e + (log/error e (trs "Error parsing Pulse parameters: {0}" (ex-message e))) + nil))] + (count params) + 0)) + +(defn- root-collection-name [] + (:name (collection/root-collection-with-ui-details nil))) + +(defn post-process-row-map + "Post-process a `row` **map** for the subscription and alert audit page tables. Get this map by doing something like + this: + + (zipmap table-query-columns row-vector) + + This map should contain at least the keys in [[table-query-columns]] (provided by the common [[table-query]]). After + calling this function, you'll need to convert the row map back to a vector; something like + + (apply juxt (map first table-metadata)) + + should do the trick." + [row] + {:pre [(map? 
row)]} + (-> row + (assoc :frequency (describe-frequency row) + :recipients (describe-recipients row) + :num_filters (pulse-parameter-count row)) + (update :subscription_type (fn [subscription-type] + (case (keyword subscription-type) + :email (tru "Email") + :slack (tru "Slack") + subscription-type))) + (update :collection_name #(or % (root-collection-name))))) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/dashboard_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_detail.clj similarity index 73% rename from enterprise/backend/src/metabase_enterprise/audit/pages/dashboard_detail.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_detail.clj index e36d59784cbd..afdf66bef117 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/dashboard_detail.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_detail.clj @@ -1,39 +1,42 @@ -(ns metabase-enterprise.audit.pages.dashboard-detail +(ns metabase-enterprise.audit-app.pages.dashboard-detail "Detail page for a single dashboard." - (:require [metabase-enterprise.audit.pages.common :as common] - [metabase-enterprise.audit.pages.common.card-and-dashboard-detail :as card-and-dash-detail] - [metabase-enterprise.audit.pages.common.cards :as cards] + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.card-and-dashboard-detail :as card-and-dash-detail] + [metabase-enterprise.audit-app.pages.common.cards :as cards] [metabase.models.dashboard :refer [Dashboard]] [metabase.util.schema :as su] [schema.core :as s])) -(s/defn ^:internal-query-fn views-by-time - "Get views of a Dashboard broken out by a time `unit`, e.g. `day` or `day-of-week`." - [dashboard-id :- su/IntGreaterThanZero, datetime-unit :- common/DateTimeUnitStr] +;; Get views of a Dashboard broken out by a time `unit`, e.g. 
`day` or `day-of-week`. +(s/defmethod audit.i/internal-query ::views-by-time + [_ dashboard-id :- su/IntGreaterThanZero datetime-unit :- common/DateTimeUnitStr] (card-and-dash-detail/views-by-time "dashboard" dashboard-id datetime-unit)) -(s/defn ^:internal-query-fn revision-history - [dashboard-id :- su/IntGreaterThanZero] +;; Revision history for a specific Dashboard. +(s/defmethod audit.i/internal-query ::revision-history + [_ dashboard-id :- su/IntGreaterThanZero] (card-and-dash-detail/revision-history Dashboard dashboard-id)) -(s/defn ^:internal-query-fn audit-log - [dashboard-id :- su/IntGreaterThanZero] +;; View log for a specific Dashboard. +(s/defmethod audit.i/internal-query ::audit-log + [_ dashboard-id :- su/IntGreaterThanZero] (card-and-dash-detail/audit-log "dashboard" dashboard-id)) - -(s/defn ^:internal-query-fn cards - [dashboard-id :- su/IntGreaterThanZero] +;; Information about the Saved Questions (Cards) in this instance. +(s/defmethod audit.i/internal-query ::cards + [_ dashboard-id :- su/IntGreaterThanZero] {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Title", :base_type :type/Name, :remapped_from :card_id}] [:collection_id {:display_name "Collection ID", :base_type :type/Integer, :remapped_to :collection_name}] [:collection_name {:display_name "Collection", :base_type :type/Text, :remapped_from :collection_id}] - [:created_at {:display_name "Created At", :base_type :type/DateTime}] + [:created_at {:display_name "Created At", :base_type :type/DateTime}] [:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] [:database_name {:display_name "Database", :base_type :type/Text, :remapped_from :database_id}] [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table_name}] [:table_name {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}] [:avg_running_time_ms {:display_name "Avg. 
exec. time (ms)", :base_type :type/Number}] - [:cache_ttl {:display_name "Cache TTL", :base_type :type/Number}] + [:cache_ttl {:display_name "Cache Duration", :base_type :type/Number}] [:public_link {:display_name "Public Link", :base_type :type/URL}] [:total_views {:display_name "Total Views", :base_type :type/Integer}]] :results (common/reducible-query diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_subscriptions.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_subscriptions.clj new file mode 100644 index 000000000000..b625c4a451fa --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboard_subscriptions.clj @@ -0,0 +1,53 @@ +(ns metabase-enterprise.audit-app.pages.dashboard-subscriptions + (:require [clojure.string :as str] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.pulses :as common.pulses])) + +(def ^:private table-metadata + (into + [[:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] + [:dashboard_name {:display_name "Dashboard Name" :base_type :type/Text, :remapped_from :dashboard_id}]] + common.pulses/table-metadata)) + +(def ^:private table-query-columns + (into + [:dashboard_id + :dashboard_name] + common.pulses/table-query-columns)) + +(defn- table-query [dashboard-name] + (-> common.pulses/table-query + (update :select (partial into + [[:dashboard.id :dashboard_id] + [:dashboard.name :dashboard_name]])) + (update :left-join into [[:report_dashboard :dashboard] [:= :pulse.dashboard_id :dashboard.id]]) + (update :where (fn [where] + (into + where + (filter some?) + [[:not= :pulse.dashboard_id nil] + (when-not (str/blank? dashboard-name) + [:like :%lower.dashboard.name (str \% (str/lower-case dashboard-name) \%)])]))) + (assoc :order-by [[:%lower.dashboard.name :asc] + ;; Newest first. 
ID instead of `created_at` because the column is currently only + ;; second-resolution for MySQL which busts our tests + [:channel.id :desc]]))) + +(def ^:private ^{:arglists '([row-map])} row-map->vec + (apply juxt (map first table-metadata))) + +(defn- post-process-row [row] + (-> (zipmap table-query-columns row) + common.pulses/post-process-row-map + row-map->vec)) + +;; with optional param `dashboard-name`, only show subscriptions matching dashboard name. +(defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + + ([_ dashboard-name] + {:metadata table-metadata + :results (common/reducible-query (table-query dashboard-name)) + :xform (map post-process-row)})) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/dashboards.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboards.clj similarity index 50% rename from enterprise/backend/src/metabase_enterprise/audit/pages/dashboards.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboards.clj index 9ace5fdc09a2..11806bd6667a 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/dashboards.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/dashboards.clj @@ -1,44 +1,31 @@ -(ns metabase-enterprise.audit.pages.dashboards +(ns metabase-enterprise.audit-app.pages.dashboards "Dashboards overview page." - (:require [metabase-enterprise.audit.pages.common :as common] - [metabase-enterprise.audit.pages.common.dashboards :as dashboards] + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.dashboards :as dashboards] [metabase.util.honeysql-extensions :as hx] [schema.core :as s])) -(defn ^:deprecated ^:internal-query-fn views-per-day - "DEPRECATED: use `views-and-saves-by-time ` instead." 
- [] - {:metadata [[:day {:display_name "Date", :base_type :type/Date}] - [:views {:display_name "Views", :base_type :type/Integer}]] - :results (common/reducible-query - {:select [[(hx/cast :date :timestamp) :day] - [:%count.* :views]] - :from [:view_log] - :where [:= :model (hx/literal "dashboard")] - :group-by [(hx/cast :date :timestamp)] - :order-by [(hx/cast :date :timestamp)]})}) - - -(s/defn ^:internal-query-fn views-and-saves-by-time - "Two-series timeseries that includes total number of Dashboard views and saves broken out by a `datetime-unit`." - [datetime-unit :- common/DateTimeUnitStr] +;; Two-series timeseries that includes total number of Dashboard views and saves broken out by a `datetime-unit`. +(s/defmethod audit.i/internal-query ::views-and-saves-by-time + [_ datetime-unit :- common/DateTimeUnitStr] {:metadata [[:date {:display_name "Date", :base_type (common/datetime-unit-str->base-type datetime-unit)}] [:views {:display_name "Views", :base_type :type/Integer}] [:saves {:display_name "Saves", :base_type :type/Integer}]] ;; this is so nice and easy to implement in a single query with FULL OUTER JOINS but unfortunately only pg supports ;; them(!) 
:results (let [views (common/query - {:select [[(common/grouped-datetime datetime-unit :timestamp) :date] - [:%count.* :views]] - :from [:view_log] - :where [:= :model (hx/literal "dashboard")] - :group-by [(common/grouped-datetime datetime-unit :timestamp)]}) + {:select [[(common/grouped-datetime datetime-unit :timestamp) :date] + [:%count.* :views]] + :from [:view_log] + :where [:= :model (hx/literal "dashboard")] + :group-by [(common/grouped-datetime datetime-unit :timestamp)]}) date->views (zipmap (map :date views) (map :views views)) saves (common/query - {:select [[(common/grouped-datetime datetime-unit :created_at) :date] - [:%count.* :saves]] - :from [:report_dashboard] - :group-by [(common/grouped-datetime datetime-unit :created_at)]}) + {:select [[(common/grouped-datetime datetime-unit :created_at) :date] + [:%count.* :saves]] + :from [:report_dashboard] + :group-by [(common/grouped-datetime datetime-unit :created_at)]}) date->saves (zipmap (map :date saves) (map :saves saves)) all-dates (sort (keep identity (distinct (concat (keys date->views) (keys date->saves)))))] @@ -47,10 +34,9 @@ :views (date->views date 0) :saves (date->saves date 0)}))}) - -(defn ^:internal-query-fn ^:deprecated most-popular - "Deprecated: use `most-popular-with-avg-speed` instead." - [] +;; DEPRECATED Use `most-popular-with-avg-speed` instead. +(defmethod audit.i/internal-query ::most-popular + [_] {:metadata [[:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Title, :remapped_from :dashboard_id}] [:views {:display_name "Views", :base_type :type/Integer}]] @@ -65,49 +51,48 @@ :order-by [[:%count.* :desc]] :limit 10})}) -(defn ^:internal-query-fn most-popular-with-avg-speed - "10 most popular dashboards with their average speed." - [] +;; Ten most popular dashboards with their average speed. 
+(defmethod audit.i/internal-query ::most-popular-with-avg-speed + [_] {:metadata [[:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Title, :remapped_from :dashboard_id}] [:views {:display_name "Views", :base_type :type/Integer}] [:avg_running_time {:display_name "Avg. Question Load Time (ms)", :base_type :type/Decimal}]] :results (common/reducible-query - {:with [[:most_popular {:select [[:d.id :dashboard_id] - [:d.name :dashboard_name] - [:%count.* :views]] - :from [[:view_log :vl]] - :left-join [[:report_dashboard :d] [:= :vl.model_id :d.id]] - :where [:= :vl.model (hx/literal "dashboard")] - :group-by [:d.id] - :order-by [[:%count.* :desc]] - :limit 10}] - [:card_running_time {:select [:qe.card_id - [:%avg.qe.running_time :avg_running_time]] - :from [[:query_execution :qe]] - :where [:not= :qe.card_id nil] - :group-by [:qe.card_id]}] - [:dash_avg_running_time {:select [[:d.id :dashboard_id] - [:%avg.rt.avg_running_time :avg_running_time]] - :from [[:report_dashboardcard :dc]] - :left-join [[:card_running_time :rt] [:= :dc.card_id :rt.card_id] - [:report_dashboard :d] [:= :dc.dashboard_id :d.id]] - :group-by [:d.id] - :where [:in :d.id {:select [:dashboard_id] - :from [:most_popular]}]}]] - :select [:mp.dashboard_id - :mp.dashboard_name - :mp.views - :rt.avg_running_time] - :from [[:most_popular :mp]] - :left-join [[:dash_avg_running_time :rt] [:= :mp.dashboard_id :rt.dashboard_id]] - :order-by [[:mp.views :desc]] - :limit 10})}) - + {:with [[:most_popular {:select [[:d.id :dashboard_id] + [:d.name :dashboard_name] + [:%count.* :views]] + :from [[:view_log :vl]] + :left-join [[:report_dashboard :d] [:= :vl.model_id :d.id]] + :where [:= :vl.model (hx/literal "dashboard")] + :group-by [:d.id] + :order-by [[:%count.* :desc]] + :limit 10}] + [:card_running_time {:select [:qe.card_id + [:%avg.qe.running_time :avg_running_time]] + :from [[:query_execution 
:qe]] + :where [:not= :qe.card_id nil] + :group-by [:qe.card_id]}] + [:dash_avg_running_time {:select [[:d.id :dashboard_id] + [:%avg.rt.avg_running_time :avg_running_time]] + :from [[:report_dashboardcard :dc]] + :left-join [[:card_running_time :rt] [:= :dc.card_id :rt.card_id] + [:report_dashboard :d] [:= :dc.dashboard_id :d.id]] + :group-by [:d.id] + :where [:in :d.id {:select [:dashboard_id] + :from [:most_popular]}]}]] + :select [:mp.dashboard_id + :mp.dashboard_name + :mp.views + :rt.avg_running_time] + :from [[:most_popular :mp]] + :left-join [[:dash_avg_running_time :rt] [:= :mp.dashboard_id :rt.dashboard_id]] + :order-by [[:mp.views :desc]] + :limit 10})}) -(defn ^:internal-query-fn ^:deprecated slowest - "Query that returns the 10 Dashboards that have the slowest average execution times, in descending order." - [] +;; DEPRECATED Query that returns the 10 Dashboards that have the slowest average execution times, in descending order. +(defmethod audit.i/internal-query ::slowest + [_] {:metadata [[:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Title, :remapped_from :dashboard_id}] [:avg_running_time {:display_name "Avg. Question Load Time (ms)", :base_type :type/Decimal}]] @@ -127,10 +112,9 @@ :order-by [[:avg_running_time :desc]] :limit 10})}) - -(defn ^:internal-query-fn ^:deprecated most-common-questions - "Query that returns the 10 Cards that appear most often in Dashboards, in descending order." - [] +;; DEPRECATED Query that returns the 10 Cards that appear most often in Dashboards, in descending order. 
+(defmethod audit.i/internal-query ::most-common-questions + [_] {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Card", :base_type :type/Title, :remapped_from :card_id}] [:count {:display_name "Count", :base_type :type/Integer}]] @@ -144,10 +128,9 @@ :order-by [[:%count.* :desc]] :limit 10})}) - -(s/defn ^:internal-query-fn table - "Internal audit app query powering a table of different Dashboards with lots of extra info about them." - ([] - (table nil)) - ([query-string :- (s/maybe s/Str)] +;; Internal audit app query powering a table of different Dashboards with lots of extra info about them. +(s/defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + ([_ query-string :- (s/maybe s/Str)] (dashboards/table query-string))) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/database_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/database_detail.clj new file mode 100644 index 000000000000..0e3cfe4cc529 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/database_detail.clj @@ -0,0 +1,37 @@ +(ns metabase-enterprise.audit-app.pages.database-detail + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase.util.schema :as su] + [ring.util.codec :as codec] + [schema.core :as s])) + +;; Query execution history for queries against this Database. 
+(s/defmethod audit.i/internal-query ::audit-log + [_ database-id :- su/IntGreaterThanZero] + {:metadata [[:started_at {:display_name "Viewed on", :base_type :type/DateTime}] + [:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :query}] + [:query_hash {:display_name "Query Hash", :base_type :type/Text}] + [:query {:display_name "Query", :base_type :type/Text, :remapped_from :card_id}] + [:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :user}] + [:user {:display_name "Queried by", :base_type :type/Text, :remapped_from :user_id}] + [:schema {:display_name "Schema", :base_type :type/Text}] + [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table}] + [:table {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}]] + :results (common/reducible-query + {:select [:qe.started_at + [:card.id :card_id] + [:qe.hash :query_hash] + [(common/card-name-or-ad-hoc :card) :query] + [:u.id :user_id] + [(common/user-full-name :u) :user] + :t.schema + [:t.id :table_id] + [:t.name :table]] + :from [[:query_execution :qe]] + :where [:= :qe.database_id database-id] + :join [[:metabase_database :db] [:= :db.id :qe.database_id] + [:core_user :u] [:= :qe.executor_id :u.id]] + :left-join [[:report_card :card] [:= :qe.card_id :card.id] + [:metabase_table :t] [:= :card.table_id :t.id]] + :order-by [[:qe.started_at :desc]]}) + :xform (map #(update (vec %) 2 codec/base64-encode))}) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/databases.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/databases.clj similarity index 79% rename from enterprise/backend/src/metabase_enterprise/audit/pages/databases.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/databases.clj index e58632c0a184..daa137866bd0 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/databases.clj +++ 
b/enterprise/backend/src/metabase_enterprise/audit_app/pages/databases.clj @@ -1,6 +1,7 @@ -(ns metabase-enterprise.audit.pages.databases +(ns metabase-enterprise.audit-app.pages.databases (:require [honeysql.core :as hsql] - [metabase-enterprise.audit.pages.common :as common] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.cron :as cron] [schema.core :as s])) @@ -15,9 +16,11 @@ ;; JOIN metabase_database db ON t.db_id = db.id ;; GROUP BY db.id ;; ORDER BY lower(db.name) ASC -(defn ^:internal-query-fn ^:deprecated total-query-executions-by-db - "Return Databases with the total number of queries ran against them and the average running time for all queries." - [] +;; +;; DEPRECATED Return Databases with the total number of queries ran against them and the average running time for all +;; queries. +(defmethod audit.i/internal-query ::total-query-executions-by-db + [_] {:metadata [[:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] [:database_name {:display_name "Database", :base_type :type/Text, :remapped_from :database_id}] [:queries {:display_name "Queries", :base_type :type/Integer}] @@ -34,9 +37,9 @@ :group-by [:db.id] :order-by [[:%lower.db.name :asc]]})}) -(s/defn ^:internal-query-fn query-executions-by-time - "Query that returns count of query executions grouped by Database and a `datetime-unit`." - [datetime-unit :- common/DateTimeUnitStr] +;; Query that returns count of query executions grouped by Database and a `datetime-unit`. 
+(s/defmethod audit.i/internal-query ::query-executions-by-time + [_ datetime-unit :- common/DateTimeUnitStr] {:metadata [[:date {:display_name "Date", :base_type (common/datetime-unit-str->base-type datetime-unit)}] [:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] [:database_name {:display_name "Database Name", :base_type :type/Name, :remapped_from :database_id}] @@ -63,16 +66,17 @@ [:%lower.db.name :asc] [:qx.database_id :asc]]})}) -(defn ^:deprecated ^:internal-query-fn query-executions-per-db-per-day - "Query that returns count of query executions grouped by Database and day." - [] - (query-executions-by-time "day")) +;; DEPRECATED Use `::query-executions-by-time` instead. Query that returns count of query executions grouped by +;; Database and day. +(defmethod audit.i/internal-query ::query-executions-per-db-per-day + [_] + (audit.i/internal-query ::query-executions-by-time "day")) - -(s/defn ^:internal-query-fn table - ([] - (table nil)) - ([query-string :- (s/maybe s/Str)] +;; Table with information and statistics about all the data warehouse Databases in this Metabase instance. +(s/defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + ([_ query-string :- (s/maybe s/Str)] ;; TODO - Should we convert sync_schedule from a cron string into English? 
Not sure that's going to be feasible for ;; really complicated schedules {:metadata [[:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :title}] @@ -80,7 +84,8 @@ [:added_on {:display_name "Added On", :base_type :type/DateTime}] [:sync_schedule {:display_name "Sync Schedule", :base_type :type/Text}] [:schemas {:display_name "Schemas", :base_type :type/Integer}] - [:tables {:display_name "Tables", :base_type :type/Integer}]] + [:tables {:display_name "Tables", :base_type :type/Integer}] + [:cache_ttl {:display_name "Cache Duration", :base_type :type/Integer}]] :results (common/reducible-query (-> {:with [[:counts {:select [[:db_id :id] @@ -93,7 +98,8 @@ [:db.created_at :added_on] [:db.metadata_sync_schedule :sync_schedule] [:counts.schemas :schemas] - [:counts.tables :tables]] + [:counts.tables :tables] + [:db.cache_ttl :cache_ttl]] :from [[:metabase_database :db]] :left-join [:counts [:= :db.id :counts.id]] :order-by [[:%lower.db.name :asc] diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/downloads.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/downloads.clj similarity index 62% rename from enterprise/backend/src/metabase_enterprise/audit/pages/downloads.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/downloads.clj index 0d01a99e2504..2691d2a8c175 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/downloads.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/downloads.clj @@ -1,63 +1,55 @@ -(ns metabase-enterprise.audit.pages.downloads +(ns metabase-enterprise.audit-app.pages.downloads "Audit queries returning info about query downloads. Query downloads are any query executions whose results are returned as CSV/JSON/XLS." 
(:require [honeysql.core :as hsql] - [metabase-enterprise.audit.pages.common :as common] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.db :as mdb] [metabase.driver.sql.query-processor :as sql.qp] - [metabase.util.honeysql-extensions :as hx] - [schema.core :as s])) + [metabase.util.honeysql-extensions :as hx])) -;;; ------------------------------------------------ per-day-by-size ------------------------------------------------- - -(s/defn ^:internal-query-fn per-day-by-size - "Pairs of count of rows downloaded and date downloaded for the 1000 largest (in terms of row count) queries over the - past 30 days. Intended to power scatter plot." - [] +;; Pairs of count of rows downloaded and date downloaded for the 1000 largest (in terms of row count) queries over the +;; past 30 days. Intended to power scatter plot. +(defmethod audit.i/internal-query ::per-day-by-size + [_] {:metadata [[:date {:display_name "Day", :base_type :type/DateTime}] [:rows {:display_name "Rows in Query", :base_type :type/Integer}] [:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :user_name}] [:user_name {:display_name "User", :base_type :type/Text, :remapped_from :user_id}]] :results (common/reducible-query - {:select [[:qe.started_at :date] - [:qe.result_rows :rows] - [:qe.executor_id :user_id] - [(common/user-full-name :u) :user_name]] - :from [[:query_execution :qe]] - :left-join [[:core_user :u] [:= :qe.executor_id :u.id]] - :where [:and - [:> :qe.started_at (sql.qp/add-interval-honeysql-form (mdb/db-type) :%now -30 :day)] - (common/query-execution-is-download :qe)] - :order-by [[:qe.result_rows :desc]] - :limit 1000})}) - - -;;; ---------------------------------------------------- per-user ---------------------------------------------------- - -(s/defn ^:internal-query-fn per-user - "Total count of query downloads broken out by user, ordered by highest total, for the top 10 users." 
- [] + {:select [[:qe.started_at :date] + [:qe.result_rows :rows] + [:qe.executor_id :user_id] + [(common/user-full-name :u) :user_name]] + :from [[:query_execution :qe]] + :left-join [[:core_user :u] [:= :qe.executor_id :u.id]] + :where [:and + [:> :qe.started_at (sql.qp/add-interval-honeysql-form (mdb/db-type) :%now -30 :day)] + (common/query-execution-is-download :qe)] + :order-by [[:qe.result_rows :desc]] + :limit 1000})}) + +;; Total count of query downloads broken out by user, ordered by highest total, for the top 10 users. +(defmethod audit.i/internal-query ::per-user + [_] {:metadata [[:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :user_name}] [:user_name {:display_name "User", :base_type :type/Text, :remapped_from :user_id}] [:downloads {:display_name "Downloads", :base_type :type/Integer}]] :results (common/reducible-query - {:with [[:downloads_by_user - {:select [[:qe.executor_id :user_id] - [:%count.* :downloads]] - :from [[:query_execution :qe]] - :where (common/query-execution-is-download :qe) - :group-by [:qe.executor_id] - :order-by [[:%count.* :desc]] - :limit 10}]] - :select [[:d.user_id :user_id] - [(common/user-full-name :u) :user_name] - [:d.downloads :downloads]] - :from [[:downloads_by_user :d]] - :join [[:core_user :u] [:= :d.user_id :u.id]] - :order-by [[:d.downloads :desc]]})}) - - -;;; ---------------------------------------------------- by-size ----------------------------------------------------- + {:with [[:downloads_by_user + {:select [[:qe.executor_id :user_id] + [:%count.* :downloads]] + :from [[:query_execution :qe]] + :where (common/query-execution-is-download :qe) + :group-by [:qe.executor_id] + :order-by [[:%count.* :desc]] + :limit 10}]] + :select [[:d.user_id :user_id] + [(common/user-full-name :u) :user_name] + [:d.downloads :downloads]] + :from [[:downloads_by_user :d]] + :join [[:core_user :u] [:= :d.user_id :u.id]] + :order-by [[:d.downloads :desc]]})}) (def ^:private bucket-maxes "Add/remove 
numbers here to adjust buckets returned by the `by-size` query." @@ -111,31 +103,28 @@ [[:= :rows_bucket_max -1] (hx/literal (format "> %s" (format-number-add-commas (last bucket-maxes))))]))) -(s/defn ^:internal-query-fn by-size - "Query download count broken out by bucketed number of rows of query. E.g. 10 downloads of queries with 0-10 rows, 15 - downloads of queries with 11-100, etc. Intended to power bar chart." - [] +;; Query download count broken out by bucketed number of rows of query. E.g. 10 downloads of queries with 0-10 rows, +;; 15 downloads of queries with 11-100, etc. Intended to power bar chart. +(defmethod audit.i/internal-query ::by-size + [_] {:metadata [[:rows {:display_name "Rows Downloaded", :base_type :type/Text}] [:downloads {:display_name "Downloads", :base_type :type/Integer}]] :results (common/reducible-query - {:with [[:bucketed_downloads - {:select [[rows->bucket-case-expression :rows_bucket_max]] - :from [:query_execution] - :where [:and - (common/query-execution-is-download :query_execution) - [:not= :result_rows nil]]}]] - :select [[bucket->range-str-case-expression :rows] - [:%count.* :downloads]] - :from [:bucketed_downloads] - :group-by [:rows_bucket_max] - :order-by [[:rows_bucket_max :asc]]})}) - - -;;; ----------------------------------------------------- table ------------------------------------------------------ - -(s/defn ^:internal-query-fn table - "Table showing all query downloads ordered by most recent." - [] + {:with [[:bucketed_downloads + {:select [[rows->bucket-case-expression :rows_bucket_max]] + :from [:query_execution] + :where [:and + (common/query-execution-is-download :query_execution) + [:not= :result_rows nil]]}]] + :select [[bucket->range-str-case-expression :rows] + [:%count.* :downloads]] + :from [:bucketed_downloads] + :group-by [:rows_bucket_max] + :order-by [[:rows_bucket_max :asc]]})}) + +;; Table showing all query downloads ordered by most recent. 
+(defmethod audit.i/internal-query ::table + [_] {:metadata [[:downloaded_at {:display_name "Downloaded At", :base_type :type/DateTime}] [:rows_downloaded {:display_name "Rows Downloaded", :base_type :type/Integer}] [:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/queries.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/queries.clj new file mode 100644 index 000000000000..dfb2e91dae58 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/queries.clj @@ -0,0 +1,213 @@ +(ns metabase-enterprise.audit-app.pages.queries + (:require [honeysql.core :as hsql] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.cards :as cards] + [metabase.db.connection :as mdb.connection] + [metabase.util.honeysql-extensions :as hx])) + +;; DEPRECATED Query that returns data for a two-series timeseries chart with number of queries ran and average query +;; running time broken out by day. +(defmethod audit.i/internal-query ::views-and-avg-execution-time-by-day + [_] + {:metadata [[:day {:display_name "Date", :base_type :type/Date}] + [:views {:display_name "Views", :base_type :type/Integer}] + [:avg_running_time {:display_name "Avg. Running Time (ms)", :base_type :type/Decimal}]] + :results (common/reducible-query + {:select [[(hx/cast :date :started_at) :day] + [:%count.* :views] + [:%avg.running_time :avg_running_time]] + :from [:query_execution] + :group-by [(hx/cast :date :started_at)] + :order-by [[(hx/cast :date :started_at) :asc]]})}) + +;; Query that returns the 10 most-popular Cards based on number of query executions, in descending order. 
+(defmethod audit.i/internal-query ::most-popular + [_] + {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] + [:card_name {:display_name "Card", :base_type :type/Title, :remapped_from :card_id}] + [:executions {:display_name "Executions", :base_type :type/Integer}]] + :results (common/reducible-query + {:select [[:c.id :card_id] + [:c.name :card_name] + [:%count.* :executions]] + :from [[:query_execution :qe]] + :join [[:report_card :c] [:= :qe.card_id :c.id]] + :group-by [:c.id] + :order-by [[:executions :desc]] + :limit 10})}) + +;; DEPRECATED Query that returns the 10 slowest-running Cards based on average query execution time, in descending +;; order. +(defmethod audit.i/internal-query ::slowest + [_] + {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] + [:card_name {:display_name "Card", :base_type :type/Title, :remapped_from :card_id}] + [:avg_running_time {:display_name "Avg. Running Time (ms)", :base_type :type/Decimal}]] + :results (common/reducible-query + {:select [[:c.id :card_id] + [:c.name :card_name] + [:%avg.running_time :avg_running_time]] + :from [[:query_execution :qe]] + :join [[:report_card :c] [:= :qe.card_id :c.id]] + :group-by [:c.id] + :order-by [[:avg_running_time :desc]] + :limit 10})}) + +;; List of all failing questions +(defmethod audit.i/internal-query ::bad-table + ([_] + (audit.i/internal-query ::bad-table nil nil nil nil nil)) + ([_ + error-filter + db-filter + collection-filter + sort-column + sort-direction] + {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer :remapped_to :card_name}] + [:card_name {:display_name "Question", :base_type :type/Text :remapped_from :card_id}] + [:error_substr {:display_name "Error", :base_type :type/Text :code true}] + [:collection_id {:display_name "Collection ID", :base_type :type/Integer :remapped_to :collection_name}] + [:collection_name {:display_name "Collection", 
:base_type :type/Text :remapped_from :collection_id}] + [:database_id {:display_name "Database ID", :base_type :type/Integer :remapped_to :database_name}] + [:database_name {:display_name "Database", :base_type :type/Text :remapped_from :database_id}] + [:schema_name {:display_name "Schema", :base_type :type/Text}] + [:table_id {:display_name "Table ID", :base_type :type/Integer :remapped_to :table_name}] + [:table_name {:display_name "Table", :base_type :type/Text :remapped_from :table_id}] + [:last_run_at {:display_name "Last run at", :base_type :type/DateTime}] + [:total_runs {:display_name "Total runs", :base_type :type/Integer}] + ;; if it appears a billion times each in 2 dashboards, that's 2 billion appearances + [:num_dashboards {:display_name "Dashboards it's in", :base_type :type/Integer}] + [:user_id {:display_name "Created By ID", :base_type :type/Integer :remapped_to :user_name}] + [:user_name {:display_name "Created By", :base_type :type/Text :remapped_from :user_id}] + [:updated_at {:display_name "Updated At", :base_type :type/DateTime}]] + :results (common/reducible-query + (let [coll-name (hsql/call :coalesce :coll.name "Our Analytics") + error-substr (hsql/call :concat + (hsql/call :substring :latest_qe.error + (if (= (mdb.connection/db-type) :mysql) 1 0) + 60) + "...") + dash-count (hsql/call :coalesce :dash_card.count 0)] + (-> + {:with [cards/query-runs + cards/latest-qe + cards/dashboards-count] + :select [[:card.id :card_id] + [:card.name :card_name] + [error-substr :error_substr] + :collection_id + [coll-name :collection_name] + :card.database_id + [:db.name :database_name] + [:t.schema :schema_name] + :card.table_id + [:t.name :table_name] + [:latest_qe.started_at :last_run_at] + [:query_runs.count :total_runs] + [dash-count :num_dashboards] + [:card.creator_id :user_id] + [(common/user-full-name :u) :user_name] + [:card.updated_at :updated_at]] + :from [[:report_card :card]] + :left-join [[:collection :coll] [:= :card.collection_id 
:coll.id] + [:metabase_database :db] [:= :card.database_id :db.id] + [:metabase_table :t] [:= :card.table_id :t.id] + [:core_user :u] [:= :card.creator_id :u.id] + :latest_qe [:= :card.id :latest_qe.card_id] + :query_runs [:= :card.id :query_runs.card_id] + :dash_card [:= :card.id :dash_card.card_id]] + :where [:and + [:= :card.archived false] + [:<> :latest_qe.error nil]]} + (common/add-search-clause error-filter :latest_qe.error) + (common/add-search-clause db-filter :db.name) + (common/add-search-clause collection-filter coll-name) + (common/add-sort-clause + (or sort-column "card.name") + (or sort-direction "asc")))))})) + +;; A list of all questions. +;; +;; Three possible argument lists. All arguments are always nullable. +;; +;; - [] : +;; Dump them all, sort by name ascending +;; +;; - [question-filter] : +;; Dump all filtered by the question-filter string, sort by name ascending. +;; question-filter filters on the `name` column in `cards` table. +;; +;; - [question-filter, collection-filter, sort-column, sort-direction] : +;; Dump all filtered by both question-filter and collection-filter, +;; sort by the given column and sort direction. +;; question-filter filters on the `name` column in `cards` table. +;; collection-filter filters on the `name` column in `collections` table. +;; +;; Sort column is given over in keyword form to honeysql. Default `card.name` +;; +;; Sort direction can be `asc` or `desc`, ascending and descending respectively. Default `asc`. +;; +;; All inputs have to be strings because that's how the magic middleware +;; that turns these functions into clojure-backed 'datasets' works. 
+(defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil nil nil nil)) + + ([query-type question-filter] + (audit.i/internal-query query-type question-filter nil nil nil)) + + ([_ + question-filter + collection-filter + sort-column + sort-direction] + {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] + [:card_name {:display_name "Name", :base_type :type/Name, :remapped_from :card_id}] + [:collection_id {:display_name "Collection ID", :base_type :type/Integer, :remapped_to :collection_name}] + [:collection_name {:display_name "Collection", :base_type :type/Text, :remapped_from :collection_id}] + [:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] + [:database_name {:display_name "Database", :base_type :type/Text, :remapped_from :database_id}] + [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table_name}] + [:table_name {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}] + [:user_id {:display_name "Created By ID", :base_type :type/Integer, :remapped_to :user_name}] + [:user_name {:display_name "Created By", :base_type :type/Text, :remapped_from :user_id}] + [:public_link {:display_name "Public Link", :base_type :type/URL}] + [:cache_ttl {:display_name "Cache Duration", :base_type :type/Number}] + [:avg_exec_time {:display_name "Average Runtime (ms)", :base_type :type/Integer}] + [:total_runtime {:display_name "Total Runtime (ms)", :base_type :type/Number}] + [:query_runs {:display_name "Query Runs", :base_type :type/Integer}]] + :results (common/reducible-query + (-> + {:with [cards/avg-exec-time-45 + cards/total-exec-time-45 + cards/query-runs-45] + :select [[:card.id :card_id] + [:card.name :card_name] + :collection_id + [:coll.name :collection_name] + :card.database_id + [:db.name :database_name] + :card.table_id + [:t.name :table_name] + [:card.creator_id :user_id] + 
[(common/user-full-name :u) :user_name] + [(common/card-public-url :card.public_uuid) :public_link] + :card.cache_ttl + [:avg_exec_time.avg_running_time_ms :avg_exec_time] + [:total_runtime.total_running_time_ms :total_runtime] + [:query_runs.count :query_runs]] + :from [[:report_card :card]] + :left-join [[:collection :coll] [:= :card.collection_id :coll.id] + [:metabase_database :db] [:= :card.database_id :db.id] + [:metabase_table :t] [:= :card.table_id :t.id] + [:core_user :u] [:= :card.creator_id :u.id] + :avg_exec_time [:= :card.id :avg_exec_time.card_id] + :total_runtime [:= :card.id :total_runtime.card_id] + :query_runs [:= :card.id :query_runs.card_id]] + :where [:= :card.archived false]} + (common/add-search-clause question-filter :card.name) + (common/add-search-clause collection-filter :coll.name) + (common/add-sort-clause + (or sort-column "card.name") + (or sort-direction "asc"))))})) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/query_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/query_detail.clj new file mode 100644 index 000000000000..18b57abb27bb --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/query_detail.clj @@ -0,0 +1,72 @@ +(ns metabase-enterprise.audit-app.pages.query-detail + "Queries to show details about a (presumably ad-hoc) query." 
+ (:require [cheshire.core :as json] + [honeysql.core :as hsql] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.cards :as cards] + [metabase.util.schema :as su] + [ring.util.codec :as codec] + [schema.core :as s])) + +(defmethod audit.i/internal-query ::bad-card + [_ card-id] + {:metadata [[:card_id {:display_name "Question ID", :base_type :type/Integer :remapped_from :card_name}] + [:card_name {:display_name "Question", :base_type :type/Text :remapped_from :card_id}] + [:error_str {:display_name "Error", :base_type :type/Text :code true}] + [:collection_id {:display_name "Collection ID", :base_type :type/Integer :remapped_to :collection_name}] + [:collection_name {:display_name "Collection", :base_type :type/Text :remapped_from :collection_id}] + [:database_id {:display_name "Database ID", :base_type :type/Integer :remapped_to :database_name}] + [:database_name {:display_name "Database", :base_type :type/Text :remapped_from :database_id}] + [:schema_name {:display_name "Schema", :base_type :type/Text}] + [:table_id {:display_name "Table ID", :base_type :type/Integer :remapped_to :table_name}] + [:table_name {:display_name "Table", :base_type :type/Text :remapped_from :table_id}] + [:last_run_at {:display_name "Last run at", :base_type :type/DateTime}] + [:total_runs {:display_name "Total runs", :base_type :type/Integer}] + ;; Denormalize by string_agg in order to avoid having to deal with complicated left join + [:dash_name_str {:display_name "Dashboards it's in", :base_type :type/Text}] + [:user_id {:display_name "Created By ID", :base_type :type/Integer :remapped_to :user_name}] + [:user_name {:display_name "Created By", :base_type :type/Text :remapped_from :user_id}] + [:updated_at {:display_name "Updated At", :base_type :type/DateTime}]] + :results (common/reducible-query + {:with [cards/query-runs + cards/latest-qe + cards/dashboards-ids] + :select 
[[:card.id :card_id] + [:card.name :card_name] + [:latest_qe.error :error_str] + :collection_id + [(hsql/call :coalesce :coll.name "Our Analytics") :collection_name] + :card.database_id + [:db.name :database_name] + [:t.schema :schema_name] + :card.table_id + [:t.name :table_name] + [:latest_qe.started_at :last_run_at] + [:query_runs.count :total_runs] + [:dash_card.name_str :dash_name_str] + [:card.creator_id :user_id] + [(common/user-full-name :u) :user_name] + [:card.updated_at :updated_at]] + :from [[:report_card :card]] + :left-join [[:collection :coll] [:= :card.collection_id :coll.id] + [:metabase_database :db] [:= :card.database_id :db.id] + [:metabase_table :t] [:= :card.table_id :t.id] + [:core_user :u] [:= :card.creator_id :u.id] + :latest_qe [:= :card.id :latest_qe.card_id] + :query_runs [:= :card.id :query_runs.card_id] + :dash_card [:= :card.id :dash_card.card_id]] + :where [:= :card.id card-id] })}) + +;; Details about a specific query (currently just average execution time). +(s/defmethod audit.i/internal-query ::details + [_ query-hash :- su/NonBlankString] + {:metadata [[:query {:display_name "Query", :base_type :type/Dictionary}] + [:average_execution_time {:display_name "Avg. Exec. Time (ms)", :base_type :type/Number}]] + :results (common/reducible-query + {:select [:query + :average_execution_time] + :from [:query] + :where [:= :query_hash (codec/base64-decode query-hash)] + :limit 1}) + :xform (map #(update (vec %) 0 json/parse-string))}) diff --git a/enterprise/backend/src/metabase_enterprise/audit_app/pages/question_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/question_detail.clj new file mode 100644 index 000000000000..e9561abdde5a --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/question_detail.clj @@ -0,0 +1,33 @@ +(ns metabase-enterprise.audit-app.pages.question-detail + "Detail page for a single Card (Question)." 
+ (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] + [metabase-enterprise.audit-app.pages.common.card-and-dashboard-detail :as card-and-dash-detail] + [metabase.models.card :refer [Card]] + [metabase.util.schema :as su] + [schema.core :as s])) + +;; Get views of a Card broken out by a time `unit`, e.g. `day` or `day-of-week`. +(s/defmethod audit.i/internal-query ::views-by-time + [_ card-id :- su/IntGreaterThanZero datetime-unit :- common/DateTimeUnitStr] + (card-and-dash-detail/views-by-time "card" card-id datetime-unit)) + +;; Get cached views of a Card broken out by a time `unit`, e.g. `day` or `day-of-week`. +(s/defmethod audit.i/internal-query ::cached-views-by-time + [_ card-id :- su/IntGreaterThanZero, datetime-unit :- common/DateTimeUnitStr] + (card-and-dash-detail/cached-views-by-time card-id datetime-unit)) + +;; Get the revision history for a Card. +(s/defmethod audit.i/internal-query ::revision-history + [_ card-id :- su/IntGreaterThanZero] + (card-and-dash-detail/revision-history Card card-id)) + +;; Get a view log for a Card. 
+(s/defmethod audit.i/internal-query ::audit-log + [_ card-id :- su/IntGreaterThanZero] + (card-and-dash-detail/audit-log "card" card-id)) + +;; Average execution time broken out by period +(s/defmethod audit.i/internal-query ::avg-execution-time-by-time + [_ card-id :- su/IntGreaterThanZero datetime-unit :- common/DateTimeUnitStr] + (card-and-dash-detail/avg-execution-time-by-time card-id datetime-unit)) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/schemas.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/schemas.clj similarity index 67% rename from enterprise/backend/src/metabase_enterprise/audit/pages/schemas.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/schemas.clj index 276b06964a38..1704e6df11c2 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/schemas.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/schemas.clj @@ -1,5 +1,6 @@ -(ns metabase-enterprise.audit.pages.schemas - (:require [metabase-enterprise.audit.pages.common :as common] +(ns metabase-enterprise.audit-app.pages.schemas + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.honeysql-extensions :as hx] [schema.core :as s])) @@ -22,9 +23,10 @@ ;; GROUP BY db_name, db_schema ;; ORDER BY count(*) DESC ;; LIMIT 10 -(defn ^:internal-query-fn ^:deprecated most-queried - "Query that returns the top 10 most-queried schemas, in descending order." - [] +;; +;; DEPRECATED Query that returns the top 10 most-queried schemas, in descending order. 
+(defmethod audit.i/internal-query ::most-queried + [_] {:metadata [[:schema {:display_name "Schema", :base_type :type/Title}] [:executions {:display_name "Executions", :base_type :type/Integer}]] :results (common/reducible-query @@ -64,9 +66,10 @@ ;; GROUP BY db_name, db_schema ;; ORDER BY avg_running_time DESC ;; LIMIT 10 -(defn ^:internal-query-fn ^:deprecated slowest-schemas - "Query that returns the top 10 schemas with the slowest average query execution time in descending order." - [] +;; +;; DEPRECATED Query that returns the top 10 schemas with the slowest average query execution time in descending order. +(defmethod audit.i/internal-query ::slowest-schemas + [_] {:metadata [[:schema {:display_name "Schema", :base_type :type/Title}] [:avg_running_time {:display_name "Average Running Time (ms)", :base_type :type/Decimal}]] :results (common/reducible-query @@ -109,13 +112,15 @@ ;; SELECT s.database_name AS "database", s."schema", s.tables, c.saved_count AS saved_queries ;; FROM schemas ;; LEFT JOIN cards c -;; ON s.database_id = c.database_id AND s."schema" = c."schema" -(s/defn ^:internal-query-fn ^:deprecated table - "Query that returns a data for a table full of fascinating information about the different schemas in use in our - application." - ([] - (table nil)) - ([query-string :- (s/maybe s/Str)] +;; ON s.database_id = c.database_id +;; AND s."schema" = c."schema" +;; +;; DEPRECATED Query that returns a data for a table full of fascinating information about the different schemas in use +;; in our application. 
+(s/defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + ([_ query-string :- (s/maybe s/Str)] {:metadata [[:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database}] [:database {:display_name "Database", :base_type :type/Title, :remapped_from :database_id}] [:schema_id {:display_name "Schema ID", :base_type :type/Text, :remapped_to :schema}] @@ -123,30 +128,30 @@ [:tables {:display_name "Tables", :base_type :type/Integer}] [:saved_queries {:display_name "Saved Queries", :base_type :type/Integer}]] :results (common/reducible-query - (-> - {:with [[:cards {:select [[:t.db_id :database_id] - :t.schema - [:%count.* :saved_count]] - :from [[:report_card :c]] - :left-join [[:metabase_table :t] [:= :c.table_id :t.id]] - :where [:not= :c.table_id nil] - :group-by [:t.db_id :t.schema]}] - [:schemas {:select [[:db.id :database_id] - [:db.name :database_name] - :t.schema - [:%count.* :tables]] - :from [[:metabase_table :t]] - :left-join [[:metabase_database :db] [:= :t.db_id :db.id]] - :group-by [:db.id :t.schema] - :order-by [[:db.id :asc] [:t.schema :asc]]}]] - :select [:s.database_id - [:s.database_name :database] - [(hx/concat :s.database_id (hx/literal ".") :s.schema) :schema_id] - :s.schema - :s.tables - [:c.saved_count :saved_queries]] - :from [[:schemas :s]] - :left-join [[:cards :c] [:and - [:= :s.database_id :c.database_id] - [:= :s.schema :c.schema]]]} - (common/add-search-clause query-string :s.schema)))})) + (-> + {:with [[:cards {:select [[:t.db_id :database_id] + :t.schema + [:%count.* :saved_count]] + :from [[:report_card :c]] + :left-join [[:metabase_table :t] [:= :c.table_id :t.id]] + :where [:not= :c.table_id nil] + :group-by [:t.db_id :t.schema]}] + [:schemas {:select [[:db.id :database_id] + [:db.name :database_name] + :t.schema + [:%count.* :tables]] + :from [[:metabase_table :t]] + :left-join [[:metabase_database :db] [:= :t.db_id :db.id]] + :group-by [:db.id :t.schema] 
+ :order-by [[:db.id :asc] [:t.schema :asc]]}]] + :select [:s.database_id + [:s.database_name :database] + [(hx/concat :s.database_id (hx/literal ".") :s.schema) :schema_id] + :s.schema + :s.tables + [:c.saved_count :saved_queries]] + :from [[:schemas :s]] + :left-join [[:cards :c] [:and + [:= :s.database_id :c.database_id] + [:= :s.schema :c.schema]]]} + (common/add-search-clause query-string :s.schema)))})) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/table_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/table_detail.clj similarity index 82% rename from enterprise/backend/src/metabase_enterprise/audit/pages/table_detail.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/table_detail.clj index 2309d9eb4ae2..4dd12773928e 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/table_detail.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/table_detail.clj @@ -1,11 +1,13 @@ -(ns metabase-enterprise.audit.pages.table-detail - (:require [metabase-enterprise.audit.pages.common :as common] +(ns metabase-enterprise.audit-app.pages.table-detail + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.schema :as su] [ring.util.codec :as codec] [schema.core :as s])) -(s/defn ^:internal-query-fn audit-log - [table-id :- su/IntGreaterThanZero] +;; View log for a specific Table. 
+(s/defmethod audit.i/internal-query ::audit-log + [_ table-id :- su/IntGreaterThanZero] {:metadata [[:started_at {:display_name "Viewed on", :base_type :type/DateTime}] [:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :query}] [:query {:display_name "Query", :base_type :type/Text, :remapped_from :card_id}] diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/tables.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/tables.clj similarity index 67% rename from enterprise/backend/src/metabase_enterprise/audit/pages/tables.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/tables.clj index e852de402976..99c0b8f593d1 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/tables.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/tables.clj @@ -1,5 +1,6 @@ -(ns metabase-enterprise.audit.pages.tables - (:require [metabase-enterprise.audit.pages.common :as common] +(ns metabase-enterprise.audit-app.pages.tables + (:require [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.honeysql-extensions :as hx] [schema.core :as s])) @@ -39,23 +40,21 @@ [:metabase_database :db] [:= :t.db_id :db.id]] :order-by [[:executions asc-or-desc]]})}) -(defn ^:internal-query-fn most-queried - "Query that returns the top-10 most-queried Tables, in descending order." - [] +;; Query that returns the top-10 most-queried Tables, in descending order. +(defmethod audit.i/internal-query ::most-queried + [_] (query-counts :desc)) -(defn ^:internal-query-fn least-queried - "Query that returns the top-10 least-queried Tables (with at least one query execution), in ascending order." - [] +;; Query that returns the top-10 least-queried Tables (with at least one query execution), in ascending order. 
+(defmethod audit.i/internal-query ::least-queried + [_] (query-counts :asc)) - - -(s/defn ^:internal-query-fn table - "A table of Tables." - ([] - (table nil)) - ([query-string :- (s/maybe s/Str)] +;; A table of Tables. +(s/defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) + ([_ query-string :- (s/maybe s/Str)] {:metadata [[:database_id {:display_name "Database ID", :base_type :type/Integer, :remapped_to :database_name}] [:database_name {:display_name "Database", :base_type :type/Text, :remapped_from :database_id}] [:schema_id {:display_name "Schema ID", :base_type :type/Text, :remapped_to :schema_name}] @@ -64,18 +63,18 @@ [:table_name {:display_name "Table Name in DB", :base_type :type/Name, :remapped_from :table_id}] [:table_display_name {:display_name "Table Display Name", :base_type :type/Text}]] :results (common/reducible-query - (-> - {:select [[:db.id :database_id] - [:db.name :database_name] - [(hx/concat :db.id (hx/literal ".") :t.schema) :schema_id] - [:t.schema :table_schema] - [:t.id :table_id] - [:t.name :table_name] - [:t.display_name :table_display_name]] - :from [[:metabase_table :t]] - :join [[:metabase_database :db] [:= :t.db_id :db.id]] - :order-by [[:%lower.db.name :asc] - [:%lower.t.schema :asc] - [:%lower.t.name :asc]] - :where [:= :t.active true]} - (common/add-search-clause query-string :db.name :t.schema :t.name :t.display_name)))})) + (-> + {:select [[:db.id :database_id] + [:db.name :database_name] + [(hx/concat :db.id (hx/literal ".") :t.schema) :schema_id] + [:t.schema :table_schema] + [:t.id :table_id] + [:t.name :table_name] + [:t.display_name :table_display_name]] + :from [[:metabase_table :t]] + :join [[:metabase_database :db] [:= :t.db_id :db.id]] + :order-by [[:%lower.db.name :asc] + [:%lower.t.schema :asc] + [:%lower.t.name :asc]] + :where [:= :t.active true]} + (common/add-search-clause query-string :db.name :t.schema :t.name :t.display_name)))})) diff --git 
a/enterprise/backend/src/metabase_enterprise/audit/pages/user_detail.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/user_detail.clj
similarity index 79%
rename from enterprise/backend/src/metabase_enterprise/audit/pages/user_detail.clj
rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/user_detail.clj
index 652e0a915d3a..1fedecc936b1 100644
--- a/enterprise/backend/src/metabase_enterprise/audit/pages/user_detail.clj
+++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/user_detail.clj
@@ -1,19 +1,21 @@
-(ns metabase-enterprise.audit.pages.user-detail
+(ns metabase-enterprise.audit-app.pages.user-detail
   (:require [honeysql.core :as hsql]
-            [metabase-enterprise.audit.pages.common :as common]
-            [metabase-enterprise.audit.pages.common.cards :as cards]
-            [metabase-enterprise.audit.pages.common.dashboards :as dashboards]
+            [metabase-enterprise.audit-app.interface :as audit.i]
+            [metabase-enterprise.audit-app.pages.common :as common]
+            [metabase-enterprise.audit-app.pages.common.cards :as cards]
+            [metabase-enterprise.audit-app.pages.common.dashboards :as dashboards]
             [metabase.util.honeysql-extensions :as hx]
             [metabase.util.schema :as su]
             [metabase.util.urls :as urls]
             [ring.util.codec :as codec]
             [schema.core :as s]))
 
-(s/defn ^:internal-query-fn table
-  "Query that probides a single row of information about a given User, similar to the `users/table` query but restricted
-  to a single result.
-  (TODO - in the designs, this is pivoted; should we do that here in Clojure-land?)"
-  [user-id :- su/IntGreaterThanZero]
+;; Query that provides a single row of information about a given User, similar to the `users/table` query but
+;; restricted to a single result.
+;;
+;; (TODO - in the designs, this is pivoted; should we do that here in Clojure-land?)
+(s/defmethod audit.i/internal-query ::table + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:name {:display_name "Name", :base_type :type/Name}] [:role {:display_name "Role", :base_type :type/Text}] [:groups {:display_name "Groups", :base_type :type/Text}] @@ -42,18 +44,18 @@ :where [:= :creator_id user-id]}] [:users {:select [[(common/user-full-name :u) :name] [(hsql/call :case - [:= :u.is_superuser true] - (hx/literal "Admin") - :else - (hx/literal "User")) + [:= :u.is_superuser true] + (hx/literal "Admin") + :else + (hx/literal "User")) :role] :id :date_joined [(hsql/call :case - [:= nil :u.sso_source] - (hx/literal "Email") - :else - :u.sso_source) + [:= nil :u.sso_source] + (hx/literal "Email") + :else + :u.sso_source) :signup_method] :last_name] :from [[:core_user :u]] @@ -74,9 +76,9 @@ :dashboards_saved :pulses_saved]})}) -(s/defn ^:internal-query-fn most-viewed-dashboards - "Return the 10 most-viewed Dashboards for a given User, in descending order." - [user-id :- su/IntGreaterThanZero] +;; Return the 10 most-viewed Dashboards for a given User, in descending order. +(s/defmethod audit.i/internal-query ::most-viewed-dashboards + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Name, :remapped_from :dashboard_id}] [:count {:display_name "Views", :base_type :type/Integer}]] @@ -93,9 +95,9 @@ :order-by [[:%count.* :desc]] :limit 10})}) -(s/defn ^:internal-query-fn most-viewed-questions - "Return the 10 most-viewed Questions for a given User, in descending order." - [user-id :- su/IntGreaterThanZero] +;; Return the 10 most-viewed Questions for a given User, in descending order. 
+(s/defmethod audit.i/internal-query ::most-viewed-questions + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Query", :base_type :type/Name, :remapped_from :card_id}] [:count {:display_name "Views", :base_type :type/Integer}]] @@ -112,8 +114,9 @@ :order-by [[:%count.* :desc]] :limit 10})}) -(s/defn ^:internal-query-fn query-views - [user-id :- su/IntGreaterThanZero] +;; Query views by a specific User. +(s/defmethod audit.i/internal-query ::query-views + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:viewed_on {:display_name "Viewed On", :base_type :type/DateTime}] [:card_id {:display_name "Card ID" :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Query", :base_type :type/Text, :remapped_from :card_id}] @@ -151,8 +154,9 @@ :order-by [[:qe.started_at :desc]]}) :xform (map #(update (vec %) 3 codec/base64-encode))}) -(s/defn ^:internal-query-fn dashboard-views - [user-id :- su/IntGreaterThanZero] +;; Dashboard views by a specific User. +(s/defmethod audit.i/internal-query ::dashboard-views + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:timestamp {:display_name "Viewed on", :base_type :type/DateTime}] [:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Text, :remapped_from :dashboard_id}] @@ -172,9 +176,12 @@ :left-join [[:collection :coll] [:= :dash.collection_id :coll.id]] :order-by [[:vl.timestamp :desc]]})}) -(s/defn ^:internal-query-fn object-views-by-time - "Timeseries chart that shows the number of Question or Dashboard views for a User, broken out by `datetime-unit`." 
- [user-id :- su/IntGreaterThanZero, model :- (s/enum "card" "dashboard"), datetime-unit :- common/DateTimeUnitStr] +;; Timeseries chart that shows the number of Question or Dashboard views for a User, broken out by `datetime-unit`. +(s/defmethod audit.i/internal-query ::object-views-by-time + [_ + user-id :- su/IntGreaterThanZero + model :- (s/enum "card" "dashboard") + datetime-unit :- common/DateTimeUnitStr] {:metadata [[:date {:display_name "Date", :base_type (common/datetime-unit-str->base-type datetime-unit)}] [:views {:display_name "Views", :base_type :type/Integer}]] :results (common/reducible-query @@ -187,14 +194,16 @@ :group-by [(common/grouped-datetime datetime-unit :timestamp)] :order-by [[(common/grouped-datetime datetime-unit :timestamp) :asc]]})}) -(s/defn ^:internal-query-fn created-dashboards - ([user-id] - (created-dashboards user-id nil)) - ([user-id :- su/IntGreaterThanZero, query-string :- (s/maybe s/Str)] +;; Dashboards created by a specific User. +(s/defmethod audit.i/internal-query ::created-dashboards + ([query-type user-id] + (audit.i/internal-query query-type user-id nil)) + ([_ user-id :- su/IntGreaterThanZero query-string :- (s/maybe s/Str)] (dashboards/table query-string [:= :u.id user-id]))) -(s/defn ^:internal-query-fn created-questions - [user-id :- su/IntGreaterThanZero] +;; Questions created by a specific User. +(s/defmethod audit.i/internal-query ::created-questions + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Title", :base_type :type/Name, :remapped_from :card_id}] [:collection_id {:display_name "Collection ID", :base_type :type/Integer, :remapped_to :collection_name}] @@ -205,7 +214,7 @@ [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :table_name}] [:table_name {:display_name "Table", :base_type :type/Text, :remapped_from :table_id}] [:avg_running_time_ms {:display_name "Avg. 
exec. time (ms)", :base_type :type/Number}] - [:cache_ttl {:display_name "Cache TTL", :base_type :type/Number}] + [:cache_ttl {:display_name "Cache Duration", :base_type :type/Number}] [:public_link {:display_name "Public Link", :base_type :type/URL}] [:total_views {:display_name "Total Views", :base_type :type/Integer}]] :results (common/reducible-query @@ -236,10 +245,10 @@ :where [:= :card.creator_id user-id] :order-by [[:%lower.card.name :asc]]})}) -(s/defn ^:internal-query-fn downloads - "Table of query downloads (i.e., queries whose results are returned as CSV/JSON/XLS) done by this user, ordered by - most recent." - [user-id :- su/IntGreaterThanZero] +;; Table of query downloads (i.e., queries whose results are returned as CSV/JSON/XLS) done by this user, ordered by +;; most recent. +(s/defmethod audit.i/internal-query ::downloads + [_ user-id :- su/IntGreaterThanZero] {:metadata [[:downloaded_at {:display_name "Downloaded At", :base_type :type/DateTime}] [:rows_downloaded {:display_name "Rows Downloaded", :base_type :type/Integer}] [:card_id {:display_name "Card ID", :base_type :type/Integer, :remapped_to :card_name}] @@ -250,20 +259,20 @@ [:table_id {:display_name "Table ID", :base_type :type/Integer, :remapped_to :source_table}] [:source_table {:display_name "Source Table", :base_type :type/Text, :remapped_from :table_id}]] :results (common/reducible-query - {:select [[:qe.started_at :downloaded_at] - [:qe.result_rows :rows_downloaded] - [:card.id :card_id] - [(common/card-name-or-ad-hoc :card) :card_name] - [(common/native-or-gui :qe) :query_type] - [:db.id :database_id] - [:db.name :database] - [:t.id :table_id] - [:t.name :source_table]] - :from [[:query_execution :qe]] - :left-join [[:report_card :card] [:= :card.id :qe.card_id] - [:metabase_database :db] [:= :qe.database_id :db.id] - [:metabase_table :t] [:= :card.table_id :t.id]] - :where [:and - [:= :executor_id user-id] - (common/query-execution-is-download :qe)] - :order-by [[:qe.started_at 
:desc]]})}) + {:select [[:qe.started_at :downloaded_at] + [:qe.result_rows :rows_downloaded] + [:card.id :card_id] + [(common/card-name-or-ad-hoc :card) :card_name] + [(common/native-or-gui :qe) :query_type] + [:db.id :database_id] + [:db.name :database] + [:t.id :table_id] + [:t.name :source_table]] + :from [[:query_execution :qe]] + :left-join [[:report_card :card] [:= :card.id :qe.card_id] + [:metabase_database :db] [:= :qe.database_id :db.id] + [:metabase_table :t] [:= :card.table_id :t.id]] + :where [:and + [:= :executor_id user-id] + (common/query-execution-is-download :qe)] + :order-by [[:qe.started_at :desc]]})}) diff --git a/enterprise/backend/src/metabase_enterprise/audit/pages/users.clj b/enterprise/backend/src/metabase_enterprise/audit_app/pages/users.clj similarity index 74% rename from enterprise/backend/src/metabase_enterprise/audit/pages/users.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/pages/users.clj index c7b18a63741c..00e7b00c9d0a 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/pages/users.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/pages/users.clj @@ -1,14 +1,16 @@ -(ns metabase-enterprise.audit.pages.users +(ns metabase-enterprise.audit-app.pages.users (:require [honeysql.core :as hsql] - [metabase-enterprise.audit.pages.common :as common] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as common] [metabase.util.honeysql-extensions :as hx] [ring.util.codec :as codec] [schema.core :as s])) -(defn ^:internal-query-fn ^:deprecated active-users-and-queries-by-day - "Query that returns data for a two-series timeseries: the number of DAU (a User is considered active for purposes of - this query if they ran at least one query that day), and total number of queries ran. Broken out by day." 
- [] +;; DEPRECATED Query that returns data for a two-series timeseries: the number of DAU (a User is considered active for +;; purposes of this query if they ran at least one query that day), and total number of queries ran. Broken out by +;; day. +(defmethod audit.i/internal-query ::active-users-and-queries-by-day + [_] {:metadata [[:users {:display_name "Users", :base_type :type/Integer}] [:queries {:display_name "Queries", :base_type :type/Integer}] [:day {:display_name "Date", :base_type :type/Date}]] @@ -25,27 +27,26 @@ :group-by [:day] :order-by [[:day :asc]]})}) - -(s/defn ^:internal-query-fn active-and-new-by-time - "Two-series timeseries that returns number of active Users (Users who ran at least one query) and number of new Users, - broken out by `datetime-unit`." - [datetime-unit :- common/DateTimeUnitStr] +;; Two-series timeseries that returns number of active Users (Users who ran at least one query) and number of new +;; Users, broken out by `datetime-unit`. +(s/defmethod audit.i/internal-query ::active-and-new-by-time + [_ datetime-unit :- common/DateTimeUnitStr] {:metadata [[:date {:display_name "Date", :base_type (common/datetime-unit-str->base-type datetime-unit)}] [:active_users {:display_name "Active Users", :base_type :type/Integer}] [:new_users {:display_name "New Users", :base_type :type/Integer}]] ;; this is so nice and easy to implement in a single query with FULL OUTER JOINS but unfortunately only pg supports ;; them(!) 
:results (let [active (common/query - {:select [[(common/grouped-datetime datetime-unit :started_at) :date] - [:%distinct-count.executor_id :count]] - :from [:query_execution] - :group-by [(common/grouped-datetime datetime-unit :started_at)]}) + {:select [[(common/grouped-datetime datetime-unit :started_at) :date] + [:%distinct-count.executor_id :count]] + :from [:query_execution] + :group-by [(common/grouped-datetime datetime-unit :started_at)]}) date->active (zipmap (map :date active) (map :count active)) new (common/query - {:select [[(common/grouped-datetime datetime-unit :date_joined) :date] - [:%count.* :count]] - :from [:core_user] - :group-by [(common/grouped-datetime datetime-unit :date_joined)]}) + {:select [[(common/grouped-datetime datetime-unit :date_joined) :date] + [:%count.* :count]] + :from [:core_user] + :group-by [(common/grouped-datetime datetime-unit :date_joined)]}) date->new (zipmap (map :date new) (map :count new)) all-dates (sort (keep identity (distinct (concat (keys date->active) (keys date->new)))))] @@ -54,10 +55,9 @@ :active_users (date->active date 0) :new_users (date->new date 0)}))}) - -(defn ^:internal-query-fn most-active - "Query that returns the 10 most active Users (by number of query executions) in descending order." - [] +;; Query that returns the 10 most active Users (by number of query executions) in descending order. +(defmethod audit.i/internal-query ::most-active + [_] {:metadata [[:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :name}] [:name {:display_name "Name", :base_type :type/Name, :remapped_from :user_id}] [:count {:display_name "Query Executions", :base_type :type/Integer}]] @@ -79,73 +79,72 @@ [:%lower.u.first_name :asc]] :limit 10})}) - -(defn ^:internal-query-fn most-saves - "Query that returns the 10 Users with the most saved objects in descending order." - [] +;; Query that returns the 10 Users with the most saved objects in descending order. 
+(defmethod audit.i/internal-query ::most-saves + [_] {:metadata [[:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :user_name}] [:user_name {:display_name "Name", :base_type :type/Name, :remapped_from :user_id}] [:saves {:display_name "Saved Objects", :base_type :type/Integer}]] :results (common/reducible-query - {:with [[:card_saves {:select [:creator_id - [:%count.* :count]] - :from [:report_card] - :group-by [:creator_id]}] - [:dashboard_saves {:select [:creator_id + {:with [[:card_saves {:select [:creator_id [:%count.* :count]] - :from [:report_dashboard] + :from [:report_card] :group-by [:creator_id]}] - [:pulse_saves {:select [:creator_id - [:%count.* :count]] - :from [:pulse] - :group-by [:creator_id]}]] - :select [[:u.id :user_id] - [(common/user-full-name :u) :user_name] - [(hx/+ (common/zero-if-null :card_saves.count) - (common/zero-if-null :dashboard_saves.count) - (common/zero-if-null :pulse_saves.count)) - :saves]] - :from [[:core_user :u]] - :left-join [:card_saves [:= :u.id :card_saves.creator_id] - :dashboard_saves [:= :u.id :dashboard_saves.creator_id] - :pulse_saves [:= :u.id :pulse_saves.creator_id]] - :order-by [[:saves :desc] - [:u.last_name :asc] - [:u.first_name :asc]] - :limit 10})}) - + [:dashboard_saves {:select [:creator_id + [:%count.* :count]] + :from [:report_dashboard] + :group-by [:creator_id]}] + [:pulse_saves {:select [:creator_id + [:%count.* :count]] + :from [:pulse] + :group-by [:creator_id]}]] + :select [[:u.id :user_id] + [(common/user-full-name :u) :user_name] + [(hx/+ (common/zero-if-null :card_saves.count) + (common/zero-if-null :dashboard_saves.count) + (common/zero-if-null :pulse_saves.count)) + :saves]] + :from [[:core_user :u]] + :left-join [:card_saves [:= :u.id :card_saves.creator_id] + :dashboard_saves [:= :u.id :dashboard_saves.creator_id] + :pulse_saves [:= :u.id :pulse_saves.creator_id]] + :order-by [[:saves :desc] + [:u.last_name :asc] + [:u.first_name :asc]] + :limit 10})}) -(defn 
^:internal-query-fn query-execution-time-per-user - "Query that returns the total time spent executing queries, broken out by User, for the top 10 Users." - [] +;; Query that returns the total time spent executing queries, broken out by User, for the top 10 Users. +(defmethod audit.i/internal-query ::query-execution-time-per-user + [_] {:metadata [[:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :name}] [:name {:display_name "Name", :base_type :type/Name, :remapped_from :user_id}] [:execution_time_ms {:display_name "Total Execution Time (ms)", :base_type :type/Decimal}]] :results (common/reducible-query - {:with [[:exec_time {:select [[:%sum.running_time :execution_time_ms] - :qe.executor_id] - :from [[:query_execution :qe]] - :where [:not= nil :qe.executor_id] - :group-by [:qe.executor_id] - :order-by [[:%sum.running_time :desc]] - :limit 10}]] - :select [[:u.id :user_id] - [(common/user-full-name :u) :name] - [(hsql/call :case [:not= :exec_time.execution_time_ms nil] :exec_time.execution_time_ms - :else 0) - :execution_time_ms]] - :from [[:core_user :u]] - :left-join [:exec_time [:= :exec_time.executor_id :u.id]] - :order-by [[:execution_time_ms :desc] - [:%lower.u.last_name :asc] - [:%lower.u.first_name :asc]] - :limit 10})}) + {:with [[:exec_time {:select [[:%sum.running_time :execution_time_ms] + :qe.executor_id] + :from [[:query_execution :qe]] + :where [:not= nil :qe.executor_id] + :group-by [:qe.executor_id] + :order-by [[:%sum.running_time :desc]] + :limit 10}]] + :select [[:u.id :user_id] + [(common/user-full-name :u) :name] + [(hsql/call :case [:not= :exec_time.execution_time_ms nil] :exec_time.execution_time_ms + :else 0) + :execution_time_ms]] + :from [[:core_user :u]] + :left-join [:exec_time [:= :exec_time.executor_id :u.id]] + :order-by [[:execution_time_ms :desc] + [:%lower.u.last_name :asc] + [:%lower.u.first_name :asc]] + :limit 10})}) -(s/defn ^:internal-query-fn table - ([] - (table nil)) +;; A table of all the Users 
for this instance, and various statistics about them (see metadata below). +(s/defmethod audit.i/internal-query ::table + ([query-type] + (audit.i/internal-query query-type nil)) - ([query-string :- (s/maybe s/Str)] + ([_ query-string :- (s/maybe s/Str)] {:metadata [[:user_id {:display_name "User ID", :base_type :type/Integer, :remapped_to :name}] [:name {:display_name "Name", :base_type :type/Name, :remapped_from :user_id}] [:role {:display_name "Role", :base_type :type/Text}] @@ -221,11 +220,10 @@ [:%lower.u.first_name :asc]]} (common/add-search-clause query-string :u.first_name :u.last_name)))})) - -(defn ^:internal-query-fn query-views - "Return a log of all query executions, including information about the Card associated with the query and the - Collection it is in (both, if applicable) and Database/Table referenced by the query." - [] +;; Return a log of all query executions, including information about the Card associated with the query and the +;; Collection it is in (both, if applicable) and Database/Table referenced by the query. +(defmethod audit.i/internal-query ::query-views + [_] {:metadata [[:viewed_on {:display_name "Viewed On", :base_type :type/DateTime}] [:card_id {:display_name "Card ID" :base_type :type/Integer, :remapped_to :card_name}] [:card_name {:display_name "Query", :base_type :type/Text, :remapped_from :card_id}] @@ -267,9 +265,9 @@ :order-by [[:qe.started_at :desc]]}) :xform (map #(update (vec %) 3 codec/base64-encode))}) -(defn ^:internal-query-fn dashboard-views - "Return a log of when all Dashboard views, including the Collection the Dashboard belongs to." - [] +;; Return a log of when all Dashboard views, including the Collection the Dashboard belongs to. 
+(defmethod audit.i/internal-query ::dashboard-views + [_] {:metadata [[:timestamp {:display_name "Viewed on", :base_type :type/DateTime}] [:dashboard_id {:display_name "Dashboard ID", :base_type :type/Integer, :remapped_to :dashboard_name}] [:dashboard_name {:display_name "Dashboard", :base_type :type/Text, :remapped_from :dashboard_id}] diff --git a/enterprise/backend/src/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries.clj b/enterprise/backend/src/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries.clj similarity index 66% rename from enterprise/backend/src/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries.clj rename to enterprise/backend/src/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries.clj index 489e38f478cf..4c9c5b59d32e 100644 --- a/enterprise/backend/src/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries.clj +++ b/enterprise/backend/src/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries.clj @@ -1,17 +1,18 @@ -(ns metabase-enterprise.audit.query-processor.middleware.handle-audit-queries - "Middleware that handles special `internal` type queries. `internal` queries are implementeed directly by Clojure - functions, and do not neccesarily need to query a database to provide results; by default, they completely skip - the rest of the normal QP pipeline. `internal` queries should look like the following: +(ns metabase-enterprise.audit-app.query-processor.middleware.handle-audit-queries + "Middleware that handles special `internal` type queries. `internal` queries are implemented directly by various + implementations of the [[metabase-enterprise.audit-app.interface/internal-query]] multimethod, and do not necessarily + need to query a database to provide results; by default, they completely skip the rest of the normal QP pipeline. 
+ `internal` queries as passed to the Query Processor should look like the following: {:type :internal - :fn \"metabase-enterprise.audit.pages.dashboards/table\" + :fn \"metabase-enterprise.audit-app.pages.dashboards/table\" :args []} ; optional vector of args to pass to the fn above - To run an `internal` query, you must have superuser permissions, and the function itself must be tagged as an - `:internal-query-fn`. This middleware will automatically resolve the function as appropriate, loading its namespace - if needed. + To run an `internal` query, you must have superuser permissions. This middleware will automatically resolve the + function as appropriate, loading its namespace if needed. - (defn ^:internal-query-fn table [] + (defmethod audit.i/internal-query ::table + [_] {:metadata ..., :results ...}) The function should return a map with two keys, `:metadata` and `:results`, in either the 'legacy' or 'reducible' @@ -19,7 +20,7 @@ LEGACY FORMAT: - * `:metadata` is a series of [col-name metadata-map] pairs. + * `:metadata` is a series of [col-name metadata-map] pairs. See [[metabase-enterprise.audit-app.interface/ResultsMetadata]] * `:results` is a series of maps. {:metadata [[:title {:display_name \"Title\", :base_type :type/Text}] @@ -37,22 +38,15 @@ :results (fn [context] ...) :xform ...}" (:require [clojure.data :as data] - [clojure.string :as str] + [metabase-enterprise.audit-app.interface :as audit.i] [metabase.api.common :as api] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] [metabase.query-processor.context :as context] [metabase.query-processor.error-type :as error-type] [metabase.util.i18n :refer [tru]] [metabase.util.schema :as su] [schema.core :as s])) -(def ^:private ResultsMetadata - "Schema for the expected format for `:metadata` returned by an internal query function." 
- (su/non-empty - [[(s/one su/KeywordOrString "field name") - (s/one {:base_type su/FieldType, :display_name su/NonBlankString, s/Keyword s/Any} - "field metadata")]])) - (defn- check-results-and-metadata-keys-match "Primarily for dev and debugging purposes. We can probably take this out when shipping the finished product." [results metadata] @@ -74,7 +68,7 @@ (assoc v :name (name k)))) (s/defn ^:private format-results [{:keys [results metadata]} :- {:results [su/Map] - :metadata ResultsMetadata}] + :metadata audit.i/ResultsMetadata}] (check-results-and-metadata-keys-match results metadata) {:cols (metadata->cols metadata) :rows (for [row results] @@ -94,26 +88,6 @@ to implement paging for all audit app queries automatically." nil) -(def ^:private resolve-internal-query-fn-lock (Object.)) - -(defn- resolve-internal-query-fn - "Returns the varr for the internal query fn." - [qualified-fn-str] - (let [[ns-str] (str/split qualified-fn-str #"/")] - (or - ;; resolve if already available... - (locking resolve-internal-query-fn-lock - (resolve (symbol qualified-fn-str)) - ;; if not, load the namespace... - (require (symbol ns-str)) - ;; ...then try resolving again - (resolve (symbol qualified-fn-str))) - ;; failing that, throw an Exception - (throw - (Exception. - (str (tru "Unable to run internal query function: cannot resolve {0}" - qualified-fn-str))))))) - (defn- reduce-reducible-results [rff context {:keys [metadata results xform], :or {xform identity}}] (let [cols (metadata->cols metadata) reducible-rows (results context) @@ -140,18 +114,12 @@ (api/check-superuser) ;; Make sure audit app is enabled (currently the only use case for internal queries). We can figure out a way to ;; allow non-audit-app queries if and when we add some - (when-not (metastore/enable-audit-app?) + (when-not (premium-features/enable-audit-app?) 
(throw (ex-info (tru "Audit App queries are not enabled on this instance.") {:type error-type/invalid-query}))) - ;;now resolve the query - (let [fn-varr (resolve-internal-query-fn qualified-fn-str)] - ;; Make sure this is actually allowed to be a internal query fn & has the results metadata we'll need - (when-not (:internal-query-fn (meta fn-varr)) - (throw (Exception. (str (tru "Invalid internal query function: {0} is not marked as an ^:internal-query-fn" - qualified-fn-str))))) - (binding [*additional-query-params* (dissoc query :fn :args)] - (let [results (apply @fn-varr args)] - (reduce-results rff context results))))) + (binding [*additional-query-params* (dissoc query :fn :args)] + (let [resolved (apply audit.i/resolve-internal-query qualified-fn-str args)] + (reduce-results rff context resolved)))) (defn handle-internal-queries "Middleware that handles `internal` type queries." diff --git a/enterprise/backend/src/metabase_enterprise/content_management/api/review.clj b/enterprise/backend/src/metabase_enterprise/content_management/api/review.clj new file mode 100644 index 000000000000..3e83095e1108 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/content_management/api/review.clj @@ -0,0 +1,25 @@ +(ns metabase-enterprise.content-management.api.review + (:require [compojure.core :refer [POST]] + [metabase.api.common :as api] + [metabase.models.moderation-review :as moderation-review] + [metabase.moderation :as moderation] + [metabase.util.schema :as su] + [schema.core :as s])) + +(api/defendpoint POST "/" + "Create a new `ModerationReview`." 
+ [:as {{:keys [text moderated_item_id moderated_item_type status]} :body}] + {text (s/maybe s/Str) + moderated_item_id su/IntGreaterThanZero + moderated_item_type moderation/moderated-item-types + status moderation-review/Statuses} + (api/check-superuser) + (let [review-data {:text text + :moderated_item_id moderated_item_id + :moderated_item_type moderated_item_type + :moderator_id api/*current-user-id* + :status status}] + (api/check-404 (moderation/moderated-item review-data)) + (moderation-review/create-review! review-data))) + +(api/define-routes) diff --git a/enterprise/backend/src/metabase_enterprise/content_management/api/routes.clj b/enterprise/backend/src/metabase_enterprise/content_management/api/routes.clj new file mode 100644 index 000000000000..24744bf74d51 --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/content_management/api/routes.clj @@ -0,0 +1,12 @@ +(ns metabase-enterprise.content-management.api.routes + (:require [compojure.core :as compojure :refer [context]] + [metabase-enterprise.api.routes.common :as ee.api.common] + [metabase-enterprise.content-management.api.review :as review] + [metabase.api.routes.common :refer [+auth]])) + +(defn- +require-content-management [handler] + (ee.api.common/+require-premium-feature :content-management handler)) + +(compojure/defroutes ^{:doc "API routes only available if we have a premium token with the `:content-management` feature."} + routes + (context "/moderation-review" [] (+require-content-management (+auth review/routes)))) diff --git a/enterprise/backend/src/metabase_enterprise/enhancements/ee_strategy_impl.clj b/enterprise/backend/src/metabase_enterprise/enhancements/ee_strategy_impl.clj index 524cf82745ca..9c0c7eea62bd 100644 --- a/enterprise/backend/src/metabase_enterprise/enhancements/ee_strategy_impl.clj +++ b/enterprise/backend/src/metabase_enterprise/enhancements/ee_strategy_impl.clj @@ -65,7 +65,7 @@ ;; For `MyProtocol` methods: invoke `ee-impl` if EE enhancements are 
enabled, otherwise invoke `oss-impl` (def impl - (reify-ee-strategy-impl #'settings.metastore/enable-enhancements? ee-impl oss-impl + (reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? ee-impl oss-impl MyProtocol)) At the time of this writing, this only works with first-class Clojure Protocols (as opposed to plain Java diff --git a/enterprise/backend/src/metabase_enterprise/enhancements/integrations/ldap.clj b/enterprise/backend/src/metabase_enterprise/enhancements/integrations/ldap.clj index ca1a4d995706..5b79cd6f45c3 100644 --- a/enterprise/backend/src/metabase_enterprise/enhancements/integrations/ldap.clj +++ b/enterprise/backend/src/metabase_enterprise/enhancements/integrations/ldap.clj @@ -6,7 +6,7 @@ [metabase.integrations.ldap.interface :as i] [metabase.models.setting :as setting :refer [defsetting]] [metabase.models.user :as user :refer [User]] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.util :as u] [metabase.util.i18n :refer [deferred-tru trs]] [metabase.util.schema :as su] @@ -113,5 +113,5 @@ forwards method invocations to `impl`; if EE features *are not* enabled, forwards method invocations to the default OSS impl." ;; TODO -- should we require `:sso` token features for using the LDAP enhancements? - (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-enhancements? impl default-impl/impl + (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? 
impl default-impl/impl LDAPIntegration)) diff --git a/enterprise/backend/src/metabase_enterprise/enhancements/models/native_query_snippet/permissions.clj b/enterprise/backend/src/metabase_enterprise/enhancements/models/native_query_snippet/permissions.clj index 53a6ec4ed69b..ff2b077c73c0 100644 --- a/enterprise/backend/src/metabase_enterprise/enhancements/models/native_query_snippet/permissions.clj +++ b/enterprise/backend/src/metabase_enterprise/enhancements/models/native_query_snippet/permissions.clj @@ -4,7 +4,7 @@ [metabase.models.interface :as i] [metabase.models.native-query-snippet.permissions :as snippet.perms] [metabase.models.permissions :as perms] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.util.schema :as su] [pretty.core :refer [PrettyPrintable]] [schema.core :as s] @@ -46,7 +46,7 @@ "EE implementation of NativeQuerySnippet permissions. Uses Collection permissions instead allowing anyone to view or edit all Snippets. (Only when a valid Enterprise Edition token is present. Otherwise, this forwards method invocations to the default impl)." - (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-enhancements? ee-impl* snippet.perms/default-impl + (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? ee-impl* snippet.perms/default-impl snippet.perms/PermissionsImpl)) (snippet.perms/set-impl! 
ee-impl) diff --git a/enterprise/backend/src/metabase_enterprise/enhancements/models/permissions/block_permissions.clj b/enterprise/backend/src/metabase_enterprise/enhancements/models/permissions/block_permissions.clj new file mode 100644 index 000000000000..9d7c399c889f --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/enhancements/models/permissions/block_permissions.clj @@ -0,0 +1,35 @@ +(ns metabase-enterprise.enhancements.models.permissions.block-permissions + (:require [metabase.api.common :as api] + [metabase.models.permissions :as perms] + [metabase.public-settings.premium-features :as settings.premium-features] + [metabase.query-processor.error-type :as qp.error-type] + [metabase.util.i18n :refer [tru]])) + +(defn- current-user-has-block-permissions-for-database? + [database-or-id] + (contains? @api/*current-user-permissions-set* (perms/database-block-perms-path database-or-id))) + +(defn check-block-permissions + "Assert that block permissions are not in effect for Database for a query that's only allowed to run because of + Collection perms; throw an Exception if they are. Otherwise returns a keyword explaining why the check + succeeded (this is mostly for test/debug purposes). The query is still allowed to run if the current User has + appropriate data permissions from another Group. See the namespace documentation for [[metabase.models.collection]] + for more details. + + Note that this feature is Metabase© Enterprise Edition™ only and only enabled if we have a valid Enterprise Edition™ + token. [[metabase.query-processor.middleware.permissions/check-block-permissions]] invokes this function if it + exists." + [{database-id :database, :as query}] + (cond + (not (settings.premium-features/enable-enhancements?)) + ::enhancements-not-enabled + + (not (current-user-has-block-permissions-for-database? database-id)) + ::no-block-permissions-for-db + + :else + ;; TODO -- come up with a better error message. 
+ (throw (ex-info (tru "Blocked: you are not allowed to run queries against Database {0}." database-id) + {:type qp.error-type/missing-required-permissions + :actual-permissions @api/*current-user-permissions-set* + :permissions-error? true})))) diff --git a/enterprise/backend/src/metabase_enterprise/pulse.clj b/enterprise/backend/src/metabase_enterprise/pulse.clj index 2352b06f9d1b..f364b531f5ac 100644 --- a/enterprise/backend/src/metabase_enterprise/pulse.clj +++ b/enterprise/backend/src/metabase_enterprise/pulse.clj @@ -1,6 +1,8 @@ (ns metabase-enterprise.pulse + "TODO -- this should be moved to `metabase-enterprise..pulse` once we figure out which feature this belongs + to." (:require [metabase-enterprise.enhancements.ee-strategy-impl :as ee-strategy-impl] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.pulse.interface :as i]) (:import metabase.pulse.interface.SubscriptionParameters)) @@ -20,5 +22,5 @@ (def ee-strategy-parameters-impl "Enterprise way of getting dashboard filter parameters" - (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-enhancements? parameters-impl i/default-parameters-impl + (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? 
parameters-impl i/default-parameters-impl i/SubscriptionParameters)) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/api/gtap.clj b/enterprise/backend/src/metabase_enterprise/sandbox/api/gtap.clj index 882157a71597..b26f54d32cdf 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/api/gtap.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/api/gtap.clj @@ -3,7 +3,7 @@ (:require [compojure.core :refer [DELETE GET POST PUT]] [metabase-enterprise.sandbox.models.group-table-access-policy :refer [GroupTableAccessPolicy]] [metabase.api.common :as api] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] [metabase.util :as u] [metabase.util.i18n :refer [tru]] [metabase.util.schema :as su] @@ -67,7 +67,7 @@ "Wrap the Ring handler to make sure sandboxes are enabled before allowing access to the API endpoints." [handler] (fn [request respond raise] - (if-not (metastore/enable-sandboxes?) + (if-not (premium-features/enable-sandboxes?) (raise (ex-info (str (tru "Error: sandboxing is not enabled for this instance.") " " (tru "Please check you have set a valid Enterprise token and try again.")) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/api/routes.clj b/enterprise/backend/src/metabase_enterprise/sandbox/api/routes.clj index 52e4f343801c..ca59d2997330 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/api/routes.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/api/routes.clj @@ -1,23 +1,23 @@ (ns metabase-enterprise.sandbox.api.routes - "Multi-tenant API routes." + "API routes that are only enabled if we have a premium token with the `:sandboxes` feature." 
(:require [compojure.core :as compojure] + [metabase-enterprise.api.routes.common :as ee.api.common] [metabase-enterprise.sandbox.api.gtap :as gtap] [metabase-enterprise.sandbox.api.table :as table] [metabase-enterprise.sandbox.api.user :as user] - [metabase.server.middleware.auth :as middleware.auth])) - -;; this is copied from `metabase.api.routes` because if we require that above we will destroy startup times for `lein -;; ring server` -(def ^:private +auth - "Wrap `routes` so they may only be accessed with proper authentiaction credentials." - middleware.auth/enforce-authentication) + [metabase.api.routes.common :refer [+auth]])) (compojure/defroutes ^{:doc "Ring routes for mt API endpoints."} routes + ;; EE-only sandboxing routes live under `/mt` for historical reasons. `/mt` is for multi-tenant. + ;; + ;; TODO - We should change this to `/sandboxes` or something like that. (compojure/context - "/mt" - [] - - (compojure/routes - (compojure/context "/gtap" [] (+auth gtap/routes)) - (compojure/context "/user" [] (+auth user/routes)))) - (compojure/context "/table" [] (+auth table/routes))) + "/mt" [] + (ee.api.common/+require-premium-feature + :sandboxes + (compojure/routes + (compojure/context "/gtap" [] (+auth gtap/routes)) + (compojure/context "/user" [] (+auth user/routes))))) + ;; when sandboxing is enabled we *replace* GET /api/table/:id/query_metadata with a special EE version. If + ;; sandboxing is not enabled, this passes thru to the OSS implementation of the endpoint. + (compojure/context "/table" [] (ee.api.common/+when-premium-feature :sandboxes (+auth table/routes)))) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/api/util.clj b/enterprise/backend/src/metabase_enterprise/sandbox/api/util.clj index 3802cf4394cd..6561060ebdb7 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/api/util.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/api/util.clj @@ -15,4 +15,4 @@ ;; access they shouldn't have. 
If we don't have permissions, we can't determine whether they are segmented, so ;; throw. (throw (ex-info (str (tru "No permissions found for current user")) - {:status-code 403})))))) + {:status-code 403})))))) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/models/group_table_access_policy.clj b/enterprise/backend/src/metabase_enterprise/sandbox/models/group_table_access_policy.clj index 111fafcd2dd8..a8939352b11c 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/models/group_table_access_policy.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/models/group_table_access_policy.clj @@ -1,7 +1,9 @@ (ns metabase-enterprise.sandbox.models.group-table-access-policy "Model definition for Group Table Access Policy, aka GTAP. A GTAP is useed to control access to a certain Table for a certain PermissionsGroup. Whenever a member of that group attempts to query the Table in question, a Saved Question - specified by the GTAP is instead used as the source of the query." + specified by the GTAP is instead used as the source of the query. + + See documentation in [[metabase.models.permissions]] for more information about the Metabase permissions system." (:require [clojure.tools.logging :as log] [medley.core :as m] [metabase.mbql.normalize :as normalize] @@ -20,6 +22,19 @@ (models/defmodel GroupTableAccessPolicy :group_table_access_policy) +;; This guard is to make sure this file doesn't get compiled twice when building the uberjar -- that will totally +;; screw things up because Toucan models use Potemkin `defrecord+` under the hood. 
+(when *compile-files* + (defonce previous-compilation-trace (atom nil)) + (when @previous-compilation-trace + (println "THIS FILE HAS ALREADY BEEN COMPILED!!!!!") + (println "This compilation trace:") + ((requiring-resolve 'clojure.pprint/pprint) (vec (.getStackTrace (Thread/currentThread)))) + (println "Previous compilation trace:") + ((requiring-resolve 'clojure.pprint/pprint) @previous-compilation-trace) + (throw (ex-info "THIS FILE HAS ALREADY BEEN COMPILED!!!!!" {}))) + (reset! previous-compilation-trace (vec (.getStackTrace (Thread/currentThread))))) + (defn- normalize-attribute-remapping-targets [attribute-remappings] (m/map-vals normalize/normalize diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/models/params/field_values.clj b/enterprise/backend/src/metabase_enterprise/sandbox/models/params/field_values.clj index 8ee2cffcae42..59c80750a311 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/models/params/field_values.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/models/params/field_values.clj @@ -6,7 +6,7 @@ [metabase.models.field :as field :refer [Field]] [metabase.models.field-values :as field-values :refer [FieldValues]] [metabase.models.params.field-values :as params.field-values] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.util :as u] [pretty.core :as pretty] [toucan.db :as db] @@ -83,5 +83,5 @@ "Enterprise version of the fetch FieldValues for current User logic. Uses our EE strategy pattern adapter: if EE features *are* enabled, forwards method invocations to `impl`; if EE features *are not* enabled, forwards method invocations to the default OSS impl." - (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-sandboxes? impl params.field-values/default-impl + (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-sandboxes? 
impl params.field-values/default-impl params.field-values/FieldValuesForCurrentUser)) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/models/permissions/delete_sandboxes.clj b/enterprise/backend/src/metabase_enterprise/sandbox/models/permissions/delete_sandboxes.clj index 9aa41048fe41..a3a005b11d07 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/models/permissions/delete_sandboxes.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/models/permissions/delete_sandboxes.clj @@ -4,7 +4,7 @@ [metabase-enterprise.sandbox.models.group-table-access-policy :refer [GroupTableAccessPolicy]] [metabase.models.permissions.delete-sandboxes :as delete-sandboxes] [metabase.models.table :refer [Table]] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.util :as u] [metabase.util.i18n :refer [tru]] [pretty.core :as pretty] @@ -90,19 +90,17 @@ (defn- delete-gtaps-for-group-database! [{:keys [group-id database-id], :as context} changes] (log/debugf "Deleting unneeded GTAPs for Group %d for Database %d. Graph changes: %s" - group-id database-id (pr-str changes)) - (cond - (= changes :none) - (do - (log/debugf "Group %d no longer has any perms for Database %d, deleting all GTAPs for this DB" group-id database-id) - (delete-gtaps-with-condition! group-id [:= :table.db_id database-id])) - - (= changes :all) + group-id database-id (pr-str changes)) + (if (#{:none :all :block} changes) (do - (log/debugf "Group %d now has full data perms for Database %d, deleting all GTAPs for this DB" group-id database-id) + (log/debugf "Group %d %s for Database %d, deleting all GTAPs for this DB" + group-id + (case changes + :none "no longer has any perms" + :all "now has full data perms" + :block "is now BLOCKED from all non-data-perms access") + database-id) (delete-gtaps-with-condition! 
group-id [:= :table.db_id database-id])) - - :else (doseq [schema-name (set (keys changes))] (delete-gtaps-for-group-schema! (assoc context :schema-name schema-name) @@ -134,7 +132,7 @@ (def ee-strategy-impl "EE impl for Sandbox (GTAP) deletion behavior. Don't use this directly." (ee-strategy-impl/reify-ee-strategy-impl - #'settings.metastore/enable-sandboxes? + #'settings.premium-features/enable-sandboxes? impl delete-sandboxes/oss-default-impl delete-sandboxes/DeleteSandboxes)) diff --git a/enterprise/backend/src/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions.clj b/enterprise/backend/src/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions.clj index d5635d779248..d93d35e38a5c 100644 --- a/enterprise/backend/src/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions.clj +++ b/enterprise/backend/src/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions.clj @@ -1,4 +1,7 @@ (ns metabase-enterprise.sandbox.query-processor.middleware.row-level-restrictions + "Apply segmented a.k.a. sandboxing anti-permissions to the query, i.e. replace sandboxed Tables with the + appropriate [[metabase-enterprise.sandbox.models.group-table-access-policy]]s (GTAPs). See dox + for [[metabase.models.permissions]] for a high-level overview of the Metabase permissions system." (:require [clojure.core.memoize :as memoize] [clojure.tools.logging :as log] [metabase-enterprise.sandbox.models.group-table-access-policy :as gtap :refer [GroupTableAccessPolicy]] @@ -236,7 +239,7 @@ preprocess-source-query (source-query-form-ensure-metadata table-id card-id))) -(s/defn ^:private gtap->perms-set :- #{perms/ObjectPath} +(s/defn ^:private gtap->perms-set :- #{perms/Path} "Calculate the set of permissions needed to run the query associated with a GTAP; this set of permissions is excluded during the normal QP perms check. 
diff --git a/enterprise/backend/src/metabase_enterprise/search/scoring.clj b/enterprise/backend/src/metabase_enterprise/search/scoring.clj index 585726190de0..7e6e0cef865b 100644 --- a/enterprise/backend/src/metabase_enterprise/search/scoring.clj +++ b/enterprise/backend/src/metabase_enterprise/search/scoring.clj @@ -1,6 +1,7 @@ (ns metabase-enterprise.search.scoring + ;; TODO -- move to `metabase-enterprise..*` (:require [metabase-enterprise.enhancements.ee-strategy-impl :as ee-strategy-impl] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [metabase.search.scoring :as scoring])) (defn- official-collection-score @@ -10,6 +11,13 @@ 1 0)) +(defn- verified-score + "A scorer for verified items." + [{:keys [moderated_status]}] + (if (contains? #{"verified"} moderated_status) + 1 + 0)) + (def scoring-impl "Scoring implementation that adds score for items in official collections." (reify scoring/ResultScore @@ -17,10 +25,13 @@ (conj (scoring/score-result scoring/oss-score-impl result) {:weight 2 :score (official-collection-score result) - :name "official collection score"})))) + :name "official collection score"} + {:weight 2 + :score (verified-score result) + :name "verified"})))) (def ee-scoring "Enterprise scoring of results, falling back to the open source version if enterprise is not enabled." - (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-enhancements? + (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? 
scoring-impl scoring/oss-score-impl scoring/ResultScore)) diff --git a/enterprise/backend/src/metabase_enterprise/serialization/cmd.clj b/enterprise/backend/src/metabase_enterprise/serialization/cmd.clj index c613e4db752b..cf9a946a969a 100644 --- a/enterprise/backend/src/metabase_enterprise/serialization/cmd.clj +++ b/enterprise/backend/src/metabase_enterprise/serialization/cmd.clj @@ -55,13 +55,14 @@ (load/load-settings path context) (load/load-dependencies path context)] reload-fns (filter fn? all-res)] - (if-not (empty? reload-fns) - (do (log/info (trs "Finished first pass of load; now performing second pass")) - (doseq [reload-fn reload-fns] - (reload-fn)))) + (when (seq reload-fns) + (log/info (trs "Finished first pass of load; now performing second pass")) + (doseq [reload-fn reload-fns] + (reload-fn))) (log/info (trs "END LOAD from {0} with context {1}" path context)))) (catch Throwable e - (log/error e (trs "ERROR LOAD from {0}: {1}" path (.getMessage e))))))) + (log/error e (trs "ERROR LOAD from {0}: {1}" path (.getMessage e))) + (throw e))))) (defn- select-entities-in-collections ([model collections] @@ -104,13 +105,15 @@ [:= :personal_owner_id (some-> users first u/the-id)]] state-filter]})] - (-> (db/select Collection - {:where [:and - (reduce (fn [acc coll] - (conj acc [:like :location (format "/%d/%%" (:id coll))])) - [:or] base-collections) - state-filter]}) - (into base-collections))))) + (if (empty? 
base-collections) + [] + (-> (db/select Collection + {:where [:and + (reduce (fn [acc coll] + (conj acc [:like :location (format "/%d/%%" (:id coll))])) + [:or] base-collections) + state-filter]}) + (into base-collections)))))) (defn dump diff --git a/enterprise/backend/src/metabase_enterprise/serialization/load.clj b/enterprise/backend/src/metabase_enterprise/serialization/load.clj index 1fa4d005cd4e..83546fa60edb 100644 --- a/enterprise/backend/src/metabase_enterprise/serialization/load.clj +++ b/enterprise/backend/src/metabase_enterprise/serialization/load.clj @@ -6,7 +6,7 @@ [clojure.tools.logging :as log] [medley.core :as m] [metabase-enterprise.serialization.names :as names :refer [fully-qualified-name->context]] - [metabase-enterprise.serialization.upsert :refer [maybe-fixup-card-template-ids! maybe-upsert-many!]] + [metabase-enterprise.serialization.upsert :refer [maybe-upsert-many!]] [metabase.config :as config] [metabase.mbql.normalize :as mbql.normalize] [metabase.mbql.util :as mbql.util] @@ -28,7 +28,7 @@ [metabase.models.segment :refer [Segment]] [metabase.models.setting :as setting] [metabase.models.table :refer [Table]] - [metabase.models.user :refer [User]] + [metabase.models.user :as user :refer [User]] [metabase.shared.models.visualization-settings :as mb.viz] [metabase.util :as u] [metabase.util.date-2 :as u.date] @@ -36,7 +36,8 @@ [toucan.db :as db] [yaml.core :as yaml] [yaml.reader :as y.reader]) - (:import java.time.temporal.Temporal)) + (:import java.time.temporal.Temporal + java.util.UUID)) (extend-type Temporal y.reader/YAMLReader (decode [data] @@ -135,8 +136,16 @@ necessary when dealing with the full MBQL query tree (which can have arbitrary nesting of maps and vectors)." [m] - (reduce (fn [acc ks] - (pull-unresolved-names-up acc (drop-last ks))) m (paths-to-key-in m ::unresolved-names))) + (let [paths (paths-to-key-in m ::unresolved-names)] + (if-not (empty? paths) + (reduce (fn [acc ks] + (let [ks* (drop-last ks)] + (if-not (empty? 
ks*) + (pull-unresolved-names-up acc ks*) + acc))) + m + paths) + m))) (defn- mbql-fully-qualified-names->ids* [entity] @@ -185,8 +194,8 @@ (unresolved-names->string entity nil)) ([entity insert-id] (str - (if-let [nm (:name entity)] (str "\"" nm "\"")) - (if insert-id (format " (inserted as ID %d) " insert-id)) + (when-let [nm (:name entity)] (str "\"" nm "\"")) + (when insert-id (format " (inserted as ID %d) " insert-id)) "missing:\n " (str/join "\n " @@ -338,17 +347,17 @@ (-> (if-let [link-type (::mb.viz/link-type click-behavior)] (case link-type ::mb.viz/card (let [card-id (::mb.viz/link-target-id click-behavior)] - (if (string? card-id) + (when (string? card-id) (update-existing-in-capture-missing click-behavior [::mb.viz/link-target-id] (comp :card fully-qualified-name->context)))) ::mb.viz/dashboard (let [dashboard-id (::mb.viz/link-target-id click-behavior)] - (if (string? dashboard-id) - (update-existing-in-capture-missing - click-behavior - [::mb.viz/link-target-id] - (comp :dashboard fully-qualified-name->context)))) + (when (string? dashboard-id) + (update-existing-in-capture-missing + click-behavior + [::mb.viz/link-target-id] + (comp :dashboard fully-qualified-name->context)))) click-behavior) click-behavior) (m/update-existing ::mb.viz/parameter-mapping resolve-click-behavior-parameter-mapping))) @@ -376,18 +385,46 @@ (pull-unresolved-names-up vs-norm [::mb.viz/click-behavior] resolved-cb)) vs-norm)) -(defn- resolve-column-settings [vs-norm] +(defn- resolve-column-settings + "Resolve the entries in a :column_settings map (which is under a :visualization_settings map). These map entries + may contain fully qualified field names, or even other cards. In case of an unresolved name (i.e. a card that hasn't + yet been loaded), we will track it under ::unresolved-names and revisit on the next pass." 
+ [vs-norm] (if-let [col-settings (::mb.viz/column-settings vs-norm)] (let [resolved-cs (reduce-kv accumulate-converted-column-settings {} col-settings)] (pull-unresolved-names-up vs-norm [::mb.viz/column-settings] resolved-cs)) vs-norm)) +(defn- resolve-table-columns + "Resolve the :table.columns key from a :visualization_settings map, which may contain fully qualified field names. + Such fully qualified names will be converted to the numeric field ID before being filled into the loaded card. Only + other field names (not cards, or other collection based entity types) should be referenced here, so there is no need + to detect or track ::unresolved-names." + [vs-norm] + (if (::mb.viz/table-columns vs-norm) + (letfn [(resolve-table-column-field-ref [[f-type f-str f-md]] + (if (names/fully-qualified-field-name? f-str) + [f-type ((comp :field fully-qualified-name->context) f-str) f-md] + [f-type f-str f-md])) + (resolve-field-id [{:keys [::mb.viz/table-column-field-ref] :as tbl-col}] + (update tbl-col ::mb.viz/table-column-field-ref resolve-table-column-field-ref))] + (update vs-norm ::mb.viz/table-columns (fn [tbl-cols] + (mapv resolve-field-id tbl-cols)))) + vs-norm)) + (defn- resolve-visualization-settings + "Resolve all references from a :visualization_settings map, the various submaps of which may contain: + - fully qualified field names + - fully qualified card or dashboard names + + Any unresolved entities from this resolution process will be tracked via ::unresolved-named so that the card or + dashboard card holding these visualization settings can be revisited in a future pass." 
[entity] (if-let [viz-settings (:visualization_settings entity)] (let [resolved-vs (-> (mb.viz/db->norm viz-settings) resolve-top-level-click-behavior resolve-column-settings + resolve-table-columns mb.viz/norm->db)] (pull-unresolved-names-up entity [:visualization_settings] resolved-vs)) entity)) @@ -397,11 +434,11 @@ {:added "0.40.0"} [context dashboards] (let [dashboard-ids (maybe-upsert-many! context Dashboard - (for [dashboard dashboards] - (-> dashboard - (dissoc :dashboard_cards) - (assoc :collection_id (:collection context) - :creator_id @default-user)))) + (for [dashboard dashboards] + (-> dashboard + (dissoc :dashboard_cards) + (assoc :collection_id (:collection context) + :creator_id @default-user)))) dashboard-cards (map :dashboard_cards dashboards) ;; a function that prepares a dash card for insertion, while also validating to ensure the underlying ;; card_id could be resolved from the fully qualified name @@ -418,12 +455,12 @@ (let [add-keys [:dashboard_cards card-idx :visualization_settings] fixed-names (m/map-vals #(concat add-keys %) unresolved) with-fixed-names (assoc with-viz ::unresolved-names fixed-names)] - (-> acc - (update ::revisit (fn [revisit-map] - (update revisit-map dash-idx #(cons with-fixed-names %)))) - ;; index means something different here than in the Card case (it's actually the index - ;; of the dashboard) - (update ::revisit-index #(conj % dash-idx)))) + (-> acc + (update ::revisit (fn [revisit-map] + (update revisit-map dash-idx #(cons with-fixed-names %)))) + ;; index means something different here than in the Card case (it's actually the index + ;; of the dashboard) + (update ::revisit-index #(conj % dash-idx)))) (update acc ::process #(conj % with-viz))))) prep-init-acc {::process [] ::revisit-index #{} ::revisit {}} filtered-cards (reduce-kv @@ -439,14 +476,14 @@ dashcard-ids (maybe-upsert-many! 
context DashboardCard (map #(dissoc % :series) proceed-cards)) series-pairs (map vector (map :series proceed-cards) dashcard-ids)] (maybe-upsert-many! context DashboardCardSeries - (for [[series dashboard-card-id] series-pairs - dashboard-card-series series - :when (and dashboard-card-series dashboard-card-id)] - (-> dashboard-card-series - (assoc :dashboardcard_id dashboard-card-id) - (update :card_id fully-qualified-name->card-id)))) + (for [[series dashboard-card-id] series-pairs + dashboard-card-series series + :when (and dashboard-card-series dashboard-card-id)] + (-> dashboard-card-series + (assoc :dashboardcard_id dashboard-card-id) + (update :card_id fully-qualified-name->card-id)))) (let [revisit-dashboards (map (partial nth dashboards) revisit-indexes)] - (if-not (empty? revisit-dashboards) + (when (seq revisit-dashboards) (let [revisit-map (::revisit filtered-cards) revisit-inf-fn (fn [[dash-idx dashcards]] (format @@ -462,7 +499,7 @@ "Retrying dashboards for collection %s: %s" (or (:collection context) "root") (str/join ", " (map :name revisit-dashboards))) - (load-dashboards context revisit-dashboards))))))) + (load-dashboards (assoc context :mode :update) revisit-dashboards))))))) (defmethod load "dashboards" [path context] @@ -496,7 +533,7 @@ channel channels :when pulse-id] (assoc channel :pulse_id pulse-id))) - (if-not (empty? 
revisit) + (when (seq revisit) (let [revisit-info-map (group-by ::pulse-name revisit)] (log/infof "Unresolved references for pulses in collection %s; will reload after first pass complete:%n%s%n" (or (:collection context) "root") @@ -509,7 +546,7 @@ (fn [] (log/infof "Reloading pulses from collection %d" (:collection context)) (let [pulse-indexes (map ::pulse-index revisit)] - (load-pulses (map (partial nth pulses) pulse-indexes) context))))))) + (load-pulses (map (partial nth pulses) pulse-indexes) (assoc context :mode :update)))))))) (defmethod load "pulses" [path context] @@ -603,17 +640,9 @@ {::revisit [] ::revisit-index #{} ::process []} (vec resolved-cards)) dummy-insert-cards (not-empty (::revisit grouped-cards)) - process-cards (::process grouped-cards) - touched-card-ids (maybe-upsert-many! - context Card - process-cards)] - (maybe-fixup-card-template-ids! - (assoc context :mode :update) - Card - (for [card (slurp-many paths)] (resolve-card card (assoc context :mode :update))) - touched-card-ids) - - (if dummy-insert-cards + process-cards (::process grouped-cards)] + (maybe-upsert-many! context Card process-cards) + (when dummy-insert-cards (let [dummy-inserted-ids (maybe-upsert-many! context Card @@ -628,17 +657,46 @@ (fn [] (log/infof "Attempting to reload cards in collection %d" (:collection context)) (let [revisit-indexes (::revisit-index grouped-cards)] - (load-cards context paths (mapv (partial nth cards) revisit-indexes)))))))) + (load-cards (assoc context :mode :update) paths (mapv (partial nth cards) revisit-indexes)))))))) (defmethod load "cards" [path context] (binding [names/*suppress-log-name-lookup-exception* true] (load-cards context (list-dirs path) nil))) +(defn- pre-insert-user + "A function called on each User instance before it is inserted (via upsert)." 
+ [user] + (log/infof "User with email %s is new to target DB; setting a random password" (:email user)) + (assoc user :password (str (UUID/randomUUID)))) + +;; leaving comment out for now (deliberately), because this will send a password reset email to newly inserted users +;; when enabled in a future release; see `defmethod load "users"` below +#_(defn- post-insert-user + "A function called on the ID of each `User` instance after it is inserted (via upsert)." + [user-id] + (when-let [{email :email, google-auth? :google_auth, is-active? :is_active} + (db/select-one [User :email :google_auth :is_active] :id user-id)] + (let [reset-token (user/set-password-reset-token! user-id) + site-url (public-settings/site-url) + password-reset-url (str site-url "/auth/reset_password/" reset-token) + ;; in a web server context, the server-name ultimately comes from ServletRequest/getServerName + ;; (i.e. the Java class, via Ring); this is the closest approximation in our batch context + server-name (.getHost (URL. site-url))] + (let [email-res (email/send-password-reset-email! email google-auth? server-name password-reset-url is-active?)] + (if (:error email-res) + (log/infof "Failed to send password reset email generated for user ID %d (%s): %s" + user-id + email + (:message email-res)) + (log/infof "Password reset email generated for user ID %d (%s)" user-id email))) + user-id))) + (defmethod load "users" [path context] ;; Currently we only serialize the new owner user, so it's fine to ignore mode setting - (maybe-upsert-many! context User + ;; add :post-insert-fn post-insert-user back to start sending password reset emails + (maybe-upsert-many! (assoc context :pre-insert-fn pre-insert-user) User (for [user (slurp-dir path)] (dissoc user :password)))) @@ -650,7 +708,7 @@ (defn- make-reload-fn [all-results] (let [all-fns (filter fn? all-results)] - (if-not (empty? 
all-fns) + (when (seq all-fns) (let [new-fns (doall all-fns)] (fn [] (make-reload-fn (for [reload-fn new-fns] diff --git a/enterprise/backend/src/metabase_enterprise/serialization/names.clj b/enterprise/backend/src/metabase_enterprise/serialization/names.clj index 0cb368e869cf..e4eaca955cc3 100644 --- a/enterprise/backend/src/metabase_enterprise/serialization/names.clj +++ b/enterprise/backend/src/metabase_enterprise/serialization/names.clj @@ -72,7 +72,7 @@ (str (->> segment :table_id (fully-qualified-name Table)) "/segments/" (safe-name segment))) (defn- local-collection-name [collection] - (let [ns-part (if-let [coll-ns (:namespace collection)] + (let [ns-part (when-let [coll-ns (:namespace collection)] (str ":" (if (keyword? coll-ns) (name coll-ns) coll-ns) "/"))] (str "/collections/" ns-part (safe-name collection)))) diff --git a/enterprise/backend/src/metabase_enterprise/serialization/serialize.clj b/enterprise/backend/src/metabase_enterprise/serialization/serialize.clj index d02916dec508..e500e2dbec14 100644 --- a/enterprise/backend/src/metabase_enterprise/serialization/serialize.clj +++ b/enterprise/backend/src/metabase_enterprise/serialization/serialize.clj @@ -208,7 +208,8 @@ [card] (-> card (m/update-existing :table_id (partial fully-qualified-name Table)) - (update :database_id (partial fully-qualified-name Database)))) + (update :database_id (partial fully-qualified-name Database)) + (m/update-existing :visualization_settings convert-viz-settings))) (defmethod serialize-one (type Pulse) [pulse] diff --git a/enterprise/backend/src/metabase_enterprise/serialization/upsert.clj b/enterprise/backend/src/metabase_enterprise/serialization/upsert.clj index 061c53a3f7b4..3e686877cf22 100644 --- a/enterprise/backend/src/metabase_enterprise/serialization/upsert.clj +++ b/enterprise/backend/src/metabase_enterprise/serialization/upsert.clj @@ -117,8 +117,19 @@ :insert))))))) (defn maybe-upsert-many! 
- "Batch upsert-or-skip" - [{:keys [mode on-error] :as context} model entities] + "Batch upsert many entities. + + Within the `context` map, the following keys are recognized: + `mode` indicates mode of operation for existing entities (`:upsert` or `:skip`), as per the `identity-condition` + `on-error` indicates what to do in case of upsert error (`:continue` or `:abort`) + `pre-insert-fn` (optional) is a function to call on each entity to be inserted, before it is inserted + `post-insert-fn` (optional) is a function to call on each entity to be inserted, after it is inserted" + [{:keys [mode on-error pre-insert-fn post-insert-fn] + :or {pre-insert-fn identity + post-insert-fn identity} + :as context} + model + entities] (let [{:keys [update insert skip]} (group-by-action context model entities)] (doseq [[_ entity _] insert] (log/info (trs "Inserting {0}" (name-for-logging (name model) entity)))) @@ -131,7 +142,8 @@ (->> (concat (for [[position _ existing] skip] [(u/the-id existing) position]) - (map vector (maybe-insert-many! model on-error (map second insert)) + (map vector (map post-insert-fn + (maybe-insert-many! model on-error (map (comp pre-insert-fn second) insert))) (map first insert)) (for [[position entity existing] update] (let [id (u/the-id existing)] @@ -143,14 +155,3 @@ [id position]))) (sort-by second) (map first)))) - -(defn maybe-fixup-card-template-ids! - "Upserts `entities` that are in `selected-ids`. Cards with template-tags that refer to other cards need a second pass - of fixing the card-ids. To not overwrite cards that were skipped in previous step, classify entities and validate - against the ones that were just modified." - [context model entities selected-ids] - (let [{:keys [update _ _]} (group-by-action context model entities) - id-set (set selected-ids) - final-ents (filter #(id-set (:id (nth % 2))) update)] - (maybe-upsert-many! 
context model - (map second final-ents)))) diff --git a/enterprise/backend/src/metabase_enterprise/sso/api/interface.clj b/enterprise/backend/src/metabase_enterprise/sso/api/interface.clj new file mode 100644 index 000000000000..76e70a5eccfa --- /dev/null +++ b/enterprise/backend/src/metabase_enterprise/sso/api/interface.clj @@ -0,0 +1,34 @@ +(ns metabase-enterprise.sso.api.interface + (:require [metabase-enterprise.sso.integrations.sso-settings :as sso-settings] + [metabase.util.i18n :refer [tru]])) + +(defn- sso-backend + "Function that powers the defmulti in figuring out which SSO backend to use. It might be that we need to have more + complex logic around this, but now it's just a simple priority. If SAML is configured use that otherwise JWT" + [_] + (cond + (sso-settings/saml-configured?) :saml + (sso-settings/jwt-enabled) :jwt + :else nil)) + +(defmulti sso-get + "Multi-method for supporting the first part of an SSO signin request. An implementation of this method will usually + result in a redirect to an SSO backend" + sso-backend) + +(defmulti sso-post + "Multi-method for supporting a POST-back from an SSO signin request. An implementation of this method will need to + validate the POST from the SSO backend and successfully log the user into Metabase." 
+ sso-backend) + +(defn- throw-not-configured-error [] + (throw (ex-info (str (tru "SSO has not been enabled and/or configured")) + {:status-code 400}))) + +(defmethod sso-get :default + [_] + (throw-not-configured-error)) + +(defmethod sso-post :default + [_] + (throw-not-configured-error)) diff --git a/enterprise/backend/src/metabase_enterprise/sso/api/routes.clj b/enterprise/backend/src/metabase_enterprise/sso/api/routes.clj index c2ee1580ef62..d5484ad96967 100644 --- a/enterprise/backend/src/metabase_enterprise/sso/api/routes.clj +++ b/enterprise/backend/src/metabase_enterprise/sso/api/routes.clj @@ -2,7 +2,12 @@ (:require [compojure.core :as compojure] [metabase-enterprise.sso.api.sso :as sso])) -;; This needs to be installed in the `metabase.server.routes/routes` -- not `metabase.api.routes/routes` !!! +;; This needs to be injected into [[metabase.server.routes/routes]] -- not [[metabase.api.routes/routes]] !!! +;; +;; TODO -- should we make a `metabase-enterprise.routes` namespace where this can live instead of injecting it +;; directly? 
+;; +;; TODO -- we need to feature-flag this based on the `:sso` feature (compojure/defroutes ^{:doc "Ring routes for auth (SAML) API endpoints."} routes (compojure/context "/auth" diff --git a/enterprise/backend/src/metabase_enterprise/sso/api/sso.clj b/enterprise/backend/src/metabase_enterprise/sso/api/sso.clj index f6bc02576fac..39f7cad3f011 100644 --- a/enterprise/backend/src/metabase_enterprise/sso/api/sso.clj +++ b/enterprise/backend/src/metabase_enterprise/sso/api/sso.clj @@ -5,60 +5,30 @@ we can have a uniform interface both via the API and code" (:require [clojure.tools.logging :as log] [compojure.core :refer [GET POST]] - [metabase-enterprise.sso.integrations.sso-settings :as sso-settings] + [metabase-enterprise.sso.api.interface :as sso.i] + metabase-enterprise.sso.integrations.jwt + metabase-enterprise.sso.integrations.saml [metabase.api.common :as api] - [metabase.plugins.classloader :as classloader] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] [metabase.util :as u] [metabase.util.i18n :refer [trs tru]] [stencil.core :as stencil])) -(defn- sso-backend - "Function that powers the defmulti in figuring out which SSO backend to use. It might be that we need to have more - complex logic around this, but now it's just a simple priority. If SAML is configured use that otherwise JWT" - [_] - ;; load the SSO integrations so their implementations for the multimethods below are available. Can't load in - ;; `:require` because it would cause a circular ref / those namespaces aren't used here at any rate - ;; (`cljr-clean-namespace` would remove them) - (classloader/require '[metabase-enterprise.sso.integrations jwt saml]) - (cond - (sso-settings/saml-configured?) :saml - (sso-settings/jwt-enabled) :jwt - :else nil)) +;; load the SSO integrations so their implementations for the multimethods below are available. 
+(comment metabase-enterprise.sso.integrations.jwt/keep-me + metabase-enterprise.sso.integrations.saml/keep-me) -(defmulti sso-get - "Multi-method for supporting the first part of an SSO signin request. An implementation of this method will usually - result in a redirect to an SSO backend" - sso-backend) - -(defmulti sso-post - "Multi-method for supporting a POST-back from an SSO signin request. An implementation of this method will need to - validate the POST from the SSO backend and successfully log the user into Metabase." - sso-backend) - -(defn- throw-not-configured-error [] - (throw (ex-info (str (tru "SSO has not been enabled and/or configured")) - {:status-code 400}))) - -(defmethod sso-get :default - [_] - (throw-not-configured-error)) - -(defmethod sso-post :default - [_] - (throw-not-configured-error)) - -(defn- throw-if-no-metastore-token [] - (when-not (metastore/enable-sso?) +(defn- throw-if-no-premium-features-token [] + (when-not (premium-features/enable-sso?) (throw (ex-info (str (tru "SSO requires a valid token")) {:status-code 403})))) (api/defendpoint GET "/" "SSO entry-point for an SSO user that has not logged in yet" {:as req} - (throw-if-no-metastore-token) + (throw-if-no-premium-features-token) (try - (sso-get req) + (sso.i/sso-get req) (catch Throwable e (log/error #_e (trs "Error returning SSO entry point")) (throw e)))) @@ -76,9 +46,9 @@ (api/defendpoint POST "/" "Route the SSO backends call with successful login details" {:as req} - (throw-if-no-metastore-token) + (throw-if-no-premium-features-token) (try - (sso-post req) + (sso.i/sso-post req) (catch Throwable e (log/error e (trs "Error logging in")) (sso-error-page e)))) diff --git a/enterprise/backend/src/metabase_enterprise/sso/integrations/jwt.clj b/enterprise/backend/src/metabase_enterprise/sso/integrations/jwt.clj index cd5d07e1f76c..0b5848cf57a8 100644 --- a/enterprise/backend/src/metabase_enterprise/sso/integrations/jwt.clj +++ 
b/enterprise/backend/src/metabase_enterprise/sso/integrations/jwt.clj @@ -1,7 +1,7 @@ (ns metabase-enterprise.sso.integrations.jwt "Implementation of the JWT backend for sso" (:require [buddy.sign.jwt :as jwt] - [metabase-enterprise.sso.api.sso :as sso] + [metabase-enterprise.sso.api.interface :as sso.i] [metabase-enterprise.sso.integrations.sso-settings :as sso-settings] [metabase-enterprise.sso.integrations.sso-utils :as sso-utils] [metabase.api.common :as api] @@ -90,7 +90,7 @@ (api/check (sso-settings/jwt-configured?) [400 (tru "JWT SSO has not been enabled and/or configured")])) -(defmethod sso/sso-get :jwt +(defmethod sso.i/sso-get :jwt [{{:keys [jwt redirect]} :params, :as request}] (check-jwt-enabled) (if jwt @@ -99,6 +99,6 @@ (when redirect (str "?return_to=" redirect)))))) -(defmethod sso/sso-post :jwt +(defmethod sso.i/sso-post :jwt [req] (throw (ex-info "POST not valid for JWT SSO requests" {:status-code 400}))) diff --git a/enterprise/backend/src/metabase_enterprise/sso/integrations/saml.clj b/enterprise/backend/src/metabase_enterprise/sso/integrations/saml.clj index d9fbd0935c24..89d22dc27f52 100644 --- a/enterprise/backend/src/metabase_enterprise/sso/integrations/saml.clj +++ b/enterprise/backend/src/metabase_enterprise/sso/integrations/saml.clj @@ -20,7 +20,7 @@ [clojure.string :as str] [clojure.tools.logging :as log] [medley.core :as m] - [metabase-enterprise.sso.api.sso :as sso] + [metabase-enterprise.sso.api.interface :as sso.i] [metabase-enterprise.sso.integrations.sso-settings :as sso-settings] [metabase-enterprise.sso.integrations.sso-utils :as sso-utils] [metabase.api.common :as api] @@ -107,7 +107,7 @@ (api/check (sso-settings/saml-configured?) 
[400 (tru "SAML has not been enabled and/or configured")])) -(defmethod sso/sso-get :saml +(defmethod sso.i/sso-get :saml ;; Initial call that will result in a redirect to the IDP along with information about how the IDP can authenticate ;; and redirect them back to us [req] @@ -170,7 +170,7 @@ (when (u/base64-string? s) (codecs/bytes->str (codec/base64-decode s)))) -(defmethod sso/sso-post :saml +(defmethod sso.i/sso-post :saml ;; Does the verification of the IDP's response and 'logs the user in'. The attributes are available in the response: ;; `(get-in saml-info [:assertions :attrs]) [{:keys [params], :as request}] diff --git a/enterprise/backend/test/metabase_enterprise/advanced_config/api/pulse_test.clj b/enterprise/backend/test/metabase_enterprise/advanced_config/api/pulse_test.clj new file mode 100644 index 000000000000..47b0c02c02cb --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/advanced_config/api/pulse_test.clj @@ -0,0 +1,61 @@ +(ns metabase-enterprise.advanced-config.api.pulse-test + (:require [clojure.test :refer :all] + [metabase.models :refer [Card]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.test :as mt] + [metabase.util :as u])) + +(deftest test-pulse-endpoint-should-respect-email-domain-allow-list-test + (testing "POST /api/pulse/test" + (mt/with-temp Card [card {:dataset_query (mt/mbql-query venues)}] + ;; make sure we validate raw emails whether they're part of `:details` or part of `:recipients` -- we + ;; technically allow either right now + (doseq [channel [{:details {:emails ["test@metabase.com"]}} + {:recipients [{:email "test@metabase.com"}] + :details {}}]] + (testing (format "\nChannel = %s\n" (u/pprint-to-str channel)) + (letfn [(send! 
[expected-status-code] + (let [pulse-name (mt/random-name)] + (mt/with-fake-inbox + {:response (mt/user-http-request + :rasta :post expected-status-code "pulse/test" + {:name pulse-name + :cards [{:id (u/the-id card) + :include_csv false + :include_xls false + :dashboard_card_id nil}] + :channels [(merge {:enabled true + :channel_type "email" + :schedule_type "daily" + :schedule_hour 12 + :schedule_day nil} + channel)] + :skip_if_empty false}) + :recipients (set (keys (mt/regex-email-bodies (re-pattern pulse-name))))})))] + (testing "allowed email -- should pass" + (mt/with-temporary-setting-values [subscription-allowed-domains "metabase.com"] + (premium-features-test/with-premium-features #{:advanced-config} + (let [{:keys [response recipients]} (send! 200)] + (is (= {:ok true} + response)) + (is (contains? recipients "test@metabase.com")))) + (testing "No :advanced-config token" + (premium-features-test/with-premium-features #{} + (let [{:keys [response recipients]} (send! 200)] + (is (= {:ok true} + response)) + (is (contains? recipients "test@metabase.com"))))))) + (testing "disallowed email" + (mt/with-temporary-setting-values [subscription-allowed-domains "example.com"] + (testing "should fail when :advanced-config is enabled" + (premium-features-test/with-premium-features #{:advanced-config} + (let [{:keys [response recipients]} (send! 403)] + (is (= "You cannot create new subscriptions for the domain \"metabase.com\". Allowed domains are: example.com" + (:message response))) + (is (not (contains? recipients "test@metabase.com")))))) + (testing "No :advanced-config token -- should still pass" + (premium-features-test/with-premium-features #{} + (let [{:keys [response recipients]} (send! 200)] + (is (= {:ok true} + response)) + (is (contains? 
recipients "test@metabase.com"))))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/advanced_config/models/pulse_channel_test.clj b/enterprise/backend/test/metabase_enterprise/advanced_config/models/pulse_channel_test.clj new file mode 100644 index 000000000000..bd52cdc1e3cd --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/advanced_config/models/pulse_channel_test.clj @@ -0,0 +1,51 @@ +(ns metabase-enterprise.advanced-config.models.pulse-channel-test + (:require [clojure.string :as str] + [clojure.test :refer :all] + [metabase.models :refer [Pulse PulseChannel]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.test :as mt] + [metabase.util :as u] + [toucan.db :as db] + [toucan.util.test :as tt])) + +(deftest validate-email-domains-test + (mt/with-temp Pulse [{pulse-id :id}] + (doseq [operation [:create :update] + enable-advanced-config? [true false] + allowed-domains [nil + #{"metabase.com"} + #{"metabase.com" "toucan.farm"}] + emails [nil + ["cam@metabase.com"] + ["cam@metabase.com" "cam@toucan.farm"] + ["cam@metabase.com" "cam@disallowed-domain.com"]] + :let [fail? (and enable-advanced-config? + allowed-domains + (not (every? (fn [email] + (contains? allowed-domains (u/email->domain email))) + emails)))]] + (premium-features-test/with-premium-features (if enable-advanced-config? + #{:advanced-config} + #{}) + (mt/with-temporary-setting-values [subscription-allowed-domains (str/join "," allowed-domains)] + ;; `with-premium-features` and `with-temporary-setting-values` will add `testing` context for the other + ;; stuff. + (testing (str (format "\nOperation = %s" operation) + (format "\nEmails = %s" (pr-str emails))) + (let [thunk (case operation + :create + #(db/insert! PulseChannel + (merge (tt/with-temp-defaults PulseChannel) + {:pulse_id pulse-id, :details {:emails emails}})) + + :update + #(mt/with-temp PulseChannel [{pulse-channel-id :id} {:pulse_id pulse-id}] + (db/update! 
PulseChannel pulse-channel-id, :details {:emails emails})))] + (if fail? + (testing "should fail" + (is (thrown-with-msg? + clojure.lang.ExceptionInfo + #"You cannot create new subscriptions for the domain \"[\w@\.-]+\". Allowed domains are: .+" + (thunk)))) + (testing "should succeed" + (is (thunk))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit/pages_test.clj b/enterprise/backend/test/metabase_enterprise/audit/pages_test.clj deleted file mode 100644 index 16fd594c2487..000000000000 --- a/enterprise/backend/test/metabase_enterprise/audit/pages_test.clj +++ /dev/null @@ -1,91 +0,0 @@ -(ns metabase-enterprise.audit.pages-test - (:require [clojure.java.classpath :as classpath] - [clojure.string :as str] - [clojure.test :refer :all] - [clojure.tools.namespace.find :as ns-find] - [metabase.models :refer [Card Dashboard DashboardCard Database Table]] - [metabase.plugins.classloader :as classloader] - [metabase.public-settings.metastore-test :as metastore-test] - [metabase.query-processor :as qp] - [metabase.query-processor.util :as qp-util] - [metabase.test :as mt] - [metabase.test.fixtures :as fixtures] - [metabase.util :as u] - [ring.util.codec :as codec] - [schema.core :as s])) - -(use-fixtures :once (fixtures/initialize :db)) - -(deftest preconditions-test - (classloader/require 'metabase-enterprise.audit.pages.dashboards) - (testing "the query should exist" - (is (some? (resolve (symbol "metabase-enterprise.audit.pages.dashboards/most-popular-with-avg-speed"))))) - - (testing "test that a query will fail if not ran by an admin" - (metastore-test/with-metastore-token-features #{:audit-app} - (is (= {:status "failed", :error "You don't have permissions to do that."} - (-> (mt/user-http-request :lucky :post 202 "dataset" - {:type :internal - :fn "metabase-enterprise.audit.pages.dashboards/most-popular-with-avg-speed"}) - (select-keys [:status :error])))))) - - (testing "ok, now try to run it. 
Should fail because we don't have audit-app enabled" - (metastore-test/with-metastore-token-features nil - (is (= {:status "failed", :error "Audit App queries are not enabled on this instance."} - (-> (mt/user-http-request :crowberto :post 202 "dataset" - {:type :internal - :fn "metabase-enterprise.audit.pages.dashboards/most-popular-with-avg-speed"}) - (select-keys [:status :error]))))))) - -(defn- all-queries [] - (for [ns-symb (ns-find/find-namespaces (classpath/system-classpath)) - :when (and (str/starts-with? (name ns-symb) "metabase-enterprise.audit.pages") - (not (str/ends-with? (name ns-symb) "-test"))) - [_ varr] (do (classloader/require ns-symb) - (ns-interns ns-symb)) - :when (:internal-query-fn (meta varr))] - varr)) - -(defn- varr->query [varr {:keys [database table card dash]}] - (let [mta (meta varr) - fn-str (str (ns-name (:ns mta)) "/" (:name mta)) - arglist (mapv keyword (first (:arglists mta)))] - {:type :internal - :fn fn-str - :args (for [arg arglist] - (case arg - :datetime-unit "day" - :dashboard-id (u/the-id dash) - :card-id (u/the-id card) - :user-id (mt/user->id :crowberto) - :database-id (u/the-id database) - :table-id (u/the-id table) - :model "card" - :query-hash (codec/base64-encode (qp-util/query-hash {:database 1, :type :native}))))})) - -(defn- test-varr - [varr objects] - (testing (format "%s %s:%d" varr (ns-name (:ns (meta varr))) (:line (meta varr))) - (let [query (varr->query varr objects)] - (testing (format "\nquery =\n%s" (u/pprint-to-str query)) - (is (schema= {:status (s/eq :completed) - s/Keyword s/Any} - (qp/process-query query))))))) - -(defn- do-with-temp-objects [f] - (mt/with-temp* [Database [database] - Table [table {:db_id (u/the-id database)}] - Card [card {:table_id (u/the-id table), :database_id (u/the-id database)}] - Dashboard [dash] - DashboardCard [_ {:card_id (u/the-id card), :dashboard_id (u/the-id dash)}]] - (f {:database database, :table table, :card card, :dash dash}))) - -(defmacro ^:private 
with-temp-objects [[objects-binding] & body] - `(do-with-temp-objects (fn [~objects-binding] ~@body))) - -(deftest all-queries-test - (mt/with-test-user :crowberto - (with-temp-objects [objects] - (metastore-test/with-metastore-token-features #{:audit-app} - (doseq [varr (all-queries)] - (test-varr varr objects)))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit_app/api/user_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/api/user_test.clj new file mode 100644 index 000000000000..4ea38ed8df0b --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/audit_app/api/user_test.clj @@ -0,0 +1,84 @@ +(ns metabase-enterprise.audit-app.api.user-test + (:require [clojure.test :refer :all] + [metabase.models :refer [Card Dashboard DashboardCard Pulse PulseCard PulseChannel PulseChannelRecipient User]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.test :as mt] + [toucan.db :as db])) + +(deftest delete-subscriptions-test + (testing "DELETE /api/ee/audit-app/user/:id/subscriptions" + (testing "Should require a token with `:audit-app`" + (premium-features-test/with-premium-features #{} + (mt/with-temp User [{user-id :id}] + (is (= "This API endpoint is only enabled if you have a premium token with the :audit-app feature." 
+ (mt/user-http-request user-id + :delete 402 + (format "ee/audit-app/user/%d/subscriptions" user-id))))))) + + (premium-features-test/with-premium-features #{:audit-app} + (doseq [run-type [:admin :non-admin]] + (mt/with-temp* [User [{user-id :id}] + Card [{card-id :id}] + ;; Alert, created by a different User + Pulse [{alert-id :id} {:alert_condition "rows" + :alert_first_only false + :name nil}] + PulseCard [_ {:pulse_id alert-id + :card_id card-id}] + PulseChannel [{alert-chan-id :id} {:pulse_id alert-id}] + PulseChannelRecipient [_ {:user_id user-id + :pulse_channel_id alert-chan-id}] + ;; DashboardSubscription, created by this User; multiple recipients + Dashboard [{dashboard-id :id}] + DashboardCard [{dashcard-id :id} {:dashboard_id dashboard-id + :card_id card-id}] + Pulse [{dash-sub-id :id} {:dashboard_id dashboard-id + :creator_id user-id}] + PulseCard [_ {:pulse_id dash-sub-id + :card_id card-id + :dashboard_card_id dashcard-id}] + PulseChannel [{dash-sub-chan-id :id} {:pulse_id dash-sub-id}] + PulseChannelRecipient [_ {:user_id user-id + :pulse_channel_id dash-sub-chan-id}] + PulseChannelRecipient [_ {:user_id (mt/user->id :rasta) + :pulse_channel_id dash-sub-chan-id}]] + (letfn [(describe-objects [] + {:num-subscriptions (db/count PulseChannelRecipient :user_id user-id) + :alert-archived? (db/select-one-field :archived Pulse :id alert-id) + :dashboard-subscription-archived? (db/select-one-field :archived Pulse :id dash-sub-id)}) + (api-delete-subscriptions! [request-user-name-or-id expected-status-code] + (mt/user-http-request request-user-name-or-id + :delete expected-status-code + (format "ee/audit-app/user/%d/subscriptions" user-id)))] + (testing "Sanity check: User should have 2 subscriptions (1 Alert, 1 DashboardSubscription)" + (is (= {:num-subscriptions 2 + :alert-archived? false + :dashboard-subscription-archived? 
false} + (describe-objects)))) + (case run-type + :non-admin + (testing "Non-admin" + (testing "should not be allowed to delete all subscriptions for another User" + (is (= "You don't have permissions to do that." + (api-delete-subscriptions! :rasta 403))) + (is (= {:num-subscriptions 2 + :alert-archived? false + :dashboard-subscription-archived? false} + (describe-objects)))) + (testing "should be allowed to delete all subscriptions for themselves." + (is (nil? (api-delete-subscriptions! user-id 204))) + (testing (str "\nAlert should get archived because this User was the last subscriber." + "\nDashboardSubscription should get archived because this User created it.") + (is (= {:num-subscriptions 0 + :alert-archived? true + :dashboard-subscription-archived? true} + (describe-objects)))))) + + :admin + (testing "Admin should be allowed to delete all subscriptions for another User" + (is (nil? (api-delete-subscriptions! :crowberto 204))) + (testing "\nAlert and DashboardSubscription should have gotten archived as well" + (is (= {:num-subscriptions 0 + :alert-archived? true + :dashboard-subscription-archived? 
true} + (describe-objects)))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit_app/pages/alerts_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/pages/alerts_test.clj new file mode 100644 index 000000000000..ebb4a759bfa0 --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/audit_app/pages/alerts_test.clj @@ -0,0 +1,88 @@ +(ns metabase-enterprise.audit-app.pages.alerts-test + (:require [clojure.string :as str] + [clojure.test :refer :all] + [metabase-enterprise.audit-app.pages.alerts :as audit.alerts] + [metabase.models :refer [Card Collection Pulse PulseCard PulseChannel PulseChannelRecipient]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.query-processor :as qp] + [metabase.test :as mt] + [metabase.util :as u] + [toucan.db :as db])) + +(defn- alerts [card-name] + (mt/with-test-user :crowberto + (premium-features-test/with-premium-features #{:audit-app} + (qp/process-query + {:type :internal + :fn (u/qualified-name ::audit.alerts/table) + :args [card-name]})))) + +(deftest table-test + (is (= [] + (mt/rows (alerts (mt/random-name))))) + (let [card-name (mt/random-name)] + (mt/with-temp Collection [{collection-id :id, collection-name :name}] + ;; test with both the Root Collection and a non-Root Collection + (doseq [{:keys [collection-id collection-name]} [{:collection-id collection-id + :collection-name collection-name} + {:collection-id nil + :collection-name "Our analytics"}]] + (testing (format "Collection = %d %s" collection-id collection-name) + (mt/with-temp* [Card [{card-id :id} {:name card-name + :collection_id collection-id}] + Pulse [{pulse-id :id} {:collection_id collection-id + :alert_condition "rows"}] + PulseCard [_ {:card_id card-id + :pulse_id pulse-id}] + PulseChannel [{channel-id :id} {:pulse_id pulse-id + :channel_type "email" + :details {:emails ["amazing@fake.com"]} + :schedule_type "monthly" + :schedule_frame "first" + :schedule_day "mon" + 
:schedule_hour 8}] + PulseChannelRecipient [_ {:pulse_channel_id channel-id + :user_id (mt/user->id :rasta)}] + PulseChannel [{channel-2-id :id} {:pulse_id pulse-id + :channel_type "slack" + :details {:channel "#wow"} + :schedule_type "hourly"}]] + (is (= {:columns ["card_id" + "card_name" + "pulse_id" + "recipients" + "subscription_type" + "collection_id" + "collection_name" + "frequency" + "creator_id" + "creator_name" + "created_at" + "num_filters"] + ;; sort by newest first. + :rows [[card-id + card-name + pulse-id + nil + "Slack" + collection-id + collection-name + "Every hour" + (mt/user->id :rasta) + "Rasta Toucan" + (db/select-one-field :created_at PulseChannel :id channel-2-id) + 0] + [card-id + card-name + pulse-id + 2 + "Email" + collection-id + collection-name + "At 8:00 AM, on the first Tuesday of the month" + (mt/user->id :rasta) + "Rasta Toucan" + (db/select-one-field :created_at PulseChannel :id channel-id) + 0]]} + (mt/rows+column-names + (alerts (str/join (rest (butlast card-name))))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit/pages/common_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/pages/common_test.clj similarity index 70% rename from enterprise/backend/test/metabase_enterprise/audit/pages/common_test.clj rename to enterprise/backend/test/metabase_enterprise/audit_app/pages/common_test.clj index be846a696ec0..27d41f816f87 100644 --- a/enterprise/backend/test/metabase_enterprise/audit/pages/common_test.clj +++ b/enterprise/backend/test/metabase_enterprise/audit_app/pages/common_test.clj @@ -1,22 +1,25 @@ -(ns metabase-enterprise.audit.pages.common-test +(ns metabase-enterprise.audit-app.pages.common-test (:require [clojure.test :refer :all] - [metabase-enterprise.audit.pages.common :as pages.common] + [honeysql.core :as hsql] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase-enterprise.audit-app.pages.common :as pages.common] [metabase.db :as mdb] - 
[metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.query-processor :as qp] - [metabase.test :as mt])) + [metabase.test :as mt] + [metabase.util :as u] + [metabase.util.honeysql-extensions :as hx])) (defn- run-query - [varr & {:as additional-query-params}] + [query-type & {:as additional-query-params}] (mt/with-test-user :crowberto - (metastore-test/with-metastore-token-features #{:audit-app} + (premium-features-test/with-premium-features #{:audit-app} (qp/process-query (merge {:type :internal - :fn (let [mta (meta varr)] - (format "%s/%s" (ns-name (:ns mta)) (:name mta)))} + :fn (u/qualified-name query-type)} additional-query-params))))) -(defn- ^:private ^:internal-query-fn legacy-format-query-fn - [a1] +(defmethod audit.i/internal-query ::legacy-format-query-fn + [_ a1] (let [h2? (= (mdb/db-type) :h2)] {:metadata [[:A {:display_name "A", :base_type :type/DateTime}] [:B {:display_name "B", :base_type :type/Integer}]] @@ -24,8 +27,8 @@ {:union-all [{:select [[a1 :A] [2 :B]]} {:select [[3 :A] [4 :B]]}]})})) -(defn- ^:private ^:internal-query-fn reducible-format-query-fn - [a1] +(defmethod audit.i/internal-query ::reducible-format-query-fn + [_ a1] {:metadata [[:A {:display_name "A", :base_type :type/DateTime}] [:B {:display_name "B", :base_type :type/Integer}]] :results (pages.common/reducible-query @@ -35,13 +38,13 @@ (deftest transform-results-test (testing "Make sure query function result are transformed to QP results correctly" - (metastore-test/with-metastore-token-features #{:audit-app} - (doseq [[format-name {:keys [varr expected-rows]}] {"legacy" {:varr #'legacy-format-query-fn - :expected-rows [[100 2] [3 4]]} - "reducible" {:varr #'reducible-format-query-fn - :expected-rows [[101 2] [4 4]]}}] + (premium-features-test/with-premium-features #{:audit-app} + (doseq [[format-name {:keys [query-type expected-rows]}] {"legacy" {:query-type ::legacy-format-query-fn + 
:expected-rows [[100 2] [3 4]]} + "reducible" {:query-type ::reducible-format-query-fn + :expected-rows [[101 2] [4 4]]}}] (testing (format "format = %s" format-name) - (let [results (delay (run-query varr :args [100]))] + (let [results (delay (run-query query-type :args [100]))] (testing "cols" (is (= [{:display_name "A", :base_type :type/DateTime, :name "A"} {:display_name "B", :base_type :type/Integer, :name "B"}] @@ -50,20 +53,36 @@ (is (= expected-rows (mt/rows @results)))))))))) +(deftest add-45-days-clause-test + (testing "add 45 days clause" + (is (= + {:where + [:> + (hx/with-type-info + (hsql/call :cast :bob.dobbs #honeysql.types.SqlRaw{:s "date"}) + {::hx/database-type "date"}) + nil]} + (assoc-in (#'pages.common/add-45-days-clause {} :bob.dobbs) [:where 2] nil))))) + +(deftest add-search-clause-test + (testing "add search clause" + (is (= {:where `(:or [:like ~(hsql/call :lower :t.name) "%birds%"] [:like ~(hsql/call :lower :db.name) "%birds%"])} + (#'pages.common/add-search-clause {} "birds" :t.name :db.name))))) + (deftest query-limit-and-offset-test (testing "Make sure params passed in as part of the query map are respected" - (metastore-test/with-metastore-token-features #{:audit-app} - (doseq [[format-name {:keys [varr expected-rows]}] {"legacy" {:varr #'legacy-format-query-fn - :expected-rows [[100 2] [3 4]]} - "reducible" {:varr #'reducible-format-query-fn - :expected-rows [[101 2] [4 4]]}}] + (premium-features-test/with-premium-features #{:audit-app} + (doseq [[format-name {:keys [query-type expected-rows]}] {"legacy" {:query-type ::legacy-format-query-fn + :expected-rows [[100 2] [3 4]]} + "reducible" {:query-type ::reducible-format-query-fn + :expected-rows [[101 2] [4 4]]}}] (testing (format "format = %s" format-name) (testing :limit (is (= [(first expected-rows)] - (mt/rows (run-query varr :args [100], :limit 1))))) + (mt/rows (run-query query-type :args [100], :limit 1))))) (testing :offset (is (= [(second expected-rows)] - (mt/rows 
(run-query varr :args [100], :offset 1)))))))))) + (mt/rows (run-query query-type :args [100], :offset 1)))))))))) (deftest CTES->subselects-test (testing "FROM substitution" diff --git a/enterprise/backend/test/metabase_enterprise/audit_app/pages/dashboard_subscriptions_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/pages/dashboard_subscriptions_test.clj new file mode 100644 index 000000000000..6726b3737c21 --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/audit_app/pages/dashboard_subscriptions_test.clj @@ -0,0 +1,86 @@ +(ns metabase-enterprise.audit-app.pages.dashboard-subscriptions-test + (:require [clojure.string :as str] + [clojure.test :refer :all] + [metabase-enterprise.audit-app.pages.dashboard-subscriptions :as audit.dashboard-subscriptions] + [metabase.models :refer [Collection Dashboard Pulse PulseChannel PulseChannelRecipient]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.query-processor :as qp] + [metabase.test :as mt] + [metabase.util :as u] + [toucan.db :as db])) + +(defn- dashboard-subscriptions [dashboard-name] + (mt/with-test-user :crowberto + (premium-features-test/with-premium-features #{:audit-app} + (qp/process-query + {:type :internal + :fn (u/qualified-name ::audit.dashboard-subscriptions/table) + :args [dashboard-name]})))) + +(deftest table-test + (is (= [] + (mt/rows (dashboard-subscriptions (mt/random-name))))) + (let [dashboard-name (mt/random-name)] + (mt/with-temp Collection [{collection-id :id, collection-name :name}] + ;; test with both the Root Collection and a non-Root Collection + (doseq [{:keys [collection-id collection-name]} [{:collection-id collection-id + :collection-name collection-name} + {:collection-id nil + :collection-name "Our analytics"}]] + (testing (format "Collection = %d %s" collection-id collection-name) + (mt/with-temp* [Dashboard [{dashboard-id :id} {:name dashboard-name + :collection_id collection-id}] + Pulse [{pulse-id :id} 
{:dashboard_id dashboard-id + :collection_id collection-id}] + PulseChannel [{channel-id :id} {:pulse_id pulse-id + :channel_type "email" + :details {:emails ["amazing@fake.com"]} + :schedule_type "monthly" + :schedule_frame "first" + :schedule_day "mon" + :schedule_hour 8}] + PulseChannelRecipient [_ {:pulse_channel_id channel-id + :user_id (mt/user->id :rasta)}] + PulseChannel [{channel-2-id :id} {:pulse_id pulse-id + :channel_type "slack" + :details {:channel "#wow"} + :schedule_type "hourly"}]] + (is (= {:columns ["dashboard_id" + "dashboard_name" + "pulse_id" + "recipients" + "subscription_type" + "collection_id" + "collection_name" + "frequency" + "creator_id" + "creator_name" + "created_at" + "num_filters"] + ;; sort by newest first. + :rows [[dashboard-id + dashboard-name + pulse-id + nil + "Slack" + collection-id + collection-name + "Every hour" + (mt/user->id :rasta) + "Rasta Toucan" + (db/select-one-field :created_at PulseChannel :id channel-2-id) + 0] + [dashboard-id + dashboard-name + pulse-id + 2 + "Email" + collection-id + collection-name + "At 8:00 AM, on the first Tuesday of the month" + (mt/user->id :rasta) + "Rasta Toucan" + (db/select-one-field :created_at PulseChannel :id channel-id) + 0]]} + (mt/rows+column-names + (dashboard-subscriptions (str/join (rest (butlast dashboard-name))))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit_app/pages_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/pages_test.clj new file mode 100644 index 000000000000..c57be3ae51f5 --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/audit_app/pages_test.clj @@ -0,0 +1,169 @@ +(ns metabase-enterprise.audit-app.pages-test + (:require [clojure.java.classpath :as classpath] + [clojure.java.io :as io] + [clojure.string :as str] + [clojure.test :refer :all] + [clojure.tools.namespace.find :as ns-find] + [clojure.tools.reader :as tools.reader] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase.models :refer 
[Card Dashboard DashboardCard Database Table]] + [metabase.plugins.classloader :as classloader] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.query-processor :as qp] + [metabase.query-processor.util :as qp-util] + [metabase.test :as mt] + [metabase.test.fixtures :as fixtures] + [metabase.util :as u] + [ring.util.codec :as codec] + [schema.core :as s])) + +(use-fixtures :once (fixtures/initialize :db)) + +(deftest preconditions-test + (classloader/require 'metabase-enterprise.audit-app.pages.dashboards) + (testing "the method should exist" + (is (fn? (get-method audit.i/internal-query :metabase-enterprise.audit-app.pages.dashboards/most-popular-with-avg-speed)))) + + (testing "test that a query will fail if not ran by an admin" + (premium-features-test/with-premium-features #{:audit-app} + (is (= {:status "failed", :error "You don't have permissions to do that."} + (-> (mt/user-http-request :lucky :post 202 "dataset" + {:type :internal + :fn "metabase-enterprise.audit-app.pages.dashboards/most-popular-with-avg-speed"}) + (select-keys [:status :error])))))) + + (testing "ok, now try to run it. Should fail because we don't have audit-app enabled" + (premium-features-test/with-premium-features nil + (is (= {:status "failed", :error "Audit App queries are not enabled on this instance."} + (-> (mt/user-http-request :crowberto :post 202 "dataset" + {:type :internal + :fn "metabase-enterprise.audit-app.pages.dashboards/most-popular-with-avg-speed"}) + (select-keys [:status :error]))))))) + +(defn- all-query-methods + "Return a set of all audit/internal query types (excluding test/`:default` impls)." + [] + ;; load all `metabase-enterprise.audit-app.pages` namespaces. + (doseq [ns-symb (ns-find/find-namespaces (classpath/system-classpath)) + :when (and (str/starts-with? (name ns-symb) "metabase-enterprise.audit-app.pages") + (not (str/ends-with? 
(name ns-symb) "-test")))] + (classloader/require ns-symb)) + ;; now find all the impls of [[metabase-enterprise.audit-app.interface/internal-query]] from the pages namespaces + (into (sorted-set) + (filter (fn [query-type] + (when-let [ns-str (namespace query-type)] + (and (str/starts-with? ns-str "metabase-enterprise.audit-app.pages.") + (not (str/ends-with? ns-str "-test")))))) + (keys (methods audit.i/internal-query)))) + +(defn- query-defmethod-source-form + "Find the source [[defmethod]] or [[schema.core/defmethod]] form for the internal query named by `query-type`." + [query-type] + (let [file (-> (namespace query-type) + munge + (str/replace #"\." "/") + (str ".clj")) + ns-symb (symbol (namespace query-type))] + (with-open [reader (java.io.PushbackReader. (io/reader (io/resource file)))] + (binding [*ns* (the-ns ns-symb)] + (loop [] + (let [form (tools.reader/read reader false ::eof)] + (cond + (= form ::eof) + (throw (ex-info (str "Cannot find source for " query-type) + {:namespace ns-symb, :file file})) + + (and (seq? form) + (#{'defmethod 's/defmethod} (first form)) + (= (second form) 'audit.i/internal-query) + (= (nth form 2) query-type)) + form + + :else + (recur)))))))) + +(defn- arglist-strip-schema-annotations + "Remove Schema `:-` annotations from `arglist`." + [arglist] + (let [remove-next? (volatile! false)] + (into [] + (remove (fn [value] + (cond + (= value :-) + (do + (vreset! remove-next? true) + true) + + @remove-next? + (do + (vreset! remove-next? false) + true) + + :else + false))) + arglist))) + +(defn- query-defmethod-arglists + "Return a sequence of arglists for the internal query named by `query-type`." + [query-type] + (let [fn-tail (drop 3 (query-defmethod-source-form query-type))] + (mapv arglist-strip-schema-annotations + (if (vector? 
(first fn-tail)) + [(first fn-tail)] + (map first fn-tail))))) + +(defn- test-query-maps + "Generate a sequence of test query maps (as you'd pass to the QP) for the internal query named by `query-type`. + Generates one map for arity of the method." + [query-type {:keys [database table card dash]}] + (for [arglist (query-defmethod-arglists query-type)] + {:type :internal + :fn (u/qualified-name query-type) + :args (for [arg (mapv keyword (rest arglist))] + (case arg + :datetime-unit "day" + :dashboard-id (u/the-id dash) + :card-id (u/the-id card) + :user-id (mt/user->id :crowberto) + :database-id (u/the-id database) + :table-id (u/the-id table) + :model "card" + :query-hash (codec/base64-encode (qp-util/query-hash {:database 1, :type :native})) + :query-string "toucans" + :question-filter "bird sales" + :collection-filter "coin collection" + :error-filter "a" + :db-filter "PU" + :sort-column "card.id" + :sort-direction "desc" + :dashboard-name "wow" + :card-name "Credit Card"))})) + +(defn- do-tests-for-query-type + "Run test(s) for the internal query named by `query-type`. Runs one test for each map returned + by [[test-query-maps]]." 
+ [query-type objects] + (doseq [query (test-query-maps query-type objects)] + (testing (format "\nquery =\n%s" (u/pprint-to-str query)) + (is (schema= {:status (s/eq :completed) + s/Keyword s/Any} + (qp/process-query query)))))) + +(defn- do-with-temp-objects [f] + (mt/with-temp* [Database [database] + Table [table {:db_id (u/the-id database)}] + Card [card {:table_id (u/the-id table), :database_id (u/the-id database)}] + Dashboard [dash] + DashboardCard [_ {:card_id (u/the-id card), :dashboard_id (u/the-id dash)}]] + (f {:database database, :table table, :card card, :dash dash}))) + +(defmacro ^:private with-temp-objects [[objects-binding] & body] + `(do-with-temp-objects (fn [~objects-binding] ~@body))) + +(deftest all-queries-test + (mt/with-test-user :crowberto + (with-temp-objects [objects] + (premium-features-test/with-premium-features #{:audit-app} + (doseq [query-type (all-query-methods)] + (testing query-type + (do-tests-for-query-type query-type objects))))))) diff --git a/enterprise/backend/test/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries_test.clj b/enterprise/backend/test/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries_test.clj similarity index 56% rename from enterprise/backend/test/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries_test.clj rename to enterprise/backend/test/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries_test.clj index 560e15c1a813..c2f0e95c3be2 100644 --- a/enterprise/backend/test/metabase_enterprise/audit/query_processor/middleware/handle_audit_queries_test.clj +++ b/enterprise/backend/test/metabase_enterprise/audit_app/query_processor/middleware/handle_audit_queries_test.clj @@ -1,28 +1,29 @@ -(ns metabase-enterprise.audit.query-processor.middleware.handle-audit-queries-test - "Additional tests for this namespace can be found in `metabase-enterprise.audit.pages-test`." 
+(ns metabase-enterprise.audit-app.query-processor.middleware.handle-audit-queries-test + "Additional tests for this namespace can be found in `metabase-enterprise.audit-app.pages-test`." (:require [clojure.test :refer :all] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase-enterprise.audit-app.interface :as audit.i] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.query-processor :as qp] - [metabase.test :as mt])) + [metabase.test :as mt] + [metabase.util :as u])) (defn- run-query - [varr & {:as additional-query-params}] + [query-type & {:as additional-query-params}] (mt/with-test-user :crowberto - (metastore-test/with-metastore-token-features #{:audit-app} + (premium-features-test/with-premium-features #{:audit-app} (qp/process-query (merge {:type :internal - :fn (let [mta (meta varr)] - (format "%s/%s" (ns-name (:ns mta)) (:name mta)))} + :fn (u/qualified-name query-type)} additional-query-params))))) -(defn- ^:private ^:internal-query-fn legacy-format-query-fn - [a1] +(defmethod audit.i/internal-query ::legacy-format-query-fn + [_ a1] {:metadata [[:a {:display_name "A", :base_type :type/DateTime}] [:b {:display_name "B", :base_type :type/Integer}]] :results [{:a a1, :b 2} {:a 3, :b 5}]}) -(defn- ^:private ^:internal-query-fn reducible-format-query-fn - [a1] +(defmethod audit.i/internal-query ::reducible-format-query-fn + [_ a1] {:metadata [[:a {:display_name "A", :base_type :type/DateTime}] [:b {:display_name "B", :base_type :type/Integer}]] :results (constantly [[a1 2] @@ -31,12 +32,12 @@ (deftest transform-results-test (testing "Make sure query function result are transformed to QP results correctly" - (doseq [[format-name {:keys [varr expected-rows]}] {"legacy" {:varr #'legacy-format-query-fn - :expected-rows [[100 2] [3 5]]} - "reducible" {:varr #'reducible-format-query-fn - :expected-rows [[101 2] [4 5]]}}] + (doseq [[format-name {:keys [query-type expected-rows]}] {"legacy" {:query-type 
::legacy-format-query-fn + :expected-rows [[100 2] [3 5]]} + "reducible" {:query-type ::reducible-format-query-fn + :expected-rows [[101 2] [4 5]]}}] (testing (format "format = %s" format-name) - (let [results (delay (run-query varr :args [100]))] + (let [results (delay (run-query query-type :args [100]))] (testing "cols" (is (= [{:display_name "A", :base_type :type/DateTime, :name "a"} {:display_name "B", :base_type :type/Integer, :name "b"}] diff --git a/enterprise/backend/test/metabase_enterprise/content_management/api/review_test.clj b/enterprise/backend/test/metabase_enterprise/content_management/api/review_test.clj new file mode 100644 index 000000000000..ac403de6f53d --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/content_management/api/review_test.clj @@ -0,0 +1,97 @@ +(ns metabase-enterprise.content-management.api.review-test + (:require [clojure.test :refer :all] + [metabase.models.card :refer [Card]] + [metabase.models.moderation-review :as mod-review :refer [ModerationReview]] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.test :as mt] + [toucan.db :as db])) + +(defn- normalized-response + [moderation-review] + (dissoc moderation-review :id :updated_at :created_at)) + +;;todo: check it can review dashboards, and that it cannot review other models +(deftest create-test + (testing "POST /api/moderation-review" + (testing "Should require a token with `:content-management`" + (premium-features-test/with-premium-features #{} + (is (= "This API endpoint is only enabled if you have a premium token with the :content-management feature." + (mt/user-http-request :rasta :post 402 "moderation-review" + {:text "review" + :status "verified" + :moderated_item_id 1 + :moderated_item_type "card"}))))) + + (premium-features-test/with-premium-features #{:content-management} + (mt/with-temp* [Card [{card-id :id :as card} {:name "Test Card"}]] + (mt/with-model-cleanup [ModerationReview] + (letfn [(moderate! 
[status text] + (normalized-response + (mt/user-http-request :crowberto :post 200 "moderation-review" + {:text text + :status status + :moderated_item_id card-id + :moderated_item_type "card"}))) + (review-count [] (db/count ModerationReview + :moderated_item_id card-id + :moderated_item_type "card"))] + (testing "Non admin cannot create a moderation review" + (is (= 0 (review-count))) + (is (= "You don't have permissions to do that." + (mt/user-http-request :rasta :post 403 "moderation-review" + {:text "review" + :status "verified" + :moderated_item_id card-id + :moderated_item_type "card"}))) + (is (= 0 (review-count)))) + (is (= {:text "Looks good to me" + :moderated_item_id card-id + :moderated_item_type "card" + :moderator_id (mt/user->id :crowberto) + :status "verified" + :most_recent true} + (moderate! "verified" "Looks good to me"))) + (testing "When adding a new moderation review, marks it as most recent" + (is (= {:text "hmm" + :status nil + :most_recent true} + (select-keys (moderate! nil "hmm") [:text :status :most_recent]))) + (testing "And previous moderation reviews are marked as not :most_recent" + (is (= #{{:text "hmm" :most_recent true :status nil} + {:text "Looks good to me" :most_recent false :status "verified"}} + (into #{} + (map #(select-keys % [:text :status :most_recent])) + (db/select ModerationReview + :moderated_item_id card-id + :moderated_item_type "card")))))) + (testing "Ensures we never have more than `modreview/max-moderation-reviews`" + (db/insert-many! ModerationReview (repeat (* 2 mod-review/max-moderation-reviews) + {:moderated_item_id card-id + :moderated_item_type "card" + :moderator_id (mt/user->id :crowberto) + :most_recent false + :status "verified" + :text "old review"})) + ;; manually inserted many + + (is (> (review-count) mod-review/max-moderation-reviews)) + (moderate! 
"verified" "lookin good") + ;; api ensures we never have more than our limit + + (is (<= (review-count) mod-review/max-moderation-reviews))) + (testing "Only allows for valid status" + (doseq [status mod-review/statuses] + (is (= status (:status (moderate! status "good"))))) + ;; i wish this was better. Should have a better error message and honestly shouldn't be a 500 + (tap> (mt/user-http-request :crowberto :post 400 "moderation-review" + {:text "not a chance this works" + :status "invalid status" + :moderated_item_id card-id + :moderated_item_type "card"}))) + (testing "Can't moderate a card that doesn't exist" + (is (= "Not found." + (mt/user-http-request :crowberto :post 404 "moderation-review" + {:text "card doesn't exist" + :status "verified" + :moderated_item_id Integer/MAX_VALUE + :moderated_item_type "card"})))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/api/collection_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/api/collection_test.clj index 90c06d8e59ab..6b693f45daea 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/api/collection_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/api/collection_test.clj @@ -4,7 +4,7 @@ [metabase.models.collection :as collection] [metabase.models.permissions :as perms] [metabase.models.permissions-group :as group] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.test :as mt])) (deftest ee-disabled-snippets-graph-test @@ -17,14 +17,14 @@ (= (:id snippet) (:id a-snippet))) response))))] (testing "\nIf we have a valid EE token, we should only see Snippets in the Root Collection with valid perms" - (metastore-test/with-metastore-token-features #{:enhancements} + (premium-features-test/with-premium-features #{:enhancements} (is (= false (can-see-snippet?))) (perms/grant-collection-read-permissions! 
(group/all-users) (assoc collection/root-collection :namespace "snippets")) (is (= true (can-see-snippet?))))) (testing "\nIf we do not have a valid EE token, all Snippets should come back from the graph regardless of our perms" - (metastore-test/with-metastore-token-features #{} + (premium-features-test/with-premium-features #{} (is (= true (can-see-snippet?))) (perms/revoke-collection-permissions! (group/all-users) (assoc collection/root-collection :namespace "snippets")) diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/api/native_query_snippet_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/api/native_query_snippet_test.clj index f0744d196cd8..32893dccd7cf 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/api/native_query_snippet_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/api/native_query_snippet_test.clj @@ -4,7 +4,7 @@ [metabase.models.collection :as collection] [metabase.models.permissions :as perms] [metabase.models.permissions-group :as group] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.test :as mt] [metabase.util :as u] [toucan.db :as db])) @@ -22,12 +22,12 @@ (testing (format "\nSnippet in %s" collection-name) (mt/with-temp NativeQuerySnippet [snippet {:collection_id (:id collection)}] (testing "\nShould be allowed regardless if EE features aren't enabled" - (metastore-test/with-metastore-token-features #{} + (premium-features-test/with-premium-features #{} (is (= true (has-perms? snippet)) "allowed?"))) (testing "\nWith EE features enabled" - (metastore-test/with-metastore-token-features #{:enhancements} + (premium-features-test/with-premium-features #{:enhancements} (testing (format "\nShould not be allowed with no perms for %s" collection-name) (is (= false (has-perms? 
snippet)) @@ -113,10 +113,10 @@ (when-not (= source-collection dest-collection) (testing (format "\nMove from %s -> %s should need write ('curate') perms for both" (:name source-collection) (:name dest-collection)) (testing "\nShould be allowed if EE perms aren't enabled" - (metastore-test/with-metastore-token-features #{} + (premium-features-test/with-premium-features #{} (is (= true (has-perms?))))) - (metastore-test/with-metastore-token-features #{:enhancements} + (premium-features-test/with-premium-features #{:enhancements} (doseq [c [source-collection dest-collection]] (testing (format "\nPerms for only %s should fail" (:name c)) (try diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/ee_strategy_impl_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/ee_strategy_impl_test.clj index 7de093a2058a..4f40ff6ecc56 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/ee_strategy_impl_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/ee_strategy_impl_test.clj @@ -1,7 +1,7 @@ (ns metabase-enterprise.enhancements.ee-strategy-impl-test (:require [clojure.test :refer :all] [metabase-enterprise.enhancements.ee-strategy-impl :as ee-strategy-impl] - [metabase.public-settings.metastore :as settings.metastore] + [metabase.public-settings.premium-features :as settings.premium-features] [pretty.core :refer [PrettyPrintable]])) (defprotocol ^:private MyProtocol @@ -20,17 +20,17 @@ (deftest generate-method-impl-test (is (= '((m1 [_] (metabase-enterprise.enhancements.ee-strategy-impl/invoke-ee-when-enabled - #'metabase.public-settings.metastore/enable-enhancements? + #'metabase.public-settings.premium-features/enable-enhancements? metabase-enterprise.enhancements.ee-strategy-impl-test/m1 ee oss)) (m1 [_ a] (metabase-enterprise.enhancements.ee-strategy-impl/invoke-ee-when-enabled - #'metabase.public-settings.metastore/enable-enhancements? + #'metabase.public-settings.premium-features/enable-enhancements? 
metabase-enterprise.enhancements.ee-strategy-impl-test/m1 ee oss a))) (#'ee-strategy-impl/generate-method-impl - (list 'var 'metabase.public-settings.metastore/enable-enhancements?) + (list 'var 'metabase.public-settings.premium-features/enable-enhancements?) 'ee 'oss {:var #'MyProtocol} {:name 'm1 @@ -45,23 +45,23 @@ (is (= '(metabase_enterprise.enhancements.ee_strategy_impl_test.MyProtocol (m1 [_] (metabase-enterprise.enhancements.ee-strategy-impl/invoke-ee-when-enabled - #'metabase.public-settings.metastore/enable-enhancements? + #'metabase.public-settings.premium-features/enable-enhancements? metabase-enterprise.enhancements.ee-strategy-impl-test/m1 ee oss)) (m1 [_ a] (metabase-enterprise.enhancements.ee-strategy-impl/invoke-ee-when-enabled - #'metabase.public-settings.metastore/enable-enhancements? + #'metabase.public-settings.premium-features/enable-enhancements? metabase-enterprise.enhancements.ee-strategy-impl-test/m1 ee oss a)) (m2 [_ a b] (metabase-enterprise.enhancements.ee-strategy-impl/invoke-ee-when-enabled - #'metabase.public-settings.metastore/enable-enhancements? + #'metabase.public-settings.premium-features/enable-enhancements? metabase-enterprise.enhancements.ee-strategy-impl-test/m2 ee oss a b))) (#'ee-strategy-impl/generate-protocol-impl - (list 'var 'metabase.public-settings.metastore/enable-enhancements?) + (list 'var 'metabase.public-settings.premium-features/enable-enhancements?) 'ee 'oss protocol-symb))))))) (deftest e2e-test @@ -77,20 +77,20 @@ MyProtocol (m2 [_ x y] (- x y))) - impl (ee-strategy-impl/reify-ee-strategy-impl #'settings.metastore/enable-enhancements? ee oss MyProtocol)] + impl (ee-strategy-impl/reify-ee-strategy-impl #'settings.premium-features/enable-enhancements? ee oss MyProtocol)] (testing "sanity check" (is (= 3 (m2 ee 1 2))) (is (= -1 (m2 oss 1 2)))) - (with-redefs [settings.metastore/enable-enhancements? (constantly false)] + (with-redefs [settings.premium-features/enable-enhancements? 
(constantly false)] (is (= -1 (m2 impl 1 2)))) - (with-redefs [settings.metastore/enable-enhancements? (constantly true)] + (with-redefs [settings.premium-features/enable-enhancements? (constantly true)] (is (= 3 (m2 impl 1 2)))) (testing "Should pretty print" (is (= (str "(metabase-enterprise.enhancements.ee-strategy-impl/reify-ee-strategy-impl" - " #'metabase.public-settings.metastore/enable-enhancements?" + " #'metabase.public-settings.premium-features/enable-enhancements?" " (ee) (oss))") (pr-str impl)))))) diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/integrations/google_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/integrations/google_test.clj index d6d0e5554067..2210da61d166 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/integrations/google_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/integrations/google_test.clj @@ -2,11 +2,11 @@ (:require [clojure.test :refer :all] [metabase.integrations.google :as google] [metabase.models.user :as user :refer [User]] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] [metabase.test :as mt])) (deftest google-auth-create-new-user!-test - (with-redefs [metastore/enable-sso? (constantly true)] + (with-redefs [premium-features/enable-sso? 
(constantly true)] (testing "should support multiple domains (#5218)" (mt/with-temporary-setting-values [google-auth-auto-create-accounts-domain "metabase.com,example.com"] (mt/with-model-cleanup [User] diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/integrations/ldap_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/integrations/ldap_test.clj index f9a9a0320021..e1d3065caa3b 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/integrations/ldap_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/integrations/ldap_test.clj @@ -3,7 +3,7 @@ [metabase-enterprise.enhancements.integrations.ldap :as ldap-ee] [metabase.integrations.ldap :as ldap] [metabase.models.user :as user :refer [User]] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] [metabase.test :as mt] [metabase.test.integrations.ldap :as ldap.test] [metabase.util.schema :as su] @@ -11,7 +11,7 @@ [toucan.db :as db])) (deftest find-test - (with-redefs [metastore/enable-enhancements? (constantly true)] + (with-redefs [premium-features/enable-enhancements? (constantly true)] (ldap.test/with-ldap-server (testing "find by username" (is (= {:dn "cn=John Smith,ou=People,dc=metabase,dc=com" @@ -92,7 +92,7 @@ (ldap/find-user "sally.brown@metabase.com")))))))) (deftest attribute-sync-test - (with-redefs [metastore/enable-enhancements? (constantly true)] + (with-redefs [premium-features/enable-enhancements? (constantly true)] (ldap.test/with-ldap-server (testing "find by email/username should return other attributes as well" (is (= {:dn "cn=Lucky Pigeon,ou=Birds,dc=metabase,dc=com" @@ -165,7 +165,7 @@ (db/delete! User :%lower.email "john.smith@metabase.com")))))))) (deftest update-attributes-on-login-test - (with-redefs [metastore/enable-enhancements? (constantly true)] + (with-redefs [premium-features/enable-enhancements? 
(constantly true)] (ldap.test/with-ldap-server (testing "Existing user's attributes are updated on fetch" (try @@ -214,7 +214,7 @@ (db/delete! User :%lower.email "john.smith@metabase.com"))))))) (deftest fetch-or-create-user-test - (with-redefs [metastore/enable-enhancements? (constantly true)] + (with-redefs [premium-features/enable-enhancements? (constantly true)] (ldap.test/with-ldap-server (testing "a new user is created when they don't already exist" (try diff --git a/enterprise/backend/test/metabase_enterprise/enhancements/models/native_query_snippet/permissions_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/models/native_query_snippet/permissions_test.clj index 458f0b97eb27..654a93bf1921 100644 --- a/enterprise/backend/test/metabase_enterprise/enhancements/models/native_query_snippet/permissions_test.clj +++ b/enterprise/backend/test/metabase_enterprise/enhancements/models/native_query_snippet/permissions_test.clj @@ -5,7 +5,7 @@ [metabase.models.interface :as i] [metabase.models.permissions :as perms] [metabase.models.permissions-group :as group] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.test :as mt])) (def ^:private root-collection (assoc collection/root-collection :name "Root Collection", :namespace "snippets")) @@ -22,9 +22,9 @@ (is (= expected (has-perms-for-id?)))))))] (testing "should be allowed if EE perms aren't enabled" - (metastore-test/with-metastore-token-features #{} + (premium-features-test/with-premium-features #{} (test-perms* true))) - (metastore-test/with-metastore-token-features #{:enhancements} + (premium-features-test/with-premium-features #{:enhancements} (testing "should NOT be allowed if EE perms are enabled and you do not have perms" (test-perms* false)) (testing "should be allowed if you have perms" diff --git 
a/enterprise/backend/test/metabase_enterprise/enhancements/models/permissions/block_permissions_test.clj b/enterprise/backend/test/metabase_enterprise/enhancements/models/permissions/block_permissions_test.clj new file mode 100644 index 000000000000..aa6080430782 --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/enhancements/models/permissions/block_permissions_test.clj @@ -0,0 +1,244 @@ +(ns metabase-enterprise.enhancements.models.permissions.block-permissions-test + (:require [clojure.test :refer :all] + [metabase-enterprise.enhancements.models.permissions.block-permissions :as block-perms] + [metabase-enterprise.sandbox.models.group-table-access-policy :refer [GroupTableAccessPolicy]] + [metabase.api.common :as api] + [metabase.models :refer [Card Collection Database Permissions PermissionsGroup PermissionsGroupMembership User]] + [metabase.models.permissions :as perms] + [metabase.models.permissions-group :as group] + [metabase.public-settings.premium-features-test :as premium-features-test] + [metabase.query-processor :as qp] + [metabase.query-processor.middleware.permissions :as qp.perms] + [metabase.test :as mt] + [metabase.util :as u] + [schema.core :as s] + [toucan.db :as db])) + +;;;; Graph-related stuff + +(defn- test-db-perms [group-id] + (get-in (perms/data-perms-graph) [:groups group-id (mt/id)])) + +(defn- api-test-db-perms [group-id] + (into {} + (map (fn [[k v]] + [k (cond-> v (string? 
v) keyword)])) + (get-in (mt/user-http-request :crowberto :get 200 "permissions/graph") + [:groups + (keyword (str group-id)) + (keyword (str (mt/id)))]))) + +(deftest graph-test + (testing "block permissions should come back from" + (doseq [[message perms] {"the graph function" + test-db-perms + + "the API" + api-test-db-perms}] + (testing (str message "\n")) + (mt/with-temp* [PermissionsGroup [{group-id :id}] + Permissions [_ {:group_id group-id + :object (perms/database-block-perms-path (mt/id))}]] + (is (= {:schemas :block} + (perms group-id))) + (testing (str "\nBlock perms and data perms shouldn't exist together at the same time, but if they do for some " + "reason, then the graph endpoint should ignore the data perms.") + (doseq [path [(perms/data-perms-path (mt/id)) + (perms/data-perms-path (mt/id) "public") + (perms/data-perms-path (mt/id) "public" (mt/id :venues))]] + (testing (format "\nPath = %s" (pr-str path)) + (mt/with-temp* [Permissions [_ {:group_id group-id + :object path}]] + (is (= (merge {:schemas :block} + ;; block perms won't affect the value of `:native`; if a given group has both + ;; `/db/1/` and `/block/db/1/` then the graph will come back with `:native + ;; :write` and `:schemas :block`. This state isn't normally allowed, but the + ;; graph code doesn't currently correct it if it happens. Not sure it's worth + ;; the extra code complexity since it should never happen in the first place. + (when (= path (perms/data-perms-path (mt/id))) + {:native :write})) + (perms group-id))))))))))) + +(defn- grant-block-perms! [group-id] + (perms/update-data-perms-graph! [group-id (mt/id)] {:schemas :block})) + +(defn- api-grant-block-perms! 
[group-id] + (let [current-graph (perms/data-perms-graph) + new-graph (assoc-in current-graph [:groups group-id (mt/id)] {:schemas :block}) + result (premium-features-test/with-premium-features #{:advanced-permissions} + (mt/user-http-request :crowberto :put 200 "permissions/graph" new-graph))] + (is (= "block" + (get-in result [:groups + (keyword (str group-id)) + (keyword (str (mt/id))) + :schemas]))))) + +(deftest api-throws-error-if-premium-feature-not-enabled + (testing "PUT /api/permissions/graph" + (testing (str "fails when a group has a block permission set, and the instance doesn't have the " + ":advanced-permissions premium feature enabled") + (mt/with-temp PermissionsGroup [{group-id :id}] + (let [current-graph (perms/data-perms-graph) + new-graph (assoc-in current-graph [:groups group-id (mt/id)] {:schemas :block}) + result (premium-features-test/with-premium-features #{} ; disable premium features + (mt/user-http-request :crowberto :put 402 "permissions/graph" new-graph))] + (is (= "Can't use block permissions without having the advanced-permissions premium feature" + result))))))) + +(deftest update-graph-test + (testing "Should be able to set block permissions with" + (doseq [[description grant!] {"the graph update function" + (fn [group-id] + (premium-features-test/with-premium-features #{:advanced-permissions} + (grant-block-perms! group-id))) + + "the perms graph API endpoint" + api-grant-block-perms!}] + (testing (str description "\n") + (mt/with-temp PermissionsGroup [{group-id :id}] + (testing "Group should have no perms upon creation" + (is (= nil + (test-db-perms group-id)))) + (testing "group has no existing permissions" + (mt/with-model-cleanup [Permissions] + (grant! group-id) + (is (= {:schemas :block} + (test-db-perms group-id))))) + (testing "group has existing data permissions... :block should remove them" + (mt/with-model-cleanup [Permissions] + (perms/grant-full-db-permissions! group-id (mt/id)) + (grant! 
group-id) + (is (= {:schemas :block} + (test-db-perms group-id))) + (is (= #{(perms/database-block-perms-path (mt/id))} + (db/select-field :object Permissions :group_id group-id)))))))))) + +(deftest update-graph-delete-sandboxes-test + (testing "When setting `:block` permissions any GTAP rows for that Group/Database should get deleted." + (premium-features-test/with-premium-features #{:sandboxes :advanced-permissions} + (mt/with-model-cleanup [Permissions] + (mt/with-temp* [PermissionsGroup [{group-id :id}] + GroupTableAccessPolicy [_ {:table_id (mt/id :venues) + :group_id group-id}]] + (grant-block-perms! group-id) + (is (= {:schemas :block} + (test-db-perms group-id))) + (is (not (db/exists? GroupTableAccessPolicy :group_id group-id)))))))) + +(deftest update-graph-data-perms-should-delete-block-perms-test + (testing "granting data permissions should delete existing block permissions" + (mt/with-temp* [PermissionsGroup [{group-id :id}] + Permissions [_ {:group_id group-id, :object (perms/database-block-perms-path (mt/id))}]] + (is (= {:schemas :block} + (test-db-perms group-id))) + (perms/update-data-perms-graph! [group-id (mt/id) :schemas] {"public" {(mt/id :venues) {:read :all}}}) + (is (= {:schemas {"public" {(mt/id :venues) {:read :all}}}} + (test-db-perms group-id)))))) + +(deftest update-graph-disallow-native-query-perms-test + (testing "Disallow block permissions + native query permissions" + (mt/with-temp* [PermissionsGroup [{group-id :id}] + Permissions [_ {:group_id group-id, :object (perms/data-perms-path (mt/id))}]] + (testing "via the fn" + (is (thrown-with-msg? + clojure.lang.ExceptionInfo + ;; TODO -- this error message is totally garbage, fix this + #"DB permissions with a valid combination of values for :native and :schemas" + ;; #"DB permissions with a valid combination of values for :native and :schemas" + (perms/update-data-perms-graph! 
[group-id (mt/id)] + {:schemas :block, :native :write})))) + (testing "via the API" + (let [current-graph (perms/data-perms-graph) + new-graph (assoc-in current-graph + [:groups group-id (mt/id)] + {:schemas :block, :native :write})] + (is (schema= {:message #".*DB permissions with a valid combination of values for :native and :schemas.*" + s/Keyword s/Any} + (premium-features-test/with-premium-features #{:advanced-permissions} + (mt/user-http-request :crowberto :put 500 "permissions/graph" new-graph))))))))) + +(deftest delete-database-delete-block-perms-test + (testing "If a Database gets DELETED, any block permissions for it should get deleted too." + (mt/with-temp* [Database [{db-id :id}] + Permissions [_ {:group_id (u/the-id (group/all-users)) + :object (perms/database-block-perms-path db-id)}]] + (letfn [(perms-exist? [] + (db/exists? Permissions :object (perms/database-block-perms-path db-id)))] + (is (perms-exist?)) + (db/delete! Database :id db-id) + (is (not (perms-exist?))))))) + +;;;; QP perms-check related stuff. + +(deftest qp-block-permissions-test + (mt/with-temp-copy-of-db + (let [query {:database (mt/id) + :type :query + :query {:source-table (mt/id :venues) + :limit 1}}] + (mt/with-temp* [User [{user-id :id}] + PermissionsGroup [{group-id :id}] + PermissionsGroupMembership [_ {:group_id group-id, :user_id user-id}] + Collection [{collection-id :id}] + Card [{card-id :id} {:collection_id collection-id + :dataset_query query}] + Permissions [_ {:group_id group-id, :object (perms/collection-read-path collection-id)}]] + (premium-features-test/with-premium-features #{:enhancements} + (perms/revoke-data-perms! (group/all-users) (mt/id)) + (perms/revoke-data-perms! 
group-id (mt/id)) + (letfn [(run-ad-hoc-query [] + (mt/with-current-user user-id + (qp/process-query query))) + (run-saved-question [] + (binding [qp.perms/*card-id* card-id] + (run-ad-hoc-query))) + (check-block-perms [] + (mt/with-current-user user-id + (#'qp.perms/check-block-permissions query)))] + (testing "sanity check: should not be able to run ad-hoc query" + (is (not (contains? @api/*current-user-permissions-set* + (perms/data-perms-path (mt/id))))) + (is (thrown-with-msg? + clojure.lang.ExceptionInfo + #"You do not have permissions to run this query" + (run-ad-hoc-query)))) + (testing "sanity check: should be able to run query as saved Question before block perms are set." + (is (run-saved-question)) + (is (= ::block-perms/no-block-permissions-for-db + (check-block-perms)))) + ;; 'grant' the block permissions. + (mt/with-temp Permissions [_ {:group_id group-id, :object (perms/database-block-perms-path (mt/id))}] + (testing "if EE token does not have the `:enhancements` feature: should not do check" + (premium-features-test/with-premium-features #{} + (is (= ::block-perms/enhancements-not-enabled + (check-block-perms))))) + (testing "disallow running the query" + (is (thrown-with-msg? + clojure.lang.ExceptionInfo + #"Blocked: you are not allowed to run queries against Database \d+" + (check-block-perms))) + (is (thrown-with-msg? + clojure.lang.ExceptionInfo + #"Blocked: you are not allowed to run queries against Database \d+" + (run-saved-question)))) + (testing "\nAllow running if current User has data permissions from another group." 
+ (mt/with-temp* [PermissionsGroup [{group-2-id :id}] + PermissionsGroupMembership [_ {:group_id group-2-id, :user_id user-id}]] + (doseq [[message perms] {"with full DB perms" (perms/data-perms-path (mt/id)) + "with perms for the Table in question" (perms/table-query-path (mt/id :venues))}] + (mt/with-temp Permissions [_ {:group_id group-2-id, :object perms}] + (testing "Should be able to run the query" + (doseq [[message f] {"ad-hoc queries" run-ad-hoc-query + "Saved Questions" run-saved-question}] + (testing message + (is (f))))))) + (testing "\nSandboxed permissions" + (premium-features-test/with-premium-features #{:enhancements :sandboxing} + (mt/with-temp* [Permissions [_ {:group_id group-2-id + :object (perms/table-segmented-query-path (mt/id :venues))}] + GroupTableAccessPolicy [_ {:table_id (mt/id :venues), :group_id group-id}]] + (testing "Should be able to run the query" + (doseq [[message f] {"ad-hoc queries" run-ad-hoc-query + "Saved Questions" run-saved-question}] + (testing message + (is (f))))))))))))))))) diff --git a/enterprise/backend/test/metabase_enterprise/sandbox/api/card_test.clj b/enterprise/backend/test/metabase_enterprise/sandbox/api/card_test.clj index 694151b31019..2b4664bf43b5 100644 --- a/enterprise/backend/test/metabase_enterprise/sandbox/api/card_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sandbox/api/card_test.clj @@ -18,7 +18,7 @@ PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta) :group_id (u/the-id group)}]] (mt/with-db db - (perms/revoke-permissions! (perms-group/all-users) db) + (perms/revoke-data-perms! (perms-group/all-users) db) (perms/grant-permissions! group (perms/table-segmented-query-path table)) (perms/grant-collection-readwrite-permissions! group collection) (is (some? ((mt/user->client :rasta) :post 202 "card" @@ -36,7 +36,7 @@ Card [card {:name "Some Name" :collection_id (u/the-id collection)}]] (mt/with-db db - (perms/revoke-permissions! 
(perms-group/all-users) db) + (perms/revoke-data-perms! (perms-group/all-users) db) (perms/grant-permissions! group (perms/table-segmented-query-path table)) (perms/grant-collection-readwrite-permissions! group collection) (is (= "Another Name" diff --git a/enterprise/backend/test/metabase_enterprise/sandbox/api/gtap_test.clj b/enterprise/backend/test/metabase_enterprise/sandbox/api/gtap_test.clj index 043257dbdb91..bd464e979fed 100644 --- a/enterprise/backend/test/metabase_enterprise/sandbox/api/gtap_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sandbox/api/gtap_test.clj @@ -3,18 +3,15 @@ [metabase-enterprise.sandbox.models.group-table-access-policy :refer [GroupTableAccessPolicy]] [metabase.http-client :as http] [metabase.models :refer [Card Field PermissionsGroup Table]] - [metabase.public-settings.metastore :as metastore] + [metabase.public-settings.premium-features :as premium-features] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.server.middleware.util :as middleware.u] [metabase.test :as mt] [schema.core :as s])) -(defmacro ^:private with-sandboxes-enabled [& body] - `(with-redefs [metastore/enable-sandboxes? (constantly true)] - ~@body)) - (deftest require-auth-test (testing "Must be authenticated to query for GTAPs" - (with-sandboxes-enabled + (premium-features-test/with-premium-features #{:sandboxes} (is (= (get middleware.u/response-unauthentic :body) (http/client :get 401 "mt/gtap"))) @@ -32,7 +29,7 @@ "Invokes `body` ensuring any `GroupTableAccessPolicy` created will be removed afterward. 
Leaving behind a GTAP can case referential integrity failures for any related `Card` that would be cleaned up as part of a `with-temp*` call" [& body] - `(with-sandboxes-enabled + `(premium-features-test/with-premium-features #{:sandboxes} (mt/with-model-cleanup [GroupTableAccessPolicy] ~@body))) @@ -44,16 +41,16 @@ (deftest validate-token-test (testing "POST /api/mt/gtap" (testing "Must have a valid token to use GTAPs" - (with-redefs [metastore/enable-sandboxes? (constantly false)] + (with-redefs [premium-features/enable-sandboxes? (constantly false)] (mt/with-temp* [Table [{table-id :id}] PermissionsGroup [{group-id :id}] Card [{card-id :id}]] - (is (re= #".*sandboxing is not enabled.*" - (mt/user-http-request :crowberto :post 403 "mt/gtap" - {:table_id table-id - :group_id group-id - :card_id card-id - :attribute_remappings {"foo" 1}})))))))) + (is (= "This API endpoint is only enabled if you have a premium token with the :sandboxes feature." + (mt/user-http-request :crowberto :post 402 "mt/gtap" + {:table_id table-id + :group_id group-id + :card_id card-id + :attribute_remappings {"foo" 1}})))))))) (deftest create-gtap-test (testing "POST /api/mt/gtap" (mt/with-temp* [Table [{table-id :id}] @@ -120,7 +117,7 @@ (mt/with-temp* [Table [{table-id :id}] PermissionsGroup [{group-id :id}] Card [{card-id :id}]] - (with-sandboxes-enabled + (premium-features-test/with-premium-features #{:sandboxes} (testing "Test that we can update only the attribute remappings for a GTAP" (mt/with-temp GroupTableAccessPolicy [{gtap-id :id} {:table_id table-id :group_id group-id diff --git a/enterprise/backend/test/metabase_enterprise/sandbox/pulse_test.clj b/enterprise/backend/test/metabase_enterprise/sandbox/pulse_test.clj index 2c6c4386a1c4..e3bdeed9a248 100644 --- a/enterprise/backend/test/metabase_enterprise/sandbox/pulse_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sandbox/pulse_test.clj @@ -3,7 +3,6 @@ [clojure.java.io :as io] [clojure.test :refer :all] [medley.core 
:as m] - [metabase-enterprise.sandbox.test-util :as mt.tu] [metabase.email.messages :as messages] [metabase.models :refer [Card Pulse PulseCard PulseChannel PulseChannelRecipient]] [metabase.models.pulse :as models.pulse] @@ -41,7 +40,7 @@ :user_id (mt/user->id :rasta)}]] (mt/with-temporary-setting-values [email-from-address "metamailman@metabase.com"] (mt/with-fake-inbox - (with-redefs [messages/render-pulse-email (fn [_ _ [{:keys [result]}]] + (with-redefs [messages/render-pulse-email (fn [_ _ _ [{:keys [result]}]] [{:result result}])] (mt/with-test-user nil (pulse/send-pulse! pulse))) @@ -118,7 +117,7 @@ :enabled :true :recipients [{:id (mt/user->id :rasta) :email "rasta@metabase.com"}]}]}) - (let [[{html :content} {attachment :content}] (get-in @mt/inbox ["rasta@metabase.com" 0 :body])] + (let [[{html :content} {_icon :content} {attachment :content}] (get-in @mt/inbox ["rasta@metabase.com" 0 :body])] (testing "email" (is (= 22 (html->row-count html)))) @@ -146,7 +145,7 @@ (mt/with-test-user nil (pulse/send-pulse! 
(models.pulse/retrieve-pulse pulse-id))) (let [email-results @mt/inbox - [{html :content} {attachment :content}] (get-in email-results ["rasta@metabase.com" 0 :body])] + [{html :content} {_icon :attachment} {attachment :content}] (get-in email-results ["rasta@metabase.com" 0 :body])] (testing "email" (is (= 22 (html->row-count html)))) diff --git a/enterprise/backend/test/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions_test.clj b/enterprise/backend/test/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions_test.clj index f3b223cc40fc..73393962db62 100644 --- a/enterprise/backend/test/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sandbox/query_processor/middleware/row_level_restrictions_test.clj @@ -107,7 +107,7 @@ {:query (mt/native-query {:query (format-honeysql - {:select [(identifier :venues :name)] + {:select [(identifier :venues :id) (identifier :venues :name)] :from [(identifier :venues)] :order-by [(identifier :venues :id)]})})}) @@ -271,10 +271,11 @@ (run-venues-count-query))))) (testing "Make sure that you can still use a SQL-based GTAP without needing to have SQL read perms for the Database" - (is (= [["Red Medicine"] ["Stout Burgers & Beers"]] - (mt/rows + (is (= [[1 "Red Medicine"] + [2 "Stout Burgers & Beers"]] + (mt/formatted-rows [int str] (mt/with-gtaps {:gtaps {:venues (venue-names-native-gtap-def)}} - (mt/run-mbql-query venues {:limit 2})))))) + (mt/run-mbql-query venues {:limit 2, :order-by [[:asc [:field (mt/id :venues :id)]]]})))))) (testing (str "When no card_id is included in the GTAP, should default to a query against the table, with the GTAP " "criteria applied") @@ -294,7 +295,7 @@ (mt/with-temp* [Collection [collection] Card [card {:collection_id (u/the-id collection)}]] (mt/with-group [group] - (perms/revoke-permissions! (perms-group/all-users) (mt/id)) + (perms/revoke-data-perms! 
(perms-group/all-users) (mt/id)) (perms/grant-collection-read-permissions! group collection) (mt/with-test-user :rasta (binding [qp.perms/*card-id* (u/the-id card)] @@ -327,14 +328,16 @@ (defn- row-level-restrictions-fk-drivers "Drivers to test row-level restrictions against foreign keys with. Includes BigQuery, which for whatever reason does - not normally have FK tests ran for it." + not normally have FK tests ran for it. Excludes Presto JDBC, because that driver does NOT support fetching foreign + keys from the JDBC metadata, even though we enable the feature in the UI." [] (cond-> (mt/normal-drivers-with-feature :nested-queries :foreign-keys) - (@tx.env/test-drivers :bigquery) (conj :bigquery))) + (@tx.env/test-drivers :bigquery) (conj :bigquery) + true (disj :presto-jdbc))) (deftest e2e-fks-test (mt/test-drivers (row-level-restrictions-fk-drivers) - (mt/with-bigquery-fks + (mt/with-bigquery-fks :bigquery (testing (str "1 - Creates a GTAP filtering question, looking for any checkins happening on or after 2014\n" "2 - Apply the `user` attribute, looking for only our user (i.e. `user_id` = 5)\n" "3 - Checkins are related to Venues, query for checkins, grouping by the Venue's price\n" @@ -674,7 +677,7 @@ {:gtaps {:reviews {:remappings {"user_id" [:dimension $product_id]}}} :attributes {"user_id" 1}}) ;; grant full data perms for products - (perms/grant-permissions! (perms-group/all-users) (perms/object-path + (perms/grant-permissions! (perms-group/all-users) (perms/data-perms-path (mt/id) (db/select-one-field :schema Table :id (mt/id :products)) (mt/id :products))) @@ -798,7 +801,7 @@ {:gtaps {:orders {:remappings {:user_id [:dimension $orders.user_id]}}} :attributes {:user_id "1"}}) ;; make sure the sandboxed group can still access the Products table, which is referenced below. - (perms/grant-permissions! &group (perms/object-path (mt/id) "PUBLIC" (mt/id :products))) + (perms/grant-permissions! 
&group (perms/data-perms-path (mt/id) "PUBLIC" (mt/id :products))) (letfn [(do-tests [] ;; create a query based on the sandboxed Table (testing "should be able to run the query. Results should come back with correct metadata" @@ -859,10 +862,6 @@ (is (seq metadata)) (db/update! Card card-id :result_metadata metadata))) -(defn- unset-query-metadata-for-gtap-card! [group table-name] - (let [card-id (db/select-one-field :card_id GroupTableAccessPolicy :group_id (u/the-id group), :table_id (mt/id table-name))] - (db/update! Card card-id :result_metadata nil))) - (deftest native-fk-remapping-test (testing "FK remapping should still work for questions with native sandboxes (EE #520)" (mt/dataset sample-dataset @@ -914,8 +913,11 @@ (mt/rows (mt/run-mbql-query orders {:limit 1}))))))))))))) (deftest pivot-query-test - ;; sample-dataset doesn't work on Redshift yet -- see #14784 - (mt/test-drivers (disj (mt/normal-drivers-with-feature :foreign-keys :nested-queries :left-join) :redshift) + (mt/test-drivers (disj + (mt/normal-drivers-with-feature :foreign-keys :nested-queries :left-join) + ;; this test relies on a FK relation between $product_id->products.category, so skip for Presto + ;; JDBC, because that driver doesn't support resolving FKs from the JDBC metadata + :presto-jdbc) (testing "Pivot table queries should work with sandboxed users (#14969)" (mt/dataset sample-dataset (mt/with-gtaps {:gtaps (mt/$ids @@ -923,28 +925,20 @@ :products {:remappings {:user_cat [:dimension $products.category]}}}) :attributes {:user_id 1, :user_cat "Widget"}} (perms/grant-permissions! 
&group (perms/table-query-path (Table (mt/id :people)))) - ;; not sure why Snowflake has slightly different results - (is (= (if (= driver/*driver* :snowflake) - [["Twitter" "Widget" 0 510.82] - ["Twitter" nil 0 407.93] - [nil "Widget" 1 510.82] - [nil nil 1 407.93] - ["Twitter" nil 2 918.75] - [nil nil 3 918.75]] - (->> [["Twitter" nil 0 401.51] - ["Twitter" "Widget" 0 498.59] - [nil nil 1 401.51] - [nil "Widget" 1 498.59] - ["Twitter" nil 2 900.1] - [nil nil 3 900.1]] - (sort-by (let [nil-first? (mt/sorts-nil-first? driver/*driver*) - sort-str (fn [s] - (cond - (some? s) s - nil-first? "A" - :else "Z"))] - (fn [[x y group]] - [group (sort-str x) (sort-str y)]))))) + (is (= (->> [["Twitter" nil 0 401.51] + ["Twitter" "Widget" 0 498.59] + [nil nil 1 401.51] + [nil "Widget" 1 498.59] + ["Twitter" nil 2 900.1] + [nil nil 3 900.1]] + (sort-by (let [nil-first? (mt/sorts-nil-first? driver/*driver* :type/Text) + sort-str (fn [s] + (cond + (some? s) s + nil-first? "A" + :else "Z"))] + (fn [[x y group]] + [group (sort-str x) (sort-str y)])))) (mt/formatted-rows [str str int 2.0] (qp.pivot/run-pivot-query (mt/mbql-query orders diff --git a/enterprise/backend/test/metabase_enterprise/sandbox/test_util.clj b/enterprise/backend/test/metabase_enterprise/sandbox/test_util.clj index f95485ea8eff..3363b1606ab2 100644 --- a/enterprise/backend/test/metabase_enterprise/sandbox/test_util.clj +++ b/enterprise/backend/test/metabase_enterprise/sandbox/test_util.clj @@ -6,7 +6,7 @@ [metabase.models.permissions-group :as perms-group] [metabase.models.table :refer [Table]] [metabase.models.user :refer [User]] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.server.middleware.session :as mw.session] [metabase.test.data :as data] [metabase.test.data.impl :as data.impl] @@ -62,13 +62,13 @@ (defn do-with-gtaps-for-user [args-fn test-user-name-or-user-id f] (letfn [(thunk [] ;; remove perms for 
All Users group - (perms/revoke-permissions! (perms-group/all-users) (data/db)) + (perms/revoke-data-perms! (perms-group/all-users) (data/db)) ;; create new perms group (users/with-group-for-user [group test-user-name-or-user-id] (let [{:keys [gtaps attributes]} (s/validate WithGTAPsArgs (args-fn))] ;; set user login_attributes (with-user-attributes test-user-name-or-user-id attributes - (metastore-test/with-metastore-token-features #{:sandboxes} + (premium-features-test/with-premium-features #{:sandboxes} ;; create Cards/GTAPs from defs (do-with-gtap-defs group gtaps (fn [] diff --git a/enterprise/backend/test/metabase_enterprise/search/scoring_test.clj b/enterprise/backend/test/metabase_enterprise/search/scoring_test.clj index f808b23312d2..6b38ee3b5467 100644 --- a/enterprise/backend/test/metabase_enterprise/search/scoring_test.clj +++ b/enterprise/backend/test/metabase_enterprise/search/scoring_test.clj @@ -4,6 +4,18 @@ [metabase-enterprise.search.scoring :as ee-scoring] [metabase.search.scoring :as scoring])) +(deftest verified-score-test + (let [score #'ee-scoring/verified-score + item (fn [id status] {:moderated_status status + :id id + :model "card"}) + score (fn [items] (into [] (map :id) (reverse (sort-by score items))))] + (testing "verification bumps result" + ;; stable sort all with score 0 and then reverse to get descending rather than ascending + (is (= [3 2 1] (score [(item 1 nil) (item 2 nil) (item 3 nil)]))) + ;; verified item is promoted + (is (= [1 3 2] (score [(item 1 "verified") (item 2 nil) (item 3 nil)])))))) + (deftest official-collection-tests (testing "it should bump up the value of items in official collections" ;; using the ee implementation that isn't wrapped by enable-enhancements? 
check @@ -36,4 +48,28 @@ "examples of custom expressions" "custom expression examples"] (map :name (sort-by ee-score [a b c - (assoc d :collection_authority_level "official")])))))))) + (assoc d :collection_authority_level "official")]))))))) + (testing "It should bump up the value of verified items" + (let [search-string "foo" + dashboard-count #(assoc % :dashboardcard_count 0) + ee-score (comp :score + (partial scoring/score-and-result ee-scoring/scoring-impl search-string) + dashboard-count) + os-score (comp :score + (partial scoring/score-and-result scoring/oss-score-impl search-string) + dashboard-count) + labeled-results {:a {:name "foobar" :model "card" :id :a} + :b {:name "foo foo" :model "card" :id :b} + :c {:name "foo foo foo" :model "card" :id :c}} + {:keys [a b c]} labeled-results] + (doseq [item [a b c]] + (is (> (ee-score (assoc item :moderated_status "verified")) (ee-score item)) + (str "Item not greater for model: " (:model item)))) + (let [items (shuffle [a b c])] + (is (= (sort-by os-score items) (sort-by ee-score items)))) + ;; a is sorted lowest here (sort-by is ascending) + (is (= [:a :c :b] (map :id (sort-by ee-score [a b c])))) + ;; a is verified and is now last or highest score + (is (= [:c :b :a] + (map :id + (sort-by ee-score [(assoc a :moderated_status "verified") b c]))))))) diff --git a/enterprise/backend/test/metabase_enterprise/serialization/cmd_test.clj b/enterprise/backend/test/metabase_enterprise/serialization/cmd_test.clj new file mode 100644 index 000000000000..4827803f64c8 --- /dev/null +++ b/enterprise/backend/test/metabase_enterprise/serialization/cmd_test.clj @@ -0,0 +1,73 @@ +(ns metabase-enterprise.serialization.cmd-test + (:require [clojure.test :as t] + [clojure.tools.logging :as log] + [metabase-enterprise.serialization.load :as load] + [metabase.cmd :as cmd] + [metabase.db.schema-migrations-test.impl :as schema-migrations-test.impl] + [metabase.models :refer [Card User]] + [metabase.models.permissions-group :as group] + 
[metabase.test :as mt] + [metabase.test.fixtures :as fixtures] + [metabase.util :as u] + [toucan.db :as db]) + (:import java.util.UUID)) + +(t/use-fixtures :once (fixtures/initialize :db :test-users)) + +(defmacro ^:private with-empty-h2-app-db + "Runs `body` under a new, blank, H2 application database (randomly named), in which all model tables have been + created via Liquibase schema migrations. After `body` is finished, the original app DB bindings are restored. + + Makes use of functionality in the `metabase.db.schema-migrations-test.impl` namespace since that already does + what we need." + [& body] + `(schema-migrations-test.impl/with-temp-empty-app-db [conn# :h2] + (schema-migrations-test.impl/run-migrations-in-range! conn# [0 999999]) ; this should catch all migrations) + ;; since the actual group defs are not dynamic, we need with-redefs to change them here + (with-redefs [group/all-users (#'group/get-or-create-magic-group! group/all-users-group-name) + group/admin (#'group/get-or-create-magic-group! group/admin-group-name) + group/metabot (#'group/get-or-create-magic-group! group/metabot-group-name)] + ~@body))) + +(t/deftest no-collections-test + (t/testing "Dumping a card when there are no active collection should work properly (#16931)" + ;; we need a blank H2 app db, temporarily, in order to run this test (to ensure we have no collections present, + ;; while also not deleting or messing with any existing user personal collections that the real app DB might have, + ;; since that will interfere with other tests) + ;; making use of the functionality in the metabase.db.schema-migrations-test.impl namespace for this (since it + ;; already does what we need) + (with-empty-h2-app-db + ;; create a single dummy user, to own a card + (let [user (db/simple-insert! 
User + :email "nobody@nowhere.com" + :first_name (mt/random-name) + :last_name (mt/random-name) + :password (str (UUID/randomUUID)) + :date_joined :%now + :is_active true + :is_superuser true)] + ;; then the card itself + (db/simple-insert! Card + :name "Single Card" + :display "Single Card" + :dataset_query {} + :creator_id (u/the-id user) + :visualization_settings "{}" + :created_at :%now + :updated_at :%now) + ;; serialize "everything" (which should just be the card and user), which should succeed if #16931 is fixed + (cmd/dump (str (System/getProperty "java.io.tmpdir") "/" (mt/random-name))))))) + +(t/deftest blank-target-db-test + (t/testing "Loading a dump into an empty app DB still works (#16639)" + (let [dump-dir (str (System/getProperty "java.io.tmpdir") "/" (mt/random-name)) + user-pre-insert-called? (atom false)] + (log/infof "Dumping to %s" dump-dir) + (cmd/dump dump-dir "--user" "crowberto@metabase.com") + (with-empty-h2-app-db + (with-redefs [load/pre-insert-user (fn [user] + (reset! user-pre-insert-called? true) + (assoc user :password "test-password"))] + (cmd/load dump-dir "--mode" :update + "--on-error" :abort) + (t/is (true? 
@user-pre-insert-called?))))))) diff --git a/enterprise/backend/test/metabase_enterprise/serialization/load_test.clj b/enterprise/backend/test/metabase_enterprise/serialization/load_test.clj index c07196d26529..e3386243540a 100644 --- a/enterprise/backend/test/metabase_enterprise/serialization/load_test.clj +++ b/enterprise/backend/test/metabase_enterprise/serialization/load_test.clj @@ -2,15 +2,13 @@ (:refer-clojure :exclude [load]) (:require [clojure.data :as diff] [clojure.java.io :as io] + [clojure.string :as str] [clojure.test :refer [deftest is testing use-fixtures]] [metabase-enterprise.serialization.cmd :refer [dump load]] [metabase-enterprise.serialization.test-util :as ts] [metabase.models :refer [Card Collection Dashboard DashboardCard DashboardCardSeries Database Dependency Dimension Field FieldValues Metric NativeQuerySnippet Pulse PulseCard PulseChannel Segment Table User]] - [metabase.test.data.users :as test-users] - [metabase.util :as u] - [toucan.db :as db] [metabase.query-processor :as qp] [metabase.query-processor.middleware.permissions :as qp.perms] [metabase.query-processor.store :as qp.store] @@ -18,8 +16,11 @@ [metabase.shared.models.visualization-settings-test :as mb.viz-test] [metabase.shared.util.log :as log] [metabase.test :as mt] + [metabase.test.data.users :as test-users] [metabase.test.fixtures :as fixtures] - [metabase.util.i18n :refer [deferred-trs trs]]) + [metabase.util :as u] + [metabase.util.i18n :refer [trs]] + [toucan.db :as db]) (:import org.apache.commons.io.FileUtils)) (use-fixtures :once @@ -110,12 +111,36 @@ (type entity))) (defmethod assert-loaded-entity (type Card) - [card {:keys [query-results collections]}] - (query-res-match query-results card) - (collection-names-match collections card)) + [{card-name :name :as card} {:keys [query-results collections]}] + (testing (format "Card: %s" card-name) + (query-res-match query-results card) + (collection-names-match collections card) + (when (= "My Nested Card" card-name) + 
(testing "Visualization settings for a Card were persisted correctly" + (let [vs (:visualization_settings card) + col (-> (:column_settings vs) + first) + [col-key col-val] col + col-ref (mb.viz/parse-db-column-ref col-key) + {:keys [::mb.viz/field-id]} col-ref + [{col-name :name col-field-ref :fieldRef col-enabled :enabled :as tbl-col} & _] (:table.columns vs) + [_ col-field-id _] col-field-ref] + (is (some? (:table.columns vs))) + (is (some? (:column_settings vs))) + (is (integer? field-id)) + (is (= "latitude" (-> (db/select-one-field :name Field :id field-id) + str/lower-case))) + (is (= {:show_mini_bar true + :column_title "Parallel"} col-val)) + (is (= "Venue Category" col-name)) + (is (true? col-enabled)) + (is (integer? col-field-id) "fieldRef within table.columns was properly serialized and loaded") + (is (= "category_id" (-> (db/select-one-field :name Field :id col-field-id) + str/lower-case)))))) + card)) (defn- collection-parent-name [collection] - (let [[_ parent-id] (re-matches #".*/(\d+)/$" (:location collection))] + (let [[_ ^String parent-id] (re-matches #".*/(\d+)/$" (:location collection))] (db/select-one-field :name Collection :id (Integer. parent-id)))) (defmethod assert-loaded-entity (type Collection) @@ -130,8 +155,10 @@ (collection-parent-name collection))) "Deeply Nested Personal Collection" (is (= "Nested Personal Collection" (collection-parent-name collection))) - "Felicia's Personal Collection" (is false "Should not have loaded different user's PC") - "Felicia's Nested Collection" (is false "Should not have loaded different user's PC")) + "Felicia's Personal Collection" (is (nil? (:name collection)) + "Should not have loaded different user's PC") + "Felicia's Nested Collection" (is (nil? 
(:name collection)) + "Should not have loaded different user's PC")) collection) (defmethod assert-loaded-entity (type NativeQuerySnippet) @@ -175,12 +202,12 @@ ;; check that the linked :card_id matches the expected name for each in the series ;; based on the entities declared in test_util.clj (let [series-pos (:position series) - expected-name (case series-pos + expected-name (case (int series-pos) 0 "My Card" 1 "My Nested Card" 2 ts/root-card-name)] (is (= expected-name (db/select-one-field :name Card :id (:card_id series)))) - (case series-pos + (case (int series-pos) 1 (testing "Top level click action was preserved for dashboard card" (let [viz-settings (:visualization_settings dashcard) @@ -240,7 +267,7 @@ ;; in case it already exists (u/ignore-exceptions (delete-directory! dump-dir)) - (mt/test-drivers (-> (mt/normal-drivers-with-feature :basic-aggregations :binning :expressions) + (mt/test-drivers (-> (mt/normal-drivers-with-feature :basic-aggregations :binning :expressions :foreign-keys) ;; We will run this roundtrip test against any database supporting these features ^ except ;; certain ones for specific reasons, outlined below. ;; @@ -261,7 +288,10 @@ :sqlserver ; ORDER BY not allowed not allowed in derived tables (subselects) :vertica ; bare table name doesn't work; it's test_data_venues instead of venues :sqlite ; foreign-keys is not supported by this driver - :sparksql)) ; foreign-keys is not supported by this driver + :sparksql ; foreign-keys is not supported by this driver + ;; foreign-keys is not supported by the below driver even though it has joins + :bigquery-cloud-sdk + )) (let [fingerprint (ts/with-world (qp.store/fetch-and-store-database! 
db-id) @@ -345,7 +375,7 @@ [Card (Card card-id-with-native-snippet)] [Card (Card card-join-card-id)]]})] (with-world-cleanup - (load dump-dir {:on-error :continue :mode :update}) + (load dump-dir {:on-error :continue :mode :skip}) (mt/with-db (db/select-one Database :name ts/temp-db-name) (doseq [[model entity] (:entities fingerprint)] (testing (format "%s \"%s\"" (type model) (:name entity)) diff --git a/enterprise/backend/test/metabase_enterprise/serialization/names_test.clj b/enterprise/backend/test/metabase_enterprise/serialization/names_test.clj index bb9afe31a785..32b5ed823316 100644 --- a/enterprise/backend/test/metabase_enterprise/serialization/names_test.clj +++ b/enterprise/backend/test/metabase_enterprise/serialization/names_test.clj @@ -3,8 +3,8 @@ [metabase-enterprise.serialization.names :as names] [metabase-enterprise.serialization.test-util :as ts] [metabase.models :refer [Card Collection Dashboard Database Field Metric NativeQuerySnippet Segment Table]] - [metabase.util :as u] - [metabase.test :as mt])) + [metabase.test :as mt] + [metabase.util :as u])) (deftest safe-name-test (are [s expected] (= (names/safe-name {:name s}) expected) diff --git a/enterprise/backend/test/metabase_enterprise/serialization/serialize_test.clj b/enterprise/backend/test/metabase_enterprise/serialization/serialize_test.clj index d79f62834f83..0ada5e3610ed 100644 --- a/enterprise/backend/test/metabase_enterprise/serialization/serialize_test.clj +++ b/enterprise/backend/test/metabase_enterprise/serialization/serialize_test.clj @@ -3,8 +3,8 @@ [clojure.test :refer :all] [metabase-enterprise.serialization.serialize :as serialize] [metabase-enterprise.serialization.test-util :as ts] - [metabase.models :refer [Card Collection Dashboard Database Field Metric NativeQuerySnippet Segment - Table]])) + [metabase.models :refer [Card Collection Dashboard Database Dependency Field Metric NativeQuerySnippet + Segment Table]])) (defn- all-ids-are-fully-qualified-names? 
[m] @@ -31,7 +31,8 @@ [Table table-id] [Field numeric-field-id] [Database db-id] - [NativeQuerySnippet snippet-id]]] + [NativeQuerySnippet snippet-id] + [Dependency dependency-id]]] (testing (name model) (let [serialization (serialize/serialize (model id))] (testing (format "\nserialization = %s" (pr-str serialization)) diff --git a/enterprise/backend/test/metabase_enterprise/serialization/test_util.clj b/enterprise/backend/test/metabase_enterprise/serialization/test_util.clj index 901b44dea159..4a00d54bdbf1 100644 --- a/enterprise/backend/test/metabase_enterprise/serialization/test_util.clj +++ b/enterprise/backend/test/metabase_enterprise/serialization/test_util.clj @@ -1,15 +1,14 @@ (ns metabase-enterprise.serialization.test-util (:require [metabase-enterprise.serialization.names :as names] - [metabase.models :refer [Card Collection Dashboard DashboardCard DashboardCardSeries Database Field Metric - NativeQuerySnippet Pulse PulseCard Segment Table User]] + [metabase.models :refer [Card Collection Dashboard DashboardCard DashboardCardSeries Database Dependency + Field Metric NativeQuerySnippet Pulse PulseCard Segment Table User]] [metabase.models.collection :as collection] [metabase.query-processor.store :as qp.store] [metabase.shared.models.visualization-settings :as mb.viz] [metabase.test :as mt] [metabase.test.data :as data] [toucan.db :as db] - [toucan.util.test :as tt] - [metabase-enterprise.serialization.names :refer [fully-qualified-name]])) + [toucan.util.test :as tt])) (def root-card-name "My Root Card \\ with a/nasty: (*) //n`me ' * ? 
\" < > | ŠĐž") (def temp-db-name "Fingerprint test-data copy") @@ -122,6 +121,11 @@ [:field ~'category-pk-field-id {:join-alias "cat"}]]}]}}}] + Dependency [{~'dependency-id :id} {:model "Card" + :model_id ~'card-id + :dependent_on_model "Segment" + :dependent_on_id ~'segment-id + :created_at :%now}] Card [{~'card-arch-id :id} {;:archived true :table_id ~'table-id @@ -146,7 +150,16 @@ :collection_id ~'collection-id :dataset_query {:type :query :database ~'db-id - :query {:source-table (str "card__" ~'card-id)}}}] + :query {:source-table (str "card__" ~'card-id)}} + :visualization_settings + {:table.columns [{:name "Venue Category" + :fieldRef [:field ~'category-field-id nil] + :enabled true}] + :column_settings {(keyword (format + "[\"ref\",[\"field\",%d,null]]" + ~'latitude-field-id)) + {:show_mini_bar true + :column_title "Parallel"}}}}] Card [{~'card-id-nested-query :id} {:table_id ~'table-id :name "My Nested Query Card" diff --git a/enterprise/backend/test/metabase_enterprise/sso/integrations/jwt_test.clj b/enterprise/backend/test/metabase_enterprise/sso/integrations/jwt_test.clj index 3ac45958159c..22845dcc4e3c 100644 --- a/enterprise/backend/test/metabase_enterprise/sso/integrations/jwt_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sso/integrations/jwt_test.clj @@ -9,7 +9,7 @@ [metabase.models.permissions-group :as group :refer [PermissionsGroup]] [metabase.models.permissions-group-membership :refer [PermissionsGroupMembership]] [metabase.models.user :refer [User]] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.test :as mt] [metabase.test.fixtures :as fixtures] [metabase.util :as u] @@ -32,23 +32,23 @@ (deftest sso-prereqs-test (testing "SSO requests fail if SAML hasn't been enabled" (mt/with-temporary-setting-values [jwt-enabled false] - (saml-test/with-valid-metastore-token + (saml-test/with-valid-premium-features-token (is (= "SSO has not 
been enabled and/or configured" (saml-test/client :get 400 "/auth/sso")))) - (testing "SSO requests fail if they don't have a valid metastore token" - (metastore-test/with-metastore-token-features nil + (testing "SSO requests fail if they don't have a valid premium-features token" + (premium-features-test/with-premium-features nil (is (= "SSO requires a valid token" (saml-test/client :get 403 "/auth/sso"))))))) (testing "SSO requests fail if SAML is enabled but hasn't been configured" - (saml-test/with-valid-metastore-token + (saml-test/with-valid-premium-features-token (mt/with-temporary-setting-values [jwt-enabled true] (is (= "JWT SSO has not been enabled and/or configured" (saml-test/client :get 400 "/auth/sso")))))) (testing "The IdP provider certificate must also be included for SSO to be configured" - (saml-test/with-valid-metastore-token + (saml-test/with-valid-premium-features-token (mt/with-temporary-setting-values [jwt-enabled true jwt-identity-provider-uri default-idp-uri] (is (= "JWT SSO has not been enabled and/or configured" @@ -63,7 +63,7 @@ (defmacro ^:private with-jwt-default-setup [& body] `(disable-other-sso-types (fn [] - (saml-test/with-valid-metastore-token + (saml-test/with-valid-premium-features-token (saml-test/call-with-login-attributes-cleared! 
(fn [] (call-with-default-jwt-config diff --git a/enterprise/backend/test/metabase_enterprise/sso/integrations/saml_test.clj b/enterprise/backend/test/metabase_enterprise/sso/integrations/saml_test.clj index 8b492a9eb330..b03efd7aef9c 100644 --- a/enterprise/backend/test/metabase_enterprise/sso/integrations/saml_test.clj +++ b/enterprise/backend/test/metabase_enterprise/sso/integrations/saml_test.clj @@ -9,7 +9,7 @@ [metabase.models.permissions-group-membership :refer [PermissionsGroupMembership]] [metabase.models.user :refer [User]] [metabase.public-settings :as public-settings] - [metabase.public-settings.metastore-test :as metastore-test] + [metabase.public-settings.premium-features-test :as premium-features-test] [metabase.server.middleware.session :as mw.session] [metabase.test :as mt] [metabase.test.fixtures :as fixtures] @@ -32,11 +32,11 @@ (use-fixtures :each disable-other-sso-types) -(defmacro with-valid-metastore-token - "Stubs the `metastore/enable-sso?` function to simulate a valid token. This needs to be included to test any of the +(defmacro with-valid-premium-features-token + "Stubs the `premium-features/enable-sso?` function to simulate a valid token. This needs to be included to test any of the SSO features" [& body] - `(metastore-test/with-metastore-token-features #{:sso} + `(premium-features-test/with-premium-features #{:sso} ~@body)) (defn client @@ -107,26 +107,26 @@ g9oYBkdxlhK9zZvkjCgaLCen+0aY67A=") (testing "make sure our test certificate is actually valid" (is (some? 
(#'sso-settings/validate-saml-idp-cert default-idp-cert))))) -(deftest require-valid-metastore-token-test - (testing "SSO requests fail if they don't have a valid metastore token" - (metastore-test/with-metastore-token-features #{} +(deftest require-valid-premium-features-token-test + (testing "SSO requests fail if they don't have a valid premium-features token" + (premium-features-test/with-premium-features #{} (is (= "SSO requires a valid token" (client :get 403 "/auth/sso")))))) (deftest require-saml-enabled-test (testing "SSO requests fail if SAML hasn't been enabled" - (with-valid-metastore-token + (with-valid-premium-features-token (mt/with-temporary-setting-values [saml-enabled false] (is (some? (client :get 400 "/auth/sso")))))) (testing "SSO requests fail if SAML is enabled but hasn't been configured" - (with-valid-metastore-token + (with-valid-premium-features-token (mt/with-temporary-setting-values [saml-enabled true saml-identity-provider-uri nil] (is (some? (client :get 400 "/auth/sso")))))) (testing "The IDP provider certificate must also be included for SSO to be configured" - (with-valid-metastore-token + (with-valid-premium-features-token (mt/with-temporary-setting-values [saml-enabled true saml-identity-provider-uri default-idp-uri saml-identity-provider-certificate nil] @@ -148,7 +148,7 @@ g9oYBkdxlhK9zZvkjCgaLCen+0aY67A=") (u/ignore-exceptions (db/update-where! User {} :login_attributes nil))))) (defmacro ^:private with-saml-default-setup [& body] - `(with-valid-metastore-token + `(with-valid-premium-features-token (call-with-login-attributes-cleared! 
(fn [] (call-with-default-saml-config diff --git a/enterprise/frontend/src/metabase-enterprise/advanced_config/index.js b/enterprise/frontend/src/metabase-enterprise/advanced_config/index.js new file mode 100644 index 000000000000..bad7e1d117b3 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/advanced_config/index.js @@ -0,0 +1,23 @@ +import { t } from "ttag"; +import _ from "underscore"; +import { updateIn } from "icepick"; +import { PLUGIN_ADMIN_SETTINGS_UPDATES } from "metabase/plugins"; +import { hasPremiumFeature } from "metabase-enterprise/settings"; + +if (hasPremiumFeature("advanced_config")) { + PLUGIN_ADMIN_SETTINGS_UPDATES.push(sections => + updateIn(sections, ["general", "settings"], settings => { + const index = settings.findIndex(({ key }) => key === "admin-email"); + + return [ + ..._.head(settings, index + 1), + { + key: "subscription-allowed-domains", + display_name: t`Approved domains for notifications`, + type: "string", + }, + ..._.tail(settings, index + 1), + ]; + }), + ); +} diff --git a/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.jsx b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.jsx new file mode 100644 index 000000000000..e9661f5cdbe5 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.jsx @@ -0,0 +1,66 @@ +import React from "react"; +import { t, jt } from "ttag"; + +import { color } from "metabase/lib/colors"; +import MetabaseSettings from "metabase/lib/settings"; + +import { + PermissionIcon, + DataPermissionsHelpRoot, + DataPermissionsHelpFooter, + DataPermissionsHelpContent, + DataPermissionsHelpLink, + DataPermissionsHelpLinkIcon, +} from "./DataPermissionsHelp.styled"; + +export const DataPermissionsHelp = () => ( + + +

{t`About data permissions`}

+

{t`Each of your user groups can have a level of access for each of your databases on the tables they contain.`}

+

{jt`Users can be members of multiple groups, and are given the ${( + {t`most permissive`} + )} level of access for a database or table across all the groups they’re a member of.`}

+

{t`Unless a user group's access for a given database is set to “block", they’ll be able to view any saved question based on that data if they have access to the collection it’s saved in.`}

+

{t`Access levels`}

+ +

+ + {t`Unrestricted access`} +

+

{t`Users can use the visual query builder to ask questions based on all tables in this database. A user group must have Unrestricted access for a database if you want to give them access to the SQL/native query editor.`}

+ +

+ + {t`Granular access`} +

+

{t`Restrict user access to specific tables in a database. When you select this option, you’ll be taken to the table-level view of that database to set the access level for each table.`}

+ +

+ + {t`No self-service access`} +

+

{t`Prevent users from creating new ad hoc queries or questions based on this data, or from seeing this data in the Browse Data screen. Users with this level of access can still see saved questions and charts based on this data in Collections they have access to.`}

+ +

+ + {t`Block`} +

+

{t`Ensure users can’t ever see the data from this database regardless of their permissions at the Collection level. Keep in mind that if a user belongs to another group that does have data access, that setting will take precedence, and the user's access will not be blocked.`}

+ +

{t`Only available in certain Metabase plans.`}

+
+ + + + + {t`Learn more about permissions`} + + +
+); diff --git a/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.styled.jsx b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.styled.jsx new file mode 100644 index 000000000000..c6fcb81d37a6 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp.styled.jsx @@ -0,0 +1,71 @@ +import styled from "styled-components"; +import Icon from "metabase/components/Icon"; +import { color, lighten } from "metabase/lib/colors"; +import ExternalLink from "metabase/components/ExternalLink"; + +export const DataPermissionsHelpRoot = styled.div` + h2 { + margin-top: 2rem; + margin-bottom: 1rem; + font-size: 18px; + line-height: 20px; + + &:first-of-type { + margin-top: 8px; + } + } + + h3 { + margin-top: 1.5rem; + font-size: 14px; + line-height: 20px; + } + + h2 + h3 { + margin-top: 1rem; + } + + p { + font-size: 13px; + line-height: 18px; + margin: 0.5rem 0; + } +`; + +export const PermissionIcon = styled(Icon).attrs({ size: 16 })` + padding-right: 0.375rem; + vertical-align: text-bottom; + color: ${props => color(props.color)}; +`; + +export const DataPermissionsHelpContent = styled.div` + padding: 1rem 2rem; +`; + +export const DataPermissionsHelpFooter = styled.footer` + padding: 2rem; + border-top: 1px solid ${color("border")}; +`; + +export const DataPermissionsHelpLink = styled(ExternalLink)` + display: flex; + align-items: center; + padding: 16px 24px; + font-size: 14px; + font-weight: 700; + line-height: 20px; + color: ${color("text-dark")}; + border: 1px solid ${color("border")}; + border-radius: 8px; + transition: all 200ms; + + &:hover { + border-color: ${color("brand")}; + background-color: ${lighten("brand", 0.6)}; + } +`; + +export const DataPermissionsHelpLinkIcon = styled(Icon)` + color: ${color("text-light")}; + margin-right: 1rem; +`; diff --git 
a/enterprise/frontend/src/metabase-enterprise/advanced_permissions/index.js b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/index.js new file mode 100644 index 000000000000..2e5daf30cdae --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/advanced_permissions/index.js @@ -0,0 +1,30 @@ +import { t } from "ttag"; + +import { hasPremiumFeature } from "metabase-enterprise/settings"; +import { DataPermissionsHelp } from "metabase-enterprise/advanced_permissions/components/DataPermissionsHelp/DataPermissionsHelp"; +import { PLUGIN_ADVANCED_PERMISSIONS } from "metabase/plugins"; + +const BLOCK_PERMISSION_OPTION = { + label: t`Block`, + value: "block", + icon: "close", + iconColor: "danger", +}; + +if (hasPremiumFeature("advanced_permissions")) { + PLUGIN_ADVANCED_PERMISSIONS.DataPermissionsHelp = DataPermissionsHelp; + + const addBlockPermissionWhenSelected = (options, value) => + value === BLOCK_PERMISSION_OPTION.value + ? [...options, BLOCK_PERMISSION_OPTION] + : options; + + PLUGIN_ADVANCED_PERMISSIONS.addTablePermissionOptions = addBlockPermissionWhenSelected; + PLUGIN_ADVANCED_PERMISSIONS.addSchemaPermissionOptions = addBlockPermissionWhenSelected; + PLUGIN_ADVANCED_PERMISSIONS.addDatabasePermissionOptions = options => [ + ...options, + BLOCK_PERMISSION_OPTION, + ]; + PLUGIN_ADVANCED_PERMISSIONS.isBlockPermission = value => + value === BLOCK_PERMISSION_OPTION.value; +} diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.jsx index 776fad5a170c..dfce219eb80a 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.jsx @@ -3,39 +3,50 @@ import React from "react"; import Radio from "metabase/components/Radio"; +import { + AuditContentRoot, + AuditContentHeading, + AuditContentTabs, + 
AuditContentData, +} from "./AuditContent.styled"; + export default class AuditContent extends React.Component { render() { const { title, subtitle, tabs, children, location, ...props } = this.props; // HACK: remove the last component to get the base page path. won't work with tabs using IndexRoute (IndexRedirect ok) const pagePath = location && location.pathname.replace(/\/\w+$/, ""); + + const hasHeading = title || subtitle; return ( -
-
-

{title}

- {subtitle &&
{subtitle}
} -
+ + {hasHeading && ( + + {title &&

{title}

} + {subtitle &&
{subtitle}
} +
+ )} {tabs && ( -
+ tab.component)} // hide tabs that aren't implemented + options={tabs.filter(tab => tab.component)} // hide tabs that are not implemented optionValueFn={tab => `${pagePath}/${tab.path}`} optionNameFn={tab => tab.title} optionKeyFn={tab => tab.path} onChange={this.props.router.push} /> -
+ )} -
+ {/* This allows the parent component to inject props into child route components, e.x. userId */} {React.Children.count(children) === 1 && // NOTE: workaround for https://github.com/facebook/react/issues/12136 !Array.isArray(children) ? React.cloneElement(React.Children.only(children), props) : children} -
-
+ + ); } } diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.styled.jsx new file mode 100644 index 000000000000..2c78eb91b0e3 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditContent.styled.jsx @@ -0,0 +1,23 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; + +export const AuditContentRoot = styled.div` + flex: 1 0 auto; + flex-direction: column; + padding-bottom: 2rem; +`; + +export const AuditContentHeading = styled.div` + padding: 2rem 2rem 0 2rem; +`; + +export const AuditContentTabs = styled.div` + border-bottom: 1px solid ${color("border")}; + padding: 0 2rem; + margin-top: 0.5rem; +`; + +export const AuditContentData = styled.div` + height: 100%; + padding: 0 2rem; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.jsx new file mode 100644 index 000000000000..f4a6503e2b04 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.jsx @@ -0,0 +1,95 @@ +import React, { useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import { formatChannels } from "metabase/lib/notifications"; +import Button from "metabase/components/Button"; +import CheckBox from "metabase/components/CheckBox"; +import FormMessage from "metabase/components/form/FormMessage"; +import ModalContent from "metabase/components/ModalContent"; +import { CheckboxLabel } from "./AuditNotificationDeleteModal.styled"; + +const propTypes = { + item: PropTypes.object.isRequired, + type: PropTypes.oneOf(["alert", "pulse"]).isRequired, + onDelete: PropTypes.func, 
+ onClose: PropTypes.func, +}; + +const AuditNotificationDeleteModal = ({ item, type, onDelete, onClose }) => { + const [error, setError] = useState(); + const [checked, setChecked] = useState(false); + + const handleDeleteClick = async () => { + try { + await onDelete(item, true); + onClose(true); + } catch (error) { + setError(error); + } + }; + + const handleCheckedChange = event => { + setChecked(event.target.checked); + }; + + const handleClose = () => onClose(true); + + const modalFooter = [ + error ? : null, + , + , + ]; + const checkboxLabel = ( + {getChannelMessage(item, type)} + ); + + return ( + + + + ); +}; + +AuditNotificationDeleteModal.propTypes = propTypes; + +const getTitleMessage = (item, type) => { + switch (type) { + case "alert": + return t`Delete this alert?`; + case "pulse": + return t`Delete this subscription to ${item.name}?`; + } +}; + +const getChannelMessage = (item, type) => { + const channelMessage = formatChannels(item.channels); + + switch (type) { + case "alert": + return t`This alert will no longer be ${channelMessage}.`; + case "pulse": + return t`This dashboard will no longer be ${channelMessage}.`; + } +}; + +export default AuditNotificationDeleteModal; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.styled.jsx new file mode 100644 index 000000000000..d87da339081a --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/AuditNotificationDeleteModal.styled.jsx @@ -0,0 +1,8 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import CheckBox from "metabase/components/CheckBox"; + +export const CheckboxLabel = styled(CheckBox.Label)` + color: ${color("danger")}; + font-size: 1.12em; +`; diff --git 
a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/index.js new file mode 100644 index 000000000000..512ebf74457c --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationDeleteModal/index.js @@ -0,0 +1 @@ +export { default } from "./AuditNotificationDeleteModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.jsx new file mode 100644 index 000000000000..e53fc697a4c4 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.jsx @@ -0,0 +1,107 @@ +import React, { useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Button from "metabase/components/Button"; +import FormMessage from "metabase/components/form/FormMessage"; +import ModalContent from "metabase/components/ModalContent"; +import UserPicker from "metabase/components/UserPicker"; + +const propTypes = { + item: PropTypes.object.isRequired, + type: PropTypes.oneOf(["alert", "pulse"]).isRequired, + users: PropTypes.array.isRequired, + onUpdate: PropTypes.func, + onDelete: PropTypes.func, + onClose: PropTypes.func, +}; + +const AuditNotificationEditModal = ({ + item, + type, + users, + onUpdate, + onDelete, + onClose, +}) => { + const [channels, setChannels] = useState(item.channels); + const [error, setError] = useState(); + const hasRecipients = channels.some(c => c.recipients.length > 0); + + const handleRecipientsChange = (recipients, index) => { + const newChannels = [...channels]; + newChannels[index] = { ...channels[index], recipients }; + setChannels(newChannels); + }; + + const 
handleUpdateClick = async () => { + try { + await onUpdate(item, channels); + onClose(true); + } catch (error) { + setError(error); + } + }; + + const handleDeleteClick = () => { + onDelete(item); + }; + + const handleClose = () => onClose(true); + + const modalFooter = [ + error ? : null, + , + , + , + ]; + + return ( + + {channels.map((channel, index) => ( + handleRecipientsChange(recipients, index)} + /> + ))} + + ); +}; + +AuditNotificationEditModal.propTypes = propTypes; + +const getTitleMessage = (item, type) => { + switch (type) { + case "alert": + return t`${item.card.name} alert recipients`; + case "pulse": + return t`${item.name} recipients`; + } +}; + +const getDeleteMessage = type => { + switch (type) { + case "alert": + return t`Delete this alert`; + case "pulse": + return t`Delete this subscription`; + } +}; + +export default AuditNotificationEditModal; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.styled.jsx new file mode 100644 index 000000000000..d1e323f6d074 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/AuditNotificationEditModal.styled.jsx @@ -0,0 +1,11 @@ +import styled from "styled-components"; +import Button from "metabase/components/Button"; +import { space } from "metabase/styled-components/theme"; + +export const ModalButton = styled(Button)` + margin-right: ${({ fullwidth }) => (fullwidth ? 
"auto" : "")}; + + &:not(:first-child) { + margin-left: ${space(2)}; + } +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/index.js new file mode 100644 index 000000000000..ea5d8983572e --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditNotificationEditModal/index.js @@ -0,0 +1 @@ +export { default } from "./AuditNotificationEditModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.jsx index 101a28c09192..8ae3681ac29a 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.jsx @@ -1,7 +1,10 @@ import React from "react"; import PropTypes from "prop-types"; +import Button from "metabase/components/Button"; + import _ from "underscore"; +import { AuditParametersInput } from "./AuditParameters.styled"; const DEBOUNCE_PERIOD = 300; @@ -12,7 +15,15 @@ const propTypes = { placeholder: PropTypes.string.isRequired, }), ), + buttons: PropTypes.arrayOf( + PropTypes.shape({ + key: PropTypes.string.isRequired, + onClick: PropTypes.func.isRequired, + label: PropTypes.string.isRequired, + }), + ), children: PropTypes.func, + hasResults: PropTypes.bool, }; export default class AuditParameters extends React.Component { @@ -24,37 +35,55 @@ export default class AuditParameters extends React.Component { }; } - changeValue = (key: string, value: string) => { + changeValue = (key, value) => { this.setState({ inputValues: { ...this.state.inputValues, [key]: value }, }); this.commitValueDebounced(key, value); }; - commitValueDebounced = _.debounce((key: string, value: string) => { + commitValueDebounced = _.debounce((key, value) => 
{ this.setState({ committedValues: { ...this.state.committedValues, [key]: value }, }); }, DEBOUNCE_PERIOD); render() { - const { parameters, children } = this.props; + const { parameters, children, buttons, hasResults } = this.props; const { inputValues, committedValues } = this.state; + + const isEmpty = + hasResults === false && + inputValues && + Object.values(inputValues).every(v => v === ""); + return (
- {parameters.map(({ key, placeholder }) => ( - ( + { - this.changeValue(key, e.target.value); + disabled={isEmpty || disabled} + onChange={value => { + this.changeValue(key, value); }} + icon={icon} /> ))} + {buttons?.map(({ key, label, disabled, onClick }) => ( + + ))}
{children && children(committedValues)}
diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.styled.jsx new file mode 100644 index 000000000000..5186c316d8d1 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditParameters.styled.jsx @@ -0,0 +1,12 @@ +import styled from "styled-components"; + +import TextInput from "metabase/components/TextInput"; + +export const AuditParametersInput = styled(TextInput)` + display: inline-flex; + width: 240px; + + & + & { + margin-left: 1rem; + } +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditSidebar.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditSidebar.jsx index 1d6f1556ca9d..c5169e8b50c1 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditSidebar.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/AuditSidebar.jsx @@ -1,5 +1,6 @@ /* eslint-disable react/prop-types */ import React from "react"; +import { t } from "ttag"; import { IndexLink } from "react-router"; import Link from "metabase/components/Link"; @@ -54,21 +55,22 @@ const AuditSidebar = ({ className, style, children }: Props) => ( const AuditAppSidebar = (props: Props) => ( - {/* - - */} - - + + - - - - + + + + - - - - + + + + + ); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.jsx new file mode 100644 index 000000000000..dd7c5cde4383 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.jsx @@ -0,0 +1,53 @@ +import React, { useCallback, useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Button from 
"metabase/components/Button"; +import ModalContent from "metabase/components/ModalContent"; +import FormMessage from "metabase/components/form/FormMessage"; +import { ModalMessage } from "./UnsubscribeUserForm.styled"; + +const propTypes = { + user: PropTypes.object.isRequired, + onUnsubscribe: PropTypes.func, + onClose: PropTypes.func, +}; + +const UnsubscribeUserForm = ({ user, onUnsubscribe, onClose }) => { + const [error, setError] = useState(); + + const handleConfirmClick = useCallback(async () => { + try { + await onUnsubscribe(user); + onClose(); + } catch (error) { + setError(error); + } + }, [user, onUnsubscribe, onClose]); + + return ( + : null, + , + , + ]} + onClose={onClose} + > + + {t`This will delete any dashboard subscriptions or alerts ${user.common_name} has created, and remove them as a recipient from any other subscriptions or alerts.`} + + + {t`This does not affect email distribution lists that are managed outside of Metabase.`} + + + ); +}; + +UnsubscribeUserForm.propTypes = propTypes; + +export default UnsubscribeUserForm; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.styled.jsx new file mode 100644 index 000000000000..748448f0e643 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.styled.jsx @@ -0,0 +1,7 @@ +import styled from "styled-components"; + +export const ModalMessage = styled.div` + &:not(:last-child) { + margin-bottom: 1rem; + } +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.unit.spec.js new file mode 100644 index 000000000000..1844a95fe400 --- /dev/null +++ 
b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/UnsubscribeUserForm.unit.spec.js @@ -0,0 +1,54 @@ +import React from "react"; +import { render, screen, waitFor } from "@testing-library/react"; +import UnsubscribeUserForm from "./UnsubscribeUserForm"; + +const getUser = () => ({ + id: 1, + common_name: "John Doe", +}); + +describe("UnsubscribeUserForm", () => { + it("should close on successful submit", () => { + const user = getUser(); + const onUnsubscribe = jest.fn().mockResolvedValue(); + const onClose = jest.fn(); + + render( + , + ); + + screen.getByText("Unsubscribe").click(); + + waitFor(() => { + expect(onUnsubscribe).toHaveBeenCalled(); + expect(onClose).toHaveBeenCalled(); + }); + }); + + it("should display a message on submit failure", () => { + const user = getUser(); + const error = { data: { message: "error" } }; + const onUnsubscribe = jest.fn().mockRejectedValue(); + const onClose = jest.fn(); + + render( + , + ); + + screen.getByText("Unsubscribe").click(); + + waitFor(() => { + expect(onUnsubscribe).toHaveBeenCalled(); + expect(onClose).not.toHaveBeenCalled(); + expect(screen.getByText(error.data.message)).toBeInTheDocument(); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/index.js new file mode 100644 index 000000000000..2d548055fc84 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/components/UnsubscribeUserForm/index.js @@ -0,0 +1 @@ +export { default } from "./UnsubscribeUserForm"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/AuditAlertDeleteModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/AuditAlertDeleteModal.jsx new file mode 100644 index 000000000000..0bb4328aff24 --- /dev/null +++ 
b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/AuditAlertDeleteModal.jsx @@ -0,0 +1,23 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Alerts from "metabase/entities/alerts"; +import AuditNotificationDeleteModal from "../../components/AuditNotificationDeleteModal"; + +const mapStateToProps = (state, { alert }) => ({ + item: alert, + type: "alert", +}); + +const mapDispatchToProps = { + onDelete: alert => Alerts.actions.setArchived(alert, true), +}; + +export default _.compose( + Alerts.load({ + id: (state, props) => Number.parseInt(props.params.alertId), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(AuditNotificationDeleteModal); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/index.js new file mode 100644 index 000000000000..234e762b8d3a --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertDeleteModal/index.js @@ -0,0 +1 @@ +export { default } from "./AuditAlertDeleteModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/AuditAlertEditModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/AuditAlertEditModal.jsx new file mode 100644 index 000000000000..11bf8424aa7b --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/AuditAlertEditModal.jsx @@ -0,0 +1,28 @@ +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import _ from "underscore"; +import Alerts from "metabase/entities/alerts"; +import Users from "metabase/entities/users"; +import AuditNotificationEditModal from "../../components/AuditNotificationEditModal"; + +const mapStateToProps = (state, { alert }) => ({ + item: alert, + type: "alert", +}); + +const 
mapDispatchToProps = { + onUpdate: (alert, channels) => Alerts.actions.setChannels(alert, channels), + onDelete: alert => + push(`/admin/audit/subscriptions/alerts/${alert.id}/delete`), +}; + +export default _.compose( + Alerts.load({ + id: (state, props) => Number.parseInt(props.params.alertId), + }), + Users.loadList(), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(AuditNotificationEditModal); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/index.js new file mode 100644 index 000000000000..0e165a7bf2dd --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertEditModal/index.js @@ -0,0 +1 @@ +export { default } from "./AuditAlertEditModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/AuditAlertTable.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/AuditAlertTable.jsx new file mode 100644 index 000000000000..4da7ab4b7fa4 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/AuditAlertTable.jsx @@ -0,0 +1,22 @@ +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import { t } from "ttag"; +import { AuditEntitiesTable } from "../AuditEntitiesTable"; +import * as AlertCards from "../../lib/cards/alerts"; + +const mapStateToProps = (state, props) => ({ + table: AlertCards.table(), + placeholder: t`Filter by question name`, + getExtraDataForClick: () => ({ type: "alert" }), + entities: state.entities.alerts, +}); + +const mapDispatchToProps = { + onRemoveRow: ({ pulse_id }) => + push(`/admin/audit/subscriptions/alerts/${pulse_id}/delete`), +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(AuditEntitiesTable); diff --git 
a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/index.js new file mode 100644 index 000000000000..93ec06ea0a13 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditAlertTable/index.js @@ -0,0 +1 @@ +export { default } from "./AuditAlertTable"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditDashboard.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditDashboard.jsx index d222bfd13eeb..e3c342828afe 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditDashboard.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditDashboard.jsx @@ -9,7 +9,7 @@ import DashboardData from "metabase/dashboard/hoc/DashboardData"; const DashboardWithData = DashboardData(Dashboard); -import { AuditMode } from "../lib/util"; +import { AuditMode } from "../lib/mode"; import type { AuditCard } from "../types"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/AuditEntitiesTable.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/AuditEntitiesTable.jsx new file mode 100644 index 000000000000..467ca81329cc --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/AuditEntitiesTable.jsx @@ -0,0 +1,22 @@ +import React from "react"; +import PropTypes from "prop-types"; +import _ from "underscore"; + +import AuditTableWithSearch from "../AuditTableWithSearch"; +import { usePrevious } from "metabase/hooks/use-previous"; + +const propTypes = { + entities: PropTypes.array, +}; + +export const AuditEntitiesTable = ({ entities, ...rest }) => { + const previousEntities = usePrevious(entities); + + const shouldReload = + previousEntities?.length === entities?.length && + !_.isEqual(previousEntities, 
entities); + + return ; +}; + +AuditEntitiesTable.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/index.js new file mode 100644 index 000000000000..c125725c068e --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditEntitiesTable/index.js @@ -0,0 +1 @@ +export * from "./AuditEntitiesTable"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/AuditSubscriptionDeleteModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/AuditSubscriptionDeleteModal.jsx new file mode 100644 index 000000000000..d5eee26ba500 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/AuditSubscriptionDeleteModal.jsx @@ -0,0 +1,23 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Pulses from "metabase/entities/pulses"; +import AuditNotificationDeleteModal from "../../components/AuditNotificationDeleteModal"; + +const mapStateToProps = (state, { pulse }) => ({ + item: pulse, + type: "alert", +}); + +const mapDispatchToProps = { + onDelete: pulse => Pulses.actions.setArchived(pulse, true), +}; + +export default _.compose( + Pulses.load({ + id: (state, props) => Number.parseInt(props.params.pulseId), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(AuditNotificationDeleteModal); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/index.js new file mode 100644 index 000000000000..48113ab0e9eb --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionDeleteModal/index.js @@ -0,0 +1 @@ 
+export { default } from "./AuditSubscriptionDeleteModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/AuditSubscriptionEditModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/AuditSubscriptionEditModal.jsx new file mode 100644 index 000000000000..9598a0963c86 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/AuditSubscriptionEditModal.jsx @@ -0,0 +1,28 @@ +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import _ from "underscore"; +import Pulses from "metabase/entities/pulses"; +import Users from "metabase/entities/users"; +import AuditNotificationEditModal from "../../components/AuditNotificationEditModal"; + +const mapStateToProps = (state, { pulse }) => ({ + item: pulse, + type: "pulse", +}); + +const mapDispatchToProps = { + onUpdate: (pulse, channels) => Pulses.actions.setChannels(pulse, channels), + onDelete: alert => + push(`/admin/audit/subscriptions/subscriptions/${alert.id}/delete`), +}; + +export default _.compose( + Pulses.load({ + id: (state, props) => Number.parseInt(props.params.pulseId), + }), + Users.loadList(), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(AuditNotificationEditModal); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/index.js new file mode 100644 index 000000000000..995c595d11ef --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionEditModal/index.js @@ -0,0 +1 @@ +export { default } from "./AuditSubscriptionEditModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/AuditSubscriptionTable.jsx 
b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/AuditSubscriptionTable.jsx new file mode 100644 index 000000000000..132b5d5e7040 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/AuditSubscriptionTable.jsx @@ -0,0 +1,22 @@ +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import { t } from "ttag"; +import * as SubscriptionCards from "../../lib/cards/subscriptions"; +import { AuditEntitiesTable } from "../AuditEntitiesTable"; + +const mapStateToProps = (state, props) => ({ + table: SubscriptionCards.table(), + placeholder: t`Filter by dashboard name`, + getExtraDataForClick: () => ({ type: "subscription" }), + entities: state.entities.pulses, +}); + +const mapDispatchToProps = { + onRemoveRow: ({ pulse_id }) => + push(`/admin/audit/subscriptions/subscriptions/${pulse_id}/delete`), +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(AuditEntitiesTable); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/index.js new file mode 100644 index 000000000000..e0162dadb3b0 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditSubscriptionTable/index.js @@ -0,0 +1 @@ +export { default } from "./AuditSubscriptionTable"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.jsx index 1c8f744d1755..4312d39cc120 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.jsx @@ -12,37 +12,49 @@ import Question from "metabase-lib/lib/Question"; import { getMetadata } from "metabase/selectors/metadata"; import { 
usePagination } from "metabase/hooks/use-pagination"; -import { AuditMode } from "../lib/util"; +import { AuditMode } from "../lib/mode"; import QuestionLoadAndDisplay from "./QuestionLoadAndDisplay"; import "./AuditTableVisualization"; +import { PaginationControlsContainer } from "./AuditTable.styled"; const mapStateToProps = state => ({ metadata: getMetadata(state), }); -const mapDispatchToProps = { - onChangeLocation: push, -}; - const DEFAULT_PAGE_SIZE = 100; AuditTable.propTypes = { metadata: PropTypes.object.isRequired, table: PropTypes.object.isRequired, - onChangeLocation: PropTypes.func.isRequired, pageSize: PropTypes.number.isRequired, + reload: PropTypes.bool, + children: PropTypes.node, + dispatch: PropTypes.func.isRequired, + onLoad: PropTypes.func, + mode: PropTypes.shape({ + name: PropTypes.string.isRequired, + drills: PropTypes.func.isRequired, + }), }; function AuditTable({ metadata, table, - onChangeLocation, pageSize = DEFAULT_PAGE_SIZE, + mode = AuditMode, + children, + dispatch, + onLoad, ...rest }) { const [loadedCount, setLoadedCount] = useState(0); const { handleNextPage, handlePreviousPage, page } = usePagination(); + const handleOnLoad = results => { + setLoadedCount(results[0].row_count); + onLoad(results); + }; + const card = chain(table.card) .assoc("display", "audit-table") .assocIn(["dataset_query", "limit"], pageSize) @@ -51,6 +63,7 @@ function AuditTable({ const question = new Question(card, metadata); const shouldShowPagination = page > 0 || loadedCount === pageSize; + const handleChangeLocation = url => dispatch(push(url)); return (
@@ -59,13 +72,14 @@ function AuditTable({ className="mt3" question={question} metadata={metadata} - mode={AuditMode} - onChangeLocation={onChangeLocation} + mode={mode} + onChangeLocation={handleChangeLocation} onChangeCardAndRun={() => {}} - onLoad={results => setLoadedCount(results[0].row_count)} + onLoad={handleOnLoad} + dispatch={dispatch} {...rest} /> -
+ {shouldShowPagination && ( )} -
+ + {children}
); } -export default _.compose( - connect( - mapStateToProps, - mapDispatchToProps, - ), -)(AuditTable); +export default _.compose(connect(mapStateToProps))(AuditTable); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.styled.jsx new file mode 100644 index 000000000000..18408a6e6a2b --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTable.styled.jsx @@ -0,0 +1,9 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; + +export const PaginationControlsContainer = styled.div` + display: flex; + justify-content: flex-end; + padding-top: 1rem; + border-top: 1px solid ${color("border")}; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.jsx index de646a03035e..604bad334902 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.jsx @@ -1,5 +1,8 @@ import React from "react"; import PropTypes from "prop-types"; +import { t } from "ttag"; +import _ from "underscore"; +import cx from "classnames"; import { registerVisualization } from "metabase/visualizations/index"; @@ -10,29 +13,32 @@ import Table from "metabase/visualizations/visualizations/Table"; import EmptyState from "metabase/components/EmptyState"; import Icon from "metabase/components/Icon"; +import CheckBox from "metabase/components/CheckBox"; +import { RemoveRowButton } from "./AuditTableVisualization.styled"; +import { getRowValuesByColumns, getColumnName } from "../lib/mode"; import NoResults from "assets/img/no_results.svg"; -import { t } from "ttag"; - -import _ from "underscore"; -import cx from "classnames"; - -const 
getColumnName = column => column.remapped_to || column.name; - const propTypes = { series: PropTypes.array, visualizationIsClickable: PropTypes.func, onVisualizationClick: PropTypes.func, onSortingChange: PropTypes.func, + onRemoveRow: PropTypes.func, settings: PropTypes.object, isSortable: PropTypes.bool, sorting: PropTypes.shape({ column: PropTypes.string.isRequired, isAscending: PropTypes.bool.isRequired, }), + isSelectable: PropTypes.bool, + rowChecked: PropTypes.object, + onAllSelectClick: PropTypes.func, + onRowSelectClick: PropTypes.func, }; +const ROW_ID_IDX = 0; + export default class AuditTableVisualization extends React.Component { static identifier = "audit-table"; static noHeader = true; @@ -42,6 +48,14 @@ export default class AuditTableVisualization extends React.Component { static settings = Table.settings; static columnSettings = Table.columnSettings; + state = { + rerender: {}, + }; + + constructor(props) { + super(props); + } + handleColumnHeaderClick = column => { const { isSortable, onSortingChange, sorting } = this.props; @@ -57,6 +71,23 @@ export default class AuditTableVisualization extends React.Component { }); }; + handleAllSelectClick = (e, rows) => { + const { onAllSelectClick } = this.props; + this.setState({ rerender: {} }); + onAllSelectClick({ ...e, rows }); + }; + + handleRowSelectClick = (e, row, rowIndex) => { + const { onRowSelectClick } = this.props; + this.setState({ rerender: {} }); + onRowSelectClick({ ...e, row: row, rowIndex: rowIndex }); + }; + + handleRemoveRowClick = (row, cols) => { + const rowData = getRowValuesByColumns(row, cols); + this.props.onRemoveRow(rowData); + }; + render() { const { series: [ @@ -69,8 +100,12 @@ export default class AuditTableVisualization extends React.Component { onVisualizationClick, settings, isSortable, + isSelectable, + rowChecked, + onRemoveRow, } = this.props; + const canRemoveRows = !!onRemoveRow; const columnIndexes = settings["table.columns"] .filter(({ enabled }) => enabled) 
.map(({ name }) => _.findIndex(cols, col => col.name === name)); @@ -83,11 +118,18 @@ export default class AuditTableVisualization extends React.Component { /> ); } - return ( + {isSelectable && ( + + )} {columnIndexes.map(colIndex => { const column = cols[colIndex]; const isSortedByColumn = @@ -119,12 +161,30 @@ export default class AuditTableVisualization extends React.Component { {rows.map((row, rowIndex) => ( + {isSelectable && ( + + )} + {columnIndexes.map(colIndex => { const value = row[colIndex]; const column = cols[colIndex]; const clicked = { column, value, origin: { row, cols } }; const clickable = visualizationIsClickable(clicked); - const columnSettings = settings.column(column); + const columnSettings = { + ...settings.column(column), + ...settings["table.columns"][colIndex], + }; return ( ); })} + + {canRemoveRows && ( + + )} ))} diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.styled.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.styled.jsx new file mode 100644 index 000000000000..56a93fddf0ad --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableVisualization.styled.jsx @@ -0,0 +1,7 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import IconButtonWrapper from "metabase/components/IconButtonWrapper"; + +export const RemoveRowButton = styled(IconButtonWrapper)` + color: ${color("text-light")}; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableWithSearch.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableWithSearch.jsx index c0f929c6130d..31452a94483b 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableWithSearch.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/AuditTableWithSearch.jsx @@ -1,4 +1,7 @@ import React from "react"; +import 
PropTypes from "prop-types"; + +import Icon from "metabase/components/Icon"; import AuditTable from "./AuditTable"; import AuditParameters from "../components/AuditParameters"; @@ -6,20 +9,18 @@ import AuditParameters from "../components/AuditParameters"; import { t } from "ttag"; import { updateIn } from "icepick"; -import type { AuditDashCard } from "../types"; - -type Props = { - placeholder?: string, - table: AuditDashCard, +const propTypes = { + placeholder: PropTypes.string, + table: PropTypes.object, }; // AuditTable but with a default search parameter that gets appended to `args` -const AuditTableWithSearch = ({ - placeholder = t`Search`, - table, - ...props -}: Props) => ( - +const AuditTableWithSearch = ({ placeholder = t`Search`, table, ...props }) => ( + }, + ]} + > {({ search }) => ( ); +AuditTableWithSearch.propTypes = propTypes; + export default AuditTableWithSearch; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/QuestionLoadAndDisplay.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/QuestionLoadAndDisplay.jsx index 6666eabf8631..df1de4e1fedb 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/QuestionLoadAndDisplay.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/QuestionLoadAndDisplay.jsx @@ -1,34 +1,57 @@ -/* eslint-disable react/prop-types */ -import React from "react"; - +import React, { useEffect, useRef, useImperativeHandle } from "react"; +import PropTypes from "prop-types"; import QuestionResultLoader from "metabase/containers/QuestionResultLoader"; import LoadingAndErrorWrapper from "metabase/components/LoadingAndErrorWrapper"; import Visualization from "metabase/visualizations/components/Visualization"; +const propTypes = { + question: PropTypes.object, + keepPreviousWhileLoading: PropTypes.bool, + reload: PropTypes.bool, + onLoad: PropTypes.func, + reloadRef: PropTypes.shape({ current: PropTypes.func }), +}; + const 
QuestionLoadAndDisplay = ({ question, - onLoad, keepPreviousWhileLoading, + reload, + onLoad, + reloadRef, ...props -}) => ( - - {({ loading, error, ...resultProps }) => { - const shouldShowLoader = loading && resultProps.results == null; - return ( - - - - ); - }} - -); +}) => { + const reloadFnRef = useRef(null); + + useImperativeHandle(reloadRef, () => () => reloadFnRef.current?.()); + + useEffect(() => { + reload && reloadFnRef.current?.(); + }, [reload]); + + return ( + + {({ loading, error, reload, ...resultProps }) => { + const shouldShowLoader = loading && resultProps.results == null; + reloadFnRef.current = reload; + + return ( + + + + ); + }} + + ); +}; + +QuestionLoadAndDisplay.propTypes = propTypes; export default QuestionLoadAndDisplay; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/UnsubscribeUserModal.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/UnsubscribeUserModal.jsx new file mode 100644 index 000000000000..3303781ef94c --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/UnsubscribeUserModal.jsx @@ -0,0 +1,24 @@ +import { connect } from "react-redux"; +import { t } from "ttag"; +import _ from "underscore"; +import Users from "metabase/entities/users"; +import { addUndo } from "metabase/redux/undo"; +import { AuditApi } from "../../lib/services"; +import UnsubscribeUserForm from "../../components/UnsubscribeUserForm"; + +const mapDispatchToProps = dispatch => ({ + onUnsubscribe: async ({ id }) => { + await AuditApi.unsubscribe_user({ id }); + dispatch(addUndo({ message: t`Unsubscribe successful` })); + }, +}); + +export default _.compose( + Users.load({ + id: (state, props) => Number.parseInt(props.params.userId), + }), + connect( + null, + mapDispatchToProps, + ), +)(UnsubscribeUserForm); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/index.js 
b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/index.js new file mode 100644 index 000000000000..5f15a735bebd --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/containers/UnsubscribeUserModal/index.js @@ -0,0 +1 @@ +export { default } from "./UnsubscribeUserModal"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/index.js index 884359248e15..774278101093 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/index.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/index.js @@ -1,11 +1,23 @@ -import { PLUGIN_ADMIN_NAV_ITEMS, PLUGIN_ADMIN_ROUTES } from "metabase/plugins"; - -import { hasPremiumFeature } from "metabase-enterprise/settings"; import { t } from "ttag"; - -import getAuditRoutes from "./routes"; +import { + PLUGIN_ADMIN_NAV_ITEMS, + PLUGIN_ADMIN_ROUTES, + PLUGIN_ADMIN_USER_MENU_ITEMS, + PLUGIN_ADMIN_USER_MENU_ROUTES, +} from "metabase/plugins"; +import { hasPremiumFeature } from "metabase-enterprise/settings"; +import getAuditRoutes, { getUserMenuRotes } from "./routes"; if (hasPremiumFeature("audit_app")) { PLUGIN_ADMIN_NAV_ITEMS.push({ name: t`Audit`, path: "/admin/audit" }); PLUGIN_ADMIN_ROUTES.push(getAuditRoutes); + + PLUGIN_ADMIN_USER_MENU_ITEMS.push(user => [ + { + title: t`Unsubscribe from all subscriptions / alerts`, + link: `/admin/people/${user.id}/unsubscribe`, + }, + ]); + + PLUGIN_ADMIN_USER_MENU_ROUTES.push(getUserMenuRotes); } diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/alerts.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/alerts.js new file mode 100644 index 000000000000..226063eddb3a --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/alerts.js @@ -0,0 +1,27 @@ +export const table = () => ({ + card: { + name: "Alerts", + display: "table", + dataset_query: { + type: "internal", + fn: 
"metabase-enterprise.audit-app.pages.alerts/table", + args: [], + }, + visualization_settings: { + "table.columns": [ + { name: "card_id", enabled: true }, + { name: "pulse_id", enabled: false }, + { name: "recipients", enabled: true }, + { name: "subscription_type", enabled: true }, + { name: "collection_id", enabled: true }, + { name: "frequency", enabled: true }, + { name: "creator_id", enabled: true }, + { + name: "created_at", + enabled: true, + date_format: "M/D/YYYY", + }, + ], + }, + }, +}); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboard_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboard_detail.js index a37c8ac9056f..f371582cc196 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboard_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboard_detail.js @@ -4,7 +4,7 @@ export const viewsByTime = (dashboardId: number) => ({ display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboard-detail/views-by-time", + fn: "metabase-enterprise.audit-app.pages.dashboard-detail/views-by-time", args: [dashboardId, "day"], // FIXME: should this be automatic? 
}, }, @@ -16,7 +16,8 @@ export const revisionHistory = (dashboardId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboard-detail/revision-history", + fn: + "metabase-enterprise.audit-app.pages.dashboard-detail/revision-history", args: [dashboardId], }, visualization_settings: { @@ -36,7 +37,7 @@ export const cards = (dashboardId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboard-detail/cards", + fn: "metabase-enterprise.audit-app.pages.dashboard-detail/cards", args: [dashboardId], }, }, @@ -48,7 +49,7 @@ export const auditLog = (dashboardId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboard-detail/audit-log", + fn: "metabase-enterprise.audit-app.pages.dashboard-detail/audit-log", args: [dashboardId], }, visualization_settings: { diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboards.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboards.js index 43fa6dc454c5..2596f383f9a9 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboards.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/dashboards.js @@ -1,23 +1,11 @@ -// DEPRECATED: use `views-and-saves-by-time ` instead. 
-export const viewsPerDay = () => ({ - card: { - name: "Total dashboard views per day", - display: "line", - dataset_query: { - type: "internal", - fn: "metabase-enterprise.audit.pages.dashboards/views-per-day", - args: [], - }, - }, -}); - export const viewsAndSavesByTime = () => ({ card: { name: "Dashboard views and saves per day", display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboards/views-and-saves-by-time", + fn: + "metabase-enterprise.audit-app.pages.dashboards/views-and-saves-by-time", args: ["day"], }, visualization_settings: { @@ -33,7 +21,7 @@ export const mostPopularAndSpeed = () => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.dashboards/most-popular-with-avg-speed", + "metabase-enterprise.audit-app.pages.dashboards/most-popular-with-avg-speed", args: [], }, }, @@ -45,7 +33,8 @@ export const mostCommonQuestions = () => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboards/most-common-questions", + fn: + "metabase-enterprise.audit-app.pages.dashboards/most-common-questions", args: [], }, }, @@ -57,7 +46,7 @@ export const table = (searchString?: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.dashboards/table", + fn: "metabase-enterprise.audit-app.pages.dashboards/table", args: [], }, visualization_settings: { @@ -67,6 +56,7 @@ export const table = (searchString?: string) => ({ { name: "average_execution_time_ms", enabled: true }, { name: "cards", enabled: true }, { name: "saved_by_id", enabled: true }, + { name: "cache_ttl", enabled: true }, { name: "public_link", enabled: true, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/database_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/database_detail.js index 89f9d60d61b4..438987f6365e 100644 --- 
a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/database_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/database_detail.js @@ -4,7 +4,7 @@ export const auditLog = (databaseId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.database-detail/audit-log", + fn: "metabase-enterprise.audit-app.pages.database-detail/audit-log", args: [databaseId], }, visualization_settings: { diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/databases.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/databases.js index 99864486fc86..98cebbc4537c 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/databases.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/databases.js @@ -5,7 +5,7 @@ export const totalQueryExecutionsByDb = () => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.databases/total-query-executions-by-db", + "metabase-enterprise.audit-app.pages.databases/total-query-executions-by-db", args: [], }, visualization_settings: { @@ -27,7 +27,7 @@ export const queryExecutionsPerDbPerDay = () => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.databases/query-executions-per-db-per-day", + "metabase-enterprise.audit-app.pages.databases/query-executions-per-db-per-day", args: [], }, visualization_settings: { @@ -43,7 +43,8 @@ export const queryExecutionsByTime = () => ({ display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.databases/query-executions-by-time", + fn: + "metabase-enterprise.audit-app.pages.databases/query-executions-by-time", args: ["day"], }, visualization_settings: { @@ -59,7 +60,7 @@ export const table = (searchString?: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.databases/table", + fn: 
"metabase-enterprise.audit-app.pages.databases/table", args: searchString ? [searchString] : [], }, visualization_settings: { @@ -68,6 +69,7 @@ export const table = (searchString?: string) => ({ { name: "schemas", enabled: true }, { name: "tables", enabled: true }, { name: "sync_schedule", enabled: true }, + { name: "cache_ttl", enabled: true }, { name: "added_on", enabled: true, date_format: "M/D/YYYY, h:mm A" }, ], }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/downloads.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/downloads.js index 0475ffc76c8d..87ef2477ddde 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/downloads.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/downloads.js @@ -4,7 +4,7 @@ export const perDayBySize = () => ({ display: "scatter", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.downloads/per-day-by-size", + fn: "metabase-enterprise.audit-app.pages.downloads/per-day-by-size", args: [], }, visualization_settings: { @@ -21,7 +21,7 @@ export const perUser = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.downloads/per-user", + fn: "metabase-enterprise.audit-app.pages.downloads/per-user", args: [], }, visualization_settings: { @@ -37,7 +37,7 @@ export const bySize = () => ({ display: "bar", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.downloads/by-size", + fn: "metabase-enterprise.audit-app.pages.downloads/by-size", args: [], }, }, @@ -49,7 +49,7 @@ export const table = () => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.downloads/table", + fn: "metabase-enterprise.audit-app.pages.downloads/table", args: [], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/queries.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/queries.js index 
35580beb0db4..672428eeb2c1 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/queries.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/queries.js @@ -5,7 +5,7 @@ export const viewsAndAvgExecutionTimeByDay = () => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.queries/views-and-avg-execution-time-by-day", + "metabase-enterprise.audit-app.pages.queries/views-and-avg-execution-time-by-day", args: [], }, visualization_settings: { @@ -25,7 +25,7 @@ export const mostPopular = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.queries/most-popular", + fn: "metabase-enterprise.audit-app.pages.queries/most-popular", args: [], }, visualization_settings: { @@ -41,7 +41,7 @@ export const slowest = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.queries/slowest", + fn: "metabase-enterprise.audit-app.pages.queries/slowest", args: [], }, visualization_settings: { @@ -51,6 +51,45 @@ export const slowest = () => ({ }, }); +export const bad_table = ( + errorFilter, + dbFilter, + collectionFilter, + sortColumn, + sortDirection, +) => ({ + card: { + name: "Failing Questions", + display: "table", + dataset_query: { + type: "internal", + fn: "metabase-enterprise.audit-app.pages.queries/bad-table", + args: [ + errorFilter, + dbFilter, + collectionFilter, + sortColumn, + sortDirection, + ], + }, + visualization_settings: { + "table.columns": [ + { name: "card_id", enabled: true }, + { name: "error_substr", enabled: true }, + { name: "collection_id", enabled: true }, + { name: "database_id", enabled: true }, + { name: "schema", enabled: true }, + { name: "table_id", enabled: true }, + { name: "last_run_at", enabled: true }, + { name: "total_runs", enabled: true }, + { name: "num_dashboards", enabled: true }, + { name: "user_id", enabled: true }, + { name: "updated_at", enabled: true }, + ], + }, + }, +}); + export 
const table = ( questionFilter, collectionFilter, @@ -62,7 +101,7 @@ export const table = ( display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.queries/table", + fn: "metabase-enterprise.audit-app.pages.queries/table", args: [questionFilter, collectionFilter, sortColumn, sortDirection], }, visualization_settings: { diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/query_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/query_detail.js index a2a749904166..23b23a3b3c6a 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/query_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/query_detail.js @@ -4,7 +4,7 @@ export const details = (queryHash: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.query-detail/details", + fn: "metabase-enterprise.audit-app.pages.query-detail/details", args: [queryHash], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/question_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/question_detail.js index 52024fbbfbf3..59b76e5f5f3b 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/question_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/question_detail.js @@ -5,7 +5,7 @@ export const viewsByTime = questionId => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.question-detail/cached-views-by-time", + "metabase-enterprise.audit-app.pages.question-detail/cached-views-by-time", args: [questionId, "day"], }, visualization_settings: { @@ -34,7 +34,7 @@ export const averageExecutionTime = questionId => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.question-detail/avg-execution-time-by-time", + "metabase-enterprise.audit-app.pages.question-detail/avg-execution-time-by-time", 
 args: [questionId, "day"], }, }, @@ -46,7 +46,8 @@ export const revisionHistory = (questionId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.question-detail/revision-history", + fn: + "metabase-enterprise.audit-app.pages.question-detail/revision-history", args: [questionId], }, visualization_settings: { @@ -66,13 +67,24 @@ export const auditLog = (questionId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.question-detail/audit-log", + fn: "metabase-enterprise.audit-app.pages.question-detail/audit-log", args: [questionId], }, visualization_settings: { "table.columns": [ { name: "user_id", enabled: true }, { name: "when", enabled: true }, + { + name: "what", + enabled: true, + // Mustache template covering the ignore_cache/cached flag combinations; replace it with a helper if more flags are added + markdown_template: ` +{{#json.ignore_cache}}Requested un-cached results{{/json.ignore_cache}} +{{^json.ignore_cache}} +{{#json.cached}}Viewed (cached){{/json.cached}} +{{^json.cached}}Viewed{{/json.cached}} +{{/json.ignore_cache}}`, + }, ], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/schemas.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/schemas.js index 824d207daa83..57c800c87f64 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/schemas.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/schemas.js @@ -4,7 +4,7 @@ export const mostQueried = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.schemas/most-queried", + fn: "metabase-enterprise.audit-app.pages.schemas/most-queried", args: [], }, }, @@ -16,7 +16,7 @@ export const slowestSchemas = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.schemas/slowest-schemas", + fn: 
"metabase-enterprise.audit-app.pages.schemas/slowest-schemas", args: [], }, }, @@ -28,7 +28,7 @@ export const table = (searchString?: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.schemas/table", + fn: "metabase-enterprise.audit-app.pages.schemas/table", args: searchString ? [searchString] : [], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/subscriptions.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/subscriptions.js new file mode 100644 index 000000000000..cdebb07552a1 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/subscriptions.js @@ -0,0 +1,31 @@ +export const table = () => ({ + card: { + name: "Subscriptions", + display: "table", + dataset_query: { + type: "internal", + fn: "metabase-enterprise.audit-app.pages.dashboard-subscriptions/table", + args: [], + }, + visualization_settings: { + "table.columns": [ + { name: "dashboard_id", enabled: true }, + { name: "pulse_id", enabled: false }, + { name: "recipients", enabled: true }, + { name: "subscription_type", enabled: true }, + { name: "collection_id", enabled: true }, + { name: "frequency", enabled: true }, + { name: "creator_id", enabled: true }, + { + name: "created_at", + enabled: true, + date_format: "M/D/YYYY", + }, + { + name: "num_filters", + enabled: true, + }, + ], + }, + }, +}); diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/table_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/table_detail.js index 267e69c2f97e..0f6b6e1b39e5 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/table_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/table_detail.js @@ -4,7 +4,7 @@ export const auditLog = (tableId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.table-detail/audit-log", + fn: 
"metabase-enterprise.audit-app.pages.table-detail/audit-log", args: [tableId], }, visualization_settings: { diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/tables.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/tables.js index ea3e557714c9..14cdb8b5568a 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/tables.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/tables.js @@ -4,7 +4,7 @@ export const mostQueried = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.tables/most-queried", + fn: "metabase-enterprise.audit-app.pages.tables/most-queried", args: [], }, visualization_settings: { @@ -20,7 +20,7 @@ export const leastQueried = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.tables/least-queried", + fn: "metabase-enterprise.audit-app.pages.tables/least-queried", args: [], }, visualization_settings: { @@ -36,7 +36,7 @@ export const table = (searchString?: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.tables/table", + fn: "metabase-enterprise.audit-app.pages.tables/table", args: searchString ? 
[searchString] : [], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/user_detail.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/user_detail.js index b4be3a0e4bdb..b909cc88af44 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/user_detail.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/user_detail.js @@ -4,7 +4,7 @@ export const table = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/table", + fn: "metabase-enterprise.audit-app.pages.user-detail/table", args: [userId], }, }, @@ -16,7 +16,8 @@ export const mostViewedDashboards = (userId: number) => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/most-viewed-dashboards", + fn: + "metabase-enterprise.audit-app.pages.user-detail/most-viewed-dashboards", args: [userId], }, visualization_settings: { @@ -32,7 +33,8 @@ export const mostViewedQuestions = (userId: number) => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/most-viewed-questions", + fn: + "metabase-enterprise.audit-app.pages.user-detail/most-viewed-questions", args: [userId], }, visualization_settings: { @@ -48,7 +50,8 @@ export const objectViewsByTime = (userId: number) => ({ display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/object-views-by-time", + fn: + "metabase-enterprise.audit-app.pages.user-detail/object-views-by-time", args: [userId, "card", "day"], }, }, @@ -58,7 +61,8 @@ export const objectViewsByTime = (userId: number) => ({ display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/object-views-by-time", + fn: + "metabase-enterprise.audit-app.pages.user-detail/object-views-by-time", args: [userId, "dashboard", "day"], }, }, @@ -71,7 +75,7 @@ export const 
queryViews = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/query-views", + fn: "metabase-enterprise.audit-app.pages.user-detail/query-views", args: [userId], }, visualization_settings: { @@ -93,7 +97,7 @@ export const dashboardViews = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/dashboard-views", + fn: "metabase-enterprise.audit-app.pages.user-detail/dashboard-views", args: [userId], }, visualization_settings: { @@ -112,7 +116,7 @@ export const createdDashboards = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/created-dashboards", + fn: "metabase-enterprise.audit-app.pages.user-detail/created-dashboards", args: [userId], }, }, @@ -124,7 +128,7 @@ export const createdQuestions = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/created-questions", + fn: "metabase-enterprise.audit-app.pages.user-detail/created-questions", args: [userId], }, }, @@ -136,7 +140,7 @@ export const downloads = (userId: number) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.user-detail/downloads", + fn: "metabase-enterprise.audit-app.pages.user-detail/downloads", args: [userId], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/users.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/users.js index 814c7b922713..2e02148f3b76 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/users.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/cards/users.js @@ -4,7 +4,7 @@ export const activeAndNewByTime = () => ({ display: "line", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/active-and-new-by-time", + fn: 
"metabase-enterprise.audit-app.pages.users/active-and-new-by-time", args: ["day"], }, visualization_settings: { @@ -26,7 +26,7 @@ export const activeUsersAndQueriesByDay = () => ({ dataset_query: { type: "internal", fn: - "metabase-enterprise.audit.pages.users/active-users-and-queries-by-day", + "metabase-enterprise.audit-app.pages.users/active-users-and-queries-by-day", args: [], }, visualization_settings: { @@ -47,7 +47,7 @@ export const mostActive = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/most-active", + fn: "metabase-enterprise.audit-app.pages.users/most-active", args: [], }, visualization_settings: { @@ -65,7 +65,7 @@ export const mostSaves = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/most-saves", + fn: "metabase-enterprise.audit-app.pages.users/most-saves", args: [], }, visualization_settings: { @@ -81,7 +81,8 @@ export const queryExecutionTimePerUser = () => ({ display: "row", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/query-execution-time-per-user", + fn: + "metabase-enterprise.audit-app.pages.users/query-execution-time-per-user", args: [], }, }, @@ -93,7 +94,7 @@ export const table = (searchString?: string) => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/table", + fn: "metabase-enterprise.audit-app.pages.users/table", args: searchString ? 
[searchString] : [], }, visualization_settings: { @@ -114,7 +115,7 @@ export const auditLog = () => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/query-views", + fn: "metabase-enterprise.audit-app.pages.users/query-views", args: [], }, visualization_settings: { @@ -135,7 +136,7 @@ export const auditLog = () => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/dashboard-views", + fn: "metabase-enterprise.audit-app.pages.users/dashboard-views", args: [], }, }, @@ -148,7 +149,7 @@ export const dashboardViews = () => ({ display: "table", dataset_query: { type: "internal", - fn: "metabase-enterprise.audit.pages.users/dashboard-views", + fn: "metabase-enterprise.audit-app.pages.users/dashboard-views", args: [], }, }, diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/util.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/mode.js similarity index 61% rename from enterprise/frontend/src/metabase-enterprise/audit_app/lib/util.js rename to enterprise/frontend/src/metabase-enterprise/audit_app/lib/mode.js index 7f214845fcb7..42513af79796 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/util.js +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/mode.js @@ -1,14 +1,22 @@ +import { push } from "react-router-redux"; import _ from "underscore"; -import Question from "metabase-lib/lib/Question"; +export const getColumnName = column => column.remapped_to || column.name; -import type { - ClickObject, - QueryMode, -} from "metabase-types/types/Visualization"; +export const getRowValuesByColumns = (row, cols) => + cols.reduce((acc, col, index) => { + const columnName = getColumnName(col); + return { + ...acc, + [columnName]: row[index], + }; + }, {}); -const columnNameToUrl = { +export const columnNameToUrl = { + // No admin page for collections but still want to link to it + collection_id: value => 
`/collection/${value}`, user_id: value => `/admin/audit/member/${value}`, + creator_id: value => `/admin/audit/member/${value}`, viewed_by_id: value => `/admin/audit/member/${value}`, saved_by_id: value => `/admin/audit/member/${value}`, dashboard_id: value => `/admin/audit/dashboard/${value}`, @@ -20,19 +28,24 @@ const columnNameToUrl = { // NOTE: query_hash uses standard Base64 encoding which isn't URL safe so make sure to escape it query_hash: value => `/admin/audit/query/${encodeURIComponent(String(value))}`, + recipients: (_, clicked) => { + const pulseIdIndex = clicked.origin.cols.findIndex( + col => getColumnName(col) === "pulse_id", + ); + const pulseId = clicked.origin.row[pulseIdIndex]; + + return clicked.extraData.type === "subscription" + ? `/admin/audit/subscriptions/subscriptions/${pulseId}/edit` + : `/admin/audit/subscriptions/alerts/${pulseId}/edit`; + }, }; -const AuditDrill = ({ - question, - clicked, -}: { - question: Question, - clicked?: ClickObject, -}) => { +const AuditDrill = ({ question, clicked }) => { if (!clicked) { return []; } const metricAndDimensions = [clicked].concat(clicked.dimensions || []); + for (const { column, value } of metricAndDimensions) { if (column && columnNameToUrl[column.name] != null && value != null) { return [ @@ -40,8 +53,9 @@ const AuditDrill = ({ name: "detail", title: `View this`, default: true, - url() { - return columnNameToUrl[column.name](value); + action() { + const url = columnNameToUrl[column.name](value, clicked); + return push(url); }, }, ]; @@ -72,7 +86,7 @@ const AuditDrill = ({ return []; }; -export const AuditMode: QueryMode = { +export const AuditMode = { name: "audit", drills: () => [AuditDrill], }; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/lib/services.js b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/services.js new file mode 100644 index 000000000000..a75abe4ac6be --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/lib/services.js @@ 
-0,0 +1,5 @@ +import { DELETE } from "metabase/lib/api"; + +export const AuditApi = { + unsubscribe_user: DELETE("/api/ee/audit-app/user/:id/subscriptions"), +}; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditQueryDetail.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditQueryDetail.jsx index d63f3b3363b7..4f6d154e4e00 100644 --- a/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditQueryDetail.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditQueryDetail.jsx @@ -64,6 +64,7 @@ import { getMetadata } from "metabase/selectors/metadata"; import NativeQuery from "metabase-lib/lib/queries/NativeQuery"; +import ExplicitSize from "metabase/components/ExplicitSize"; import { loadMetadataForCard } from "metabase/query_builder/actions"; const mapStateToProps = state => ({ metadata: getMetadata(state) }); @@ -73,13 +74,23 @@ const mapDispatchToProps = { loadMetadataForCard }; mapStateToProps, mapDispatchToProps, ) +@ExplicitSize() class QueryBuilderReadOnly extends React.Component { + state = { + isNativeEditorOpen: false, + }; + + setIsNativeEditorOpen = open => { + this.setState({ isNativeEditorOpen: open }); + }; + componentDidMount() { const { card, loadMetadataForCard } = this.props; loadMetadataForCard(card); } + render() { - const { card, metadata } = this.props; + const { card, metadata, height } = this.props; const question = new Question(card, metadata); const query = question.query(); @@ -91,6 +102,9 @@ class QueryBuilderReadOnly extends React.Component { query={query} location={{ query: {} }} readOnly + viewHeight={height} + isNativeEditorOpen={this.state.isNativeEditorOpen} + setIsNativeEditorOpen={this.setIsNativeEditorOpen} /> ); } else { diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/AuditSubscriptions.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/AuditSubscriptions.jsx new file mode 100644 
index 000000000000..860846f84bac --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/AuditSubscriptions.jsx @@ -0,0 +1,49 @@ +import React from "react"; +import { t } from "ttag"; + +import AuditContent from "../../components/AuditContent"; +import AuditAlertTable from "../../containers/AuditAlertTable"; +import AuditSubscriptionTable from "../../containers/AuditSubscriptionTable"; +import AuditAlertEditModal from "../../containers/AuditAlertEditModal"; +import AuditAlertDeleteModal from "../../containers/AuditAlertDeleteModal"; +import AuditSubscriptionEditModal from "../../containers/AuditSubscriptionEditModal"; +import AuditSubscriptionDeleteModal from "../../containers/AuditSubscriptionDeleteModal"; + +const AuditSubscriptions = props => ( + +); + +AuditSubscriptions.tabs = [ + { + path: "subscriptions", + title: t`Subscriptions`, + component: AuditSubscriptionTable, + modals: [ + { + path: ":pulseId/edit", + modal: AuditSubscriptionEditModal, + }, + { + path: ":pulseId/delete", + modal: AuditSubscriptionDeleteModal, + }, + ], + }, + { + path: "alerts", + title: t`Alerts`, + component: AuditAlertTable, + modals: [ + { + path: ":alertId/edit", + modal: AuditAlertEditModal, + }, + { + path: ":alertId/delete", + modal: AuditAlertDeleteModal, + }, + ], + }, +]; + +export default AuditSubscriptions; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/index.js b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/index.js new file mode 100644 index 000000000000..b6e16b1e0544 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/pages/AuditSubscriptions/index.js @@ -0,0 +1 @@ +export { default } from "./AuditSubscriptions"; diff --git a/enterprise/frontend/src/metabase-enterprise/audit_app/routes.jsx b/enterprise/frontend/src/metabase-enterprise/audit_app/routes.jsx index a658c5b5fbf1..72444a17dad0 100644 --- 
a/enterprise/frontend/src/metabase-enterprise/audit_app/routes.jsx +++ b/enterprise/frontend/src/metabase-enterprise/audit_app/routes.jsx @@ -1,11 +1,13 @@ import React from "react"; import { Route } from "metabase/hoc/Title"; +import { ModalRoute } from "metabase/hoc/ModalRoute"; import { IndexRoute, IndexRedirect } from "react-router"; import { t } from "ttag"; import _ from "underscore"; import AuditApp from "./containers/AuditApp"; +import UnsubscribeUserModal from "./containers/UnsubscribeUserModal/UnsubscribeUserModal"; import AuditOverview from "./pages/AuditOverview"; @@ -15,17 +17,15 @@ import AuditSchemas from "./pages/AuditSchemas"; import AuditSchemaDetail from "./pages/AuditSchemaDetail"; import AuditTables from "./pages/AuditTables"; import AuditTableDetail from "./pages/AuditTableDetail"; - import AuditQuestions from "./pages/AuditQuestions"; import AuditQuestionDetail from "./pages/AuditQuestionDetail"; import AuditDashboards from "./pages/AuditDashboards"; import AuditDashboardDetail from "./pages/AuditDashboardDetail"; import AuditQueryDetail from "./pages/AuditQueryDetail"; - import AuditUsers from "./pages/AuditUsers"; import AuditUserDetail from "./pages/AuditUserDetail"; - import AuditDownloads from "./pages/AuditDownloads"; +import AuditSubscriptions from "./pages/AuditSubscriptions"; type Page = { tabs?: Tab[], @@ -42,13 +42,24 @@ function getPageRoutes(path, page: Page) { // add a redirect for the default tab const defaultTab = getDefaultTab(page); if (defaultTab) { - subRoutes.push(); + subRoutes.push( + , + ); } // add sub routes for each tab if (page.tabs) { subRoutes.push( ...page.tabs.map(tab => ( - + + {tab.modals && + tab.modals.map(modal => ( + + ))} + )), ); } @@ -72,7 +83,7 @@ function getDefaultTab(page: Page): ?Tab { } const getRoutes = (store: any) => ( - + {/* */} @@ -92,7 +103,12 @@ const getRoutes = (store: any) => ( {getPageRoutes("downloads", AuditDownloads)} {getPageRoutes("members", AuditUsers)} 
{getPageRoutes("member/:userId", AuditUserDetail)} + {getPageRoutes("subscriptions", AuditSubscriptions)} ); +export const getUserMenuRotes = () => ( + +); + export default getRoutes; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.jsx b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.jsx new file mode 100644 index 000000000000..e9ceac8762c2 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.jsx @@ -0,0 +1,41 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import { formDomOnlyProps } from "metabase/lib/redux"; +import { + CacheTTLFieldContainer, + FieldText, + Input, +} from "./CacheTTLField.styled"; + +const propTypes = { + field: PropTypes.shape({ + name: PropTypes.string.isRequired, + value: PropTypes.number, + error: PropTypes.string, + }), + message: PropTypes.string, +}; + +export function CacheTTLField({ field, message, ...props }) { + const hasError = !!field.error; + return ( + + {message && ( + + {message} + + )} + + {t`hours`} + + ); +} + +CacheTTLField.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.styled.js b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.styled.js new file mode 100644 index 000000000000..40d1e32647fa --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.styled.js @@ -0,0 +1,33 @@ +import styled, { css } from "styled-components"; +import { color } from "metabase/lib/colors"; +import NumericInput from "metabase/components/NumericInput"; + +export const CacheTTLFieldContainer = styled.div` + display: flex; + align-items: center; +`; + +export const FieldText = styled.span` + color: ${props => color(props.hasError ? 
"error" : "text-dark")}; + ${props => css`margin-${props.margin}: 10px;`} +`; + +export const Input = styled(NumericInput)` + width: 50px; + text-align: center; + + color: ${props => color(props.hasError ? "error" : "text-dark")}; + font-weight: bold; + padding: 0.75em; + + border: 1px solid ${color("border")}; + border-radius: 4px; + outline: none; + + :focus, + :hover { + border-color: ${color("brand")}; + } + + transition: border 300ms ease-in-out; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.unit.spec.js new file mode 100644 index 000000000000..1c2bc1a09ac5 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/CacheTTLField.unit.spec.js @@ -0,0 +1,64 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { CacheTTLField } from "./CacheTTLField"; + +function setup({ name = "cache_ttl", message, value }) { + const onChange = jest.fn(); + render( + + Label + + , + ); + const field = screen.getByLabelText("Label"); + return { field, onChange }; +} + +describe("CacheTTLField", () => { + [ + { value: 0, expected: "0" }, + { value: 1, expected: "1" }, + { value: 12, expected: "12" }, + ].forEach(({ value, expected }) => { + it(`displays ${value} value as ${expected}`, () => { + const { field } = setup({ value }); + expect(field).toHaveValue(expected); + }); + }); + + it("displays a placeholder for null values", () => { + const { field } = setup({ value: null }); + + expect(field).toHaveAttribute("placeholder", "24"); + expect(field).toHaveValue(""); + }); + + it("displays message", () => { + setup({ message: "Cache results for" }); + expect(screen.queryByText("Cache results for")).toBeInTheDocument(); + }); + + it("calls onChange correctly", () => { + const { field, 
onChange } = setup({ value: 4 }); + + userEvent.clear(field); + userEvent.type(field, "14"); + field.blur(); + + expect(onChange).toHaveBeenLastCalledWith(14); + }); + + it("calls onChange with null value if input is cleared", () => { + const { field, onChange } = setup({ value: 4 }); + + userEvent.clear(field); + field.blur(); + + expect(onChange).toHaveBeenLastCalledWith(null); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/index.js b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/index.js new file mode 100644 index 000000000000..35634c0a52f2 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/CacheTTLField/index.js @@ -0,0 +1 @@ +export * from "./CacheTTLField"; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.jsx b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.jsx new file mode 100644 index 000000000000..c0cda2d8ddd0 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.jsx @@ -0,0 +1,57 @@ +import React, { useCallback, useEffect, useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Select, { Option } from "metabase/components/Select"; +import { CacheTTLField } from "../CacheTTLField"; +import { + CacheFieldContainer, + FieldContainer, +} from "./DatabaseCacheTTLField.styled"; + +const MODE = { + INSTANCE_DEFAULT: "instance-default", + CUSTOM: "custom", +}; + +const INSTANCE_DEFAULT_CACHE_TTL = null; +const DEFAULT_CUSTOM_CACHE_TTL = 24; // hours + +const propTypes = { + field: PropTypes.object.isRequired, +}; + +export function DatabaseCacheTTLField({ field }) { + const [mode, setMode] = useState( + field.value > 0 ? 
MODE.CUSTOM : MODE.INSTANCE_DEFAULT, + ); + + const onModeChange = useCallback(e => { + setMode(e.target.value); + }, []); + + useEffect(() => { + if (mode === MODE.INSTANCE_DEFAULT) { + field.onChange(INSTANCE_DEFAULT_CACHE_TTL); + } else if (field.value == null) { + field.onChange(DEFAULT_CUSTOM_CACHE_TTL); + } + }, [field, mode]); + + return ( + + + {mode === MODE.CUSTOM && ( + + + + )} + + ); +} + +DatabaseCacheTTLField.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.styled.jsx b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.styled.jsx new file mode 100644 index 000000000000..65f8eac58b5a --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.styled.jsx @@ -0,0 +1,12 @@ +import styled from "styled-components"; +import { space } from "metabase/styled-components/theme"; + +export const FieldContainer = styled.div` + display: flex; + flex-direction: row; + align-items: center; +`; + +export const CacheFieldContainer = styled.div` + margin-left: ${space(2)}; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.unit.spec.js new file mode 100644 index 000000000000..e4670251a828 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/DatabaseCacheTTLField.unit.spec.js @@ -0,0 +1,72 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { DatabaseCacheTTLField } from "./DatabaseCacheTTLField"; + +function setup({ value = null } = {}) { + const onChange = jest.fn(); + render( + , + ); + return { onChange }; +} + 
+function selectMode(nextMode) { + const currentModeLabel = + nextMode === "custom" ? "Use instance default (TTL)" : "Custom"; + const nextModeLabel = + nextMode === "instance-default" ? "Use instance default (TTL)" : "Custom"; + + userEvent.click(screen.getByText(currentModeLabel)); + userEvent.click(screen.getByText(nextModeLabel)); +} + +describe("DatabaseCacheTTLField", () => { + it("displays 'Use instance default' option when cache_ttl is null", () => { + setup({ value: null }); + expect( + screen.queryByText("Use instance default (TTL)"), + ).toBeInTheDocument(); + expect(screen.queryByLabelText("Cache TTL Field")).not.toBeInTheDocument(); + }); + + it("displays 'Use instance default' option when cache_ttl is 0", () => { + setup({ value: 0 }); + expect( + screen.queryByText("Use instance default (TTL)"), + ).toBeInTheDocument(); + expect(screen.queryByLabelText("Cache TTL Field")).not.toBeInTheDocument(); + }); + + it("sets 24 hours as a default TTL custom value", () => { + const { onChange } = setup(); + selectMode("custom"); + expect(onChange).toHaveBeenLastCalledWith(24); + }); + + it("can select and fill custom cache TTL value", () => { + const { onChange } = setup(); + + selectMode("custom"); + const input = screen.getByPlaceholderText("24"); + userEvent.type(input, "{selectall}{backspace}14"); + input.blur(); + + expect(onChange).toHaveBeenLastCalledWith(14); + }); + + it("displays input when cache_ttl has value", () => { + setup({ value: 4 }); + expect(screen.queryByDisplayValue("4")).toBeInTheDocument(); + expect(screen.queryByText("Custom")).toBeInTheDocument(); + expect( + screen.queryByText("Use instance default (TTL)"), + ).not.toBeInTheDocument(); + }); + + it("can reset cache_ttl to instance default", () => { + const { onChange } = setup({ value: 48 }); + selectMode("instance-default"); + expect(onChange).toHaveBeenLastCalledWith(null); + }); +}); diff --git 
a/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/index.js b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/index.js new file mode 100644 index 000000000000..0e1d95230ac8 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/DatabaseCacheTTLField/index.js @@ -0,0 +1 @@ +export * from "./DatabaseCacheTTLField"; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.jsx b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.jsx new file mode 100644 index 000000000000..51ab91308c92 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.jsx @@ -0,0 +1,75 @@ +import React, { useEffect, useMemo, useState } from "react"; +import { t } from "ttag"; +import PropTypes from "prop-types"; +import { duration } from "metabase/lib/formatting"; +import { getQuestionsImplicitCacheTTL } from "../../utils"; +import { + CacheTTLInput, + CacheTTLExpandedField, + StyledRadio, +} from "./QuestionCacheTTLField.styled"; + +const propTypes = { + field: PropTypes.shape({ + value: PropTypes.number, + onChange: PropTypes.func.isRequired, + }).isRequired, + question: PropTypes.object.isRequired, // metabase-lib's Question instance +}; + +const DEFAULT_CACHE_TTL = null; + +const MODE = { + DEFAULT: "default", + CUSTOM: "custom", +}; + +function getInitialMode(question, implicitCacheTTL) { + if (question.card().cache_ttl > 0 || !implicitCacheTTL) { + return MODE.CUSTOM; + } + return MODE.DEFAULT; +} + +export function QuestionCacheTTLField({ field, question, ...props }) { + const implicitCacheTTL = useMemo( + () => getQuestionsImplicitCacheTTL(question), + [question], + ); + + const [mode, setMode] = useState(getInitialMode(question, implicitCacheTTL)); + + useEffect(() => { + if (mode === MODE.DEFAULT) { + 
field.onChange(DEFAULT_CACHE_TTL); + } + }, [field, mode]); + + if (!implicitCacheTTL) { + return ; + } + + // implicitCacheTTL is in seconds and duration works with milliseconds + const defaultCachingLabel = duration(implicitCacheTTL * 1000); + + return ( +
+ setMode(val)} + options={[ + { + name: t`Use default` + ` (${defaultCachingLabel})`, + value: MODE.DEFAULT, + }, + { name: t`Custom`, value: MODE.CUSTOM }, + ]} + vertical + showButtons + /> + {mode === MODE.CUSTOM && } +
+ ); +} + +QuestionCacheTTLField.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.styled.jsx b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.styled.jsx new file mode 100644 index 000000000000..1d00df64c42d --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.styled.jsx @@ -0,0 +1,21 @@ +import React from "react"; +import { t } from "ttag"; +import styled from "styled-components"; +import { space } from "metabase/styled-components/theme"; +import Radio from "metabase/components/Radio"; +import { CacheTTLField } from "../CacheTTLField"; + +export function CacheTTLInput(props) { + return ; +} + +export const CacheTTLExpandedField = styled(CacheTTLInput)` + margin-left: 1.3rem; +`; + +export const StyledRadio = styled(Radio)` + li { + margin-top: ${space(0)}; + font-weight: bold; + } +`; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.unit.spec.js new file mode 100644 index 000000000000..dda8007c75e5 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/QuestionCacheTTLField.unit.spec.js @@ -0,0 +1,149 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { msToMinutes, msToHours } from "metabase/lib/time"; +import MetabaseSettings from "metabase/lib/settings"; +import { QuestionCacheTTLField } from "./QuestionCacheTTLField"; + +const TEN_MINUTES = 10 * 60 * 1000; + +function setup({ + value = null, + avgQueryDuration, + databaseCacheTTL = null, + cacheTTLMultiplier, + minCacheThreshold, +} = {}) { + const 
onChange = jest.fn(); + + const spy = jest.spyOn(MetabaseSettings, "get"); + spy.mockImplementation(key => { + if (key === "enable-query-caching") { + return true; + } + if (key === "query-caching-ttl-ratio") { + return cacheTTLMultiplier; + } + if (key === "query-caching-min-ttl") { + return minCacheThreshold; + } + }); + + const question = { + card: () => ({ + average_query_time: avgQueryDuration, + cache_ttl: value, + }), + database: () => ({ + cache_ttl: databaseCacheTTL, + }), + }; + + render( +
+ Label + + , + ); + return { onChange, avgQueryDuration }; +} + +const DEFAULT_MODE_REGEXP = /Use default \([.0-9]+ hours\)/; + +function selectMode(nextMode) { + const currentModeLabel = + nextMode === "custom" ? DEFAULT_MODE_REGEXP : "Custom"; + const nextModeLabel = nextMode === "default" ? DEFAULT_MODE_REGEXP : "Custom"; + + userEvent.click(screen.getByText(currentModeLabel)); + userEvent.click(screen.getByText(nextModeLabel)); +} + +function fillValue(input, value) { + userEvent.clear(input); + userEvent.type(input, String(value)); + input.blur(); +} + +const DEFAULT_MODE_TEXT_TEST_ID = /radio-[0-9]+-default-name/; + +describe("QuestionCacheTTLField", () => { + it("displays a placeholder if question is not cached", () => { + setup(); + expect(screen.getByLabelText("Label")).toHaveAttribute("placeholder", "24"); + }); + + it("displays question's cache TTL value", () => { + setup({ value: 21 }); + expect(screen.getByLabelText("Label")).toHaveValue("21"); + }); + + it("displays default caching value if question is cached on a db level", () => { + setup({ databaseCacheTTL: 32 }); + expect(screen.queryByTestId(DEFAULT_MODE_TEXT_TEST_ID)).toHaveTextContent( + "Use default (32 hours)", + ); + }); + + it("displays default caching value if question is cached on an instance level", () => { + setup({ + avgQueryDuration: TEN_MINUTES, + minCacheThreshold: 0, + cacheTTLMultiplier: 100, + }); + const expectedTTL = Math.round(msToHours(TEN_MINUTES * 100)); + expect(screen.queryByTestId(DEFAULT_MODE_TEXT_TEST_ID)).toHaveTextContent( + `Use default (${expectedTTL} hours)`, + ); + }); + + it("handles if cache duration is in minutes", () => { + setup({ + avgQueryDuration: 14400, + minCacheThreshold: 0, + cacheTTLMultiplier: 100, + }); + const expectedTTL = Math.round(msToMinutes(14400 * 100)); + expect(screen.queryByTestId(DEFAULT_MODE_TEXT_TEST_ID)).toHaveTextContent( + `Use default (${expectedTTL} minutes)`, + ); + }); + + it("calls onChange correctly when filling the input", 
() => { + const { onChange } = setup(); + fillValue(screen.getByLabelText("Label"), 48); + expect(onChange).toHaveBeenLastCalledWith(48); + }); + + it("offers to provide custom cache TTL when question is cached on a db level", () => { + setup({ databaseCacheTTL: 32 }); + + expect(screen.queryByLabelText("Use default (32 hours)")).toBeChecked(); + expect(screen.queryByLabelText("Custom")).not.toBeChecked(); + }); + + it("allows to overwrite default caching with custom value", () => { + const { onChange } = setup({ databaseCacheTTL: 32 }); + + selectMode("custom"); + fillValue(screen.getByLabelText("Label"), 24); + + expect(onChange).toHaveBeenLastCalledWith(24); + }); + + it("offers to switch to default caching instead of a custom TTL", () => { + setup({ value: 24, databaseCacheTTL: 32 }); + + expect(screen.queryByLabelText("Use default (32 hours)")).not.toBeChecked(); + expect(screen.queryByLabelText("Custom")).toBeChecked(); + }); + + it("allows to switch to default caching instead of a custom TTL", () => { + const { onChange } = setup({ value: 24, databaseCacheTTL: 32 }); + selectMode("default"); + expect(onChange).toHaveBeenLastCalledWith(null); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/index.js b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/index.js new file mode 100644 index 000000000000..bbc522fae524 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/components/QuestionCacheTTLField/index.js @@ -0,0 +1 @@ +export * from "./QuestionCacheTTLField"; diff --git a/enterprise/frontend/src/metabase-enterprise/caching/index.js b/enterprise/frontend/src/metabase-enterprise/caching/index.js new file mode 100644 index 000000000000..68af6b6f0f18 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/index.js @@ -0,0 +1,51 @@ +import React from "react"; +import { t, jt } from "ttag"; +import { hasPremiumFeature } from 
"metabase-enterprise/settings"; +import { PLUGIN_CACHING, PLUGIN_FORM_WIDGETS } from "metabase/plugins"; +import Link from "metabase/components/Link"; +import { CacheTTLField } from "./components/CacheTTLField"; +import { DatabaseCacheTTLField } from "./components/DatabaseCacheTTLField"; +import { QuestionCacheTTLField } from "./components/QuestionCacheTTLField"; +import { + getQuestionsImplicitCacheTTL, + validateCacheTTL, + normalizeCacheTTL, +} from "./utils"; + +function getDatabaseCacheTTLFieldDescription() { + return ( + + {jt`How long to keep question results. By default, Metabase will use the value you supply on the ${( + {t`cache settings page`} + )}, but if this database has other factors that influence the freshness of data, it could make sense to set a custom duration. You can also choose custom durations on individual questions or dashboards to help improve performance.`} + + ); +} + +if (hasPremiumFeature("advanced_config")) { + PLUGIN_CACHING.cacheTTLFormField = { + name: "cache_ttl", + validate: validateCacheTTL, + normalize: normalizeCacheTTL, + }; + + PLUGIN_CACHING.databaseCacheTTLFormField = { + name: "cache_ttl", + type: "databaseCacheTTL", + title: t`Default result cache duration`, + description: getDatabaseCacheTTLFieldDescription(), + descriptionPosition: "bottom", + validate: validateCacheTTL, + normalize: normalizeCacheTTL, + }; + + PLUGIN_FORM_WIDGETS.dashboardCacheTTL = CacheTTLField; + PLUGIN_FORM_WIDGETS.databaseCacheTTL = DatabaseCacheTTLField; + PLUGIN_FORM_WIDGETS.questionCacheTTL = QuestionCacheTTLField; + + PLUGIN_CACHING.getQuestionsImplicitCacheTTL = getQuestionsImplicitCacheTTL; +} diff --git a/enterprise/frontend/src/metabase-enterprise/caching/utils.js b/enterprise/frontend/src/metabase-enterprise/caching/utils.js new file mode 100644 index 000000000000..525ff74b75e1 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/utils.js @@ -0,0 +1,56 @@ +import { t } from "ttag"; +import { msToSeconds } from 
"metabase/lib/time"; +import MetabaseSettings from "metabase/lib/settings"; + +/** + * If a question doesn't have an explicitly set cache TTL, + * its results can still be cached with a db-level cache TTL + * or with an instance level setting + * + * More on caching: + * https://www.metabase.com/docs/latest/administration-guide/14-caching.html + * + * @param {Question} metabase-lib Question instance + * @returns {number} — cache TTL value in seconds (from db or instance default) that will be used + */ +export function getQuestionsImplicitCacheTTL(question) { + if (!MetabaseSettings.get("enable-query-caching")) { + return null; + } + if (question.database().cache_ttl) { + // Database's cache TTL is in hours, need to convert that to seconds + return question.database().cache_ttl * 60 * 60; + } + const avgQueryDurationInSeconds = msToSeconds( + question.card().average_query_time, + ); + if (checkQuestionWillBeCached(avgQueryDurationInSeconds)) { + return calcQuestionMagicCacheDuration(avgQueryDurationInSeconds); + } + return null; +} + +function checkQuestionWillBeCached(avgQueryDurationInSeconds) { + const minQueryDurationThresholdSeconds = MetabaseSettings.get( + "query-caching-min-ttl", + ); + return avgQueryDurationInSeconds > minQueryDurationThresholdSeconds; +} + +function calcQuestionMagicCacheDuration(avgQueryDurationInSeconds) { + const cacheTTLMultiplier = MetabaseSettings.get("query-caching-ttl-ratio"); + return avgQueryDurationInSeconds * cacheTTLMultiplier; +} + +export function validateCacheTTL(value) { + if (value === null) { + return; + } + if (!Number.isSafeInteger(value) || value < 0) { + return t`Must be a positive integer value`; + } +} + +export function normalizeCacheTTL(value) { + return value === 0 ? 
null : value; +} diff --git a/enterprise/frontend/src/metabase-enterprise/caching/utils.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/caching/utils.unit.spec.js new file mode 100644 index 000000000000..9b73ebab8701 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/caching/utils.unit.spec.js @@ -0,0 +1,94 @@ +import { msToSeconds, hoursToSeconds } from "metabase/lib/time"; +import MetabaseSettings from "metabase/lib/settings"; +import { getQuestionsImplicitCacheTTL, validateCacheTTL } from "./utils"; + +describe("validateCacheTTL", () => { + const validTestCases = [null, 0, 1, 6, 42]; + const invalidTestCases = [-1, -1.2, 0.5, 4.3]; + + validTestCases.forEach(value => { + it(`should be valid for ${value}`, () => { + expect(validateCacheTTL(value)).toBe(undefined); + }); + }); + + invalidTestCases.forEach(value => { + it(`should return error for ${value}`, () => { + expect(validateCacheTTL(value)).toBe("Must be a positive integer value"); + }); + }); +}); + +describe("getQuestionsImplicitCacheTTL", () => { + const TEN_MINUTES = 10 * 60 * 1000; + const DEFAULT_CACHE_TTL_MULTIPLIER = 10; + + function setup({ + cachingEnabled = true, + avgQueryTime = null, + databaseCacheTTL = null, + cacheTTLMultiplier = DEFAULT_CACHE_TTL_MULTIPLIER, + minCacheThreshold = 60, + } = {}) { + const spy = jest.spyOn(MetabaseSettings, "get"); + spy.mockImplementation(key => { + if (key === "enable-query-caching") { + return cachingEnabled; + } + if (key === "query-caching-ttl-ratio") { + return cachingEnabled ? cacheTTLMultiplier : null; + } + if (key === "query-caching-min-ttl") { + return cachingEnabled ? 
minCacheThreshold : null; + } + }); + + return { + card: () => ({ + average_query_time: avgQueryTime, + }), + database: () => ({ + cache_ttl: databaseCacheTTL, + }), + }; + } + + it("returns database's cache TTL if set", () => { + const question = setup({ databaseCacheTTL: 10 }); + expect(getQuestionsImplicitCacheTTL(question)).toBe(hoursToSeconds(10)); + }); + + it("returns 'magic TTL' if there is no prior caching strategy", () => { + const question = setup({ avgQueryTime: TEN_MINUTES }); + + expect(getQuestionsImplicitCacheTTL(question)).toBe( + msToSeconds(TEN_MINUTES * DEFAULT_CACHE_TTL_MULTIPLIER), + ); + }); + + it("returns null if instance-level caching enabled, but the query doesn't pass the min exec time threshold", () => { + const question = setup({ + avgQueryTime: TEN_MINUTES, + minCacheThreshold: TEN_MINUTES * 2, + }); + expect(getQuestionsImplicitCacheTTL(question)).toBe(null); + }); + + it("prefers database cache TTL over instance-level one", () => { + const question = setup({ databaseCacheTTL: 10, avgQueryTime: TEN_MINUTES }); + expect(getQuestionsImplicitCacheTTL(question)).toBe(hoursToSeconds(10)); + }); + + it("returns null if caching disabled, but instance level caching parameters are present", () => { + const question = setup({ + avgQueryTime: TEN_MINUTES, + cachingEnabled: false, + }); + expect(getQuestionsImplicitCacheTTL(question)).toBe(null); + }); + + it("returns null if caching disabled, but database has a cache ttl", () => { + const question = setup({ databaseCacheTTL: 10, cachingEnabled: false }); + expect(getQuestionsImplicitCacheTTL(question)).toBe(null); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.jsx b/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.jsx new file mode 100644 index 000000000000..adfa981181be --- /dev/null +++ 
b/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.jsx @@ -0,0 +1,37 @@ +import React from "react"; +import PropTypes from "prop-types"; + +import Icon from "metabase/components/Icon"; +import { color } from "metabase/lib/colors"; + +import { AUTHORITY_LEVELS } from "../constants"; +import { isRegularCollection } from "../utils"; + +const propTypes = { + tooltip: PropTypes.string, + collection: PropTypes.shape({ + authority_level: PropTypes.oneOf(["official"]), + }), +}; + +export function CollectionAuthorityLevelIcon({ + collection, + tooltip = "default", + ...iconProps +}) { + if (isRegularCollection(collection)) { + return null; + } + const level = AUTHORITY_LEVELS[collection.authority_level]; + return ( + + ); +} + +CollectionAuthorityLevelIcon.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.unit.spec.js new file mode 100644 index 000000000000..778512bf6ee9 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/components/CollectionAuthorityLevelIcon.unit.spec.js @@ -0,0 +1,78 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { CollectionAuthorityLevelIcon } from "./CollectionAuthorityLevelIcon"; + +describe("CollectionAuthorityLevelIcon", () => { + describe("regular collections", () => { + [ + { + name: "collection without authority level", + collection: {}, + }, + { + name: "regular collection", + collection: { + authority_level: null, + }, + }, + ].forEach(({ collection, name }) => { + it(`doesn't render for ${name}`, () => { + render(); + expect(screen.queryByLabelText("folder icon")).toBeNull(); + }); + }); + }); + + describe("official collections", () => { + const OFFICIAL_COLLECTION = { + 
authority_level: "official", + }; + + function renderOfficialCollection({ + collection = OFFICIAL_COLLECTION, + ...props + } = {}) { + render( + , + ); + } + + function queryOfficialIcon() { + return screen.queryByLabelText("badge icon"); + } + + it(`renders correctly`, () => { + renderOfficialCollection(); + expect(queryOfficialIcon()).toBeInTheDocument(); + }); + + it(`displays a tooltip by default`, () => { + renderOfficialCollection(); + userEvent.hover(queryOfficialIcon()); + expect(screen.getByRole("tooltip")).toHaveTextContent( + "Official collection", + ); + }); + + it(`can display different tooltip`, () => { + renderOfficialCollection({ tooltip: "belonging" }); + userEvent.hover(queryOfficialIcon()); + expect(screen.getByRole("tooltip")).toHaveTextContent( + "Belongs to an Official collection", + ); + }); + + it(`can display custom tooltip text`, () => { + renderOfficialCollection({ tooltip: "Hello" }); + userEvent.hover(queryOfficialIcon()); + expect(screen.getByRole("tooltip")).toHaveTextContent("Hello"); + }); + + it(`can hide tooltip`, () => { + renderOfficialCollection({ tooltip: null }); + userEvent.hover(queryOfficialIcon()); + expect(screen.queryByLabelText("tooltip")).toBeNull(); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.jsx b/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.jsx new file mode 100644 index 000000000000..d1b83d5378e3 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.jsx @@ -0,0 +1,67 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; + +import CheckBox from "metabase/components/CheckBox"; +import { + SegmentedControl, + optionShape, +} from "metabase/components/SegmentedControl"; + +import { AUTHORITY_LEVELS } from "../constants"; +import { FormFieldRoot, Label } from 
"./FormCollectionAuthorityLevel.styled"; + +const propTypes = { + field: PropTypes.shape({ + value: PropTypes.any, + initialValue: PropTypes.any, + onChange: PropTypes.func.isRequired, + }).isRequired, + options: PropTypes.arrayOf(optionShape).isRequired, + values: PropTypes.shape({ + id: PropTypes.number, + authority_level: PropTypes.oneOf(["official"]), + update_collection_tree_authority_level: PropTypes.bool, + }), + onChangeField: PropTypes.func.isRequired, +}; + +export function FormCollectionAuthorityLevel({ + field, + options, + values, + onChangeField, +}) { + const isNewCollection = !values.id; + const selectedAuthorityLevel = + AUTHORITY_LEVELS[field.value] || AUTHORITY_LEVELS.regular; + const shouldSuggestToUpdateChildren = + !isNewCollection && field.initialValue !== field.value; + return ( + + + {shouldSuggestToUpdateChildren && ( + {t`Make all sub-collections ${selectedAuthorityLevel.name}, too.`} + } + checked={values.update_collection_tree_authority_level} + onChange={e => + onChangeField( + "update_collection_tree_authority_level", + e.target.checked, + ) + } + /> + )} + + ); +} + +FormCollectionAuthorityLevel.propTypes = propTypes; diff --git a/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.styled.jsx b/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.styled.jsx new file mode 100644 index 000000000000..9af08dd82608 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/components/FormCollectionAuthorityLevel.styled.jsx @@ -0,0 +1,16 @@ +import styled from "styled-components"; +import CheckBox from "metabase/components/CheckBox"; +import { color } from "metabase/lib/colors"; + +export const FormFieldRoot = styled.div` + display: flex; + align-items: center; + justify-content: space-between; +`; + +export const Label = styled(CheckBox.Label)` + color: ${color("text-dark")}; + font-size: 1em; + font-weight: bold; + margin-bottom: 1px; 
+`; diff --git a/enterprise/frontend/src/metabase-enterprise/collections/constants.js b/enterprise/frontend/src/metabase-enterprise/collections/constants.js new file mode 100644 index 000000000000..d511493f3848 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/constants.js @@ -0,0 +1,23 @@ +import { t } from "ttag"; + +export const REGULAR_COLLECTION = { + type: null, + name: t`Regular`, + icon: "folder", +}; + +export const OFFICIAL_COLLECTION = { + type: "official", + name: t`Official`, + icon: "badge", + color: "saturated-yellow", + tooltips: { + default: t`Official collection`, + belonging: t`Belongs to an Official collection`, + }, +}; + +export const AUTHORITY_LEVELS = { + [OFFICIAL_COLLECTION.type]: OFFICIAL_COLLECTION, + [REGULAR_COLLECTION.type]: REGULAR_COLLECTION, +}; diff --git a/enterprise/frontend/src/metabase-enterprise/collections/index.js b/enterprise/frontend/src/metabase-enterprise/collections/index.js new file mode 100644 index 000000000000..4d9a2a0a166c --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/index.js @@ -0,0 +1,50 @@ +import { t } from "ttag"; +import { + PLUGIN_FORM_WIDGETS, + PLUGIN_COLLECTIONS, + PLUGIN_COLLECTION_COMPONENTS, +} from "metabase/plugins"; +import { FormCollectionAuthorityLevel } from "./components/FormCollectionAuthorityLevel"; +import { CollectionAuthorityLevelIcon } from "./components/CollectionAuthorityLevelIcon"; +import { + AUTHORITY_LEVELS, + REGULAR_COLLECTION, + OFFICIAL_COLLECTION, +} from "./constants"; +import { isRegularCollection } from "./utils"; + +PLUGIN_COLLECTIONS.isRegularCollection = isRegularCollection; + +PLUGIN_COLLECTIONS.REGULAR_COLLECTION = REGULAR_COLLECTION; + +PLUGIN_COLLECTIONS.AUTHORITY_LEVEL = AUTHORITY_LEVELS; + +PLUGIN_COLLECTIONS.authorityLevelFormFields = [ + { + name: "authority_level", + title: t`Collection type`, + info: t`The contents of Official collections will get a badge by their name and will be more likely to show up in 
search results.`, + type: "collectionAuthorityLevel", + options: [ + { + name: REGULAR_COLLECTION.name, + value: REGULAR_COLLECTION.type, + icon: REGULAR_COLLECTION.icon, + }, + { + name: OFFICIAL_COLLECTION.name, + value: OFFICIAL_COLLECTION.type, + icon: OFFICIAL_COLLECTION.icon, + selectedColor: OFFICIAL_COLLECTION.color, + }, + ], + }, + { + name: "update_collection_tree_authority_level", + type: "hidden", + }, +]; + +PLUGIN_FORM_WIDGETS.collectionAuthorityLevel = FormCollectionAuthorityLevel; + +PLUGIN_COLLECTION_COMPONENTS.CollectionAuthorityLevelIcon = CollectionAuthorityLevelIcon; diff --git a/enterprise/frontend/src/metabase-enterprise/collections/utils.js b/enterprise/frontend/src/metabase-enterprise/collections/utils.js new file mode 100644 index 000000000000..2b880fa8eabb --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/utils.js @@ -0,0 +1,6 @@ +import { REGULAR_COLLECTION } from "./constants"; + +export function isRegularCollection({ authority_level }) { + // Root, personal collections don't have `authority_level` + return !authority_level || authority_level === REGULAR_COLLECTION.type; +} diff --git a/enterprise/frontend/src/metabase-enterprise/collections/utils.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/collections/utils.unit.spec.js new file mode 100644 index 000000000000..4b9b5732836e --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/collections/utils.unit.spec.js @@ -0,0 +1,33 @@ +import { isRegularCollection } from "./utils"; + +describe("Collections plugin utils", () => { + const COLLECTION = { + NO_AUTHORITY_LEVEL: { + id: "root", + name: "Our analytics", + }, + REGULAR: { + authority_level: null, + }, + OFFICIAL: { + authority_level: "official", + }, + }; + + describe("isRegularCollection", () => { + it("returns 'true' if collection is missing an authority level", () => { + const collection = COLLECTION.NO_AUTHORITY_LEVEL; + expect(isRegularCollection(collection)).toBe(true); + }); + + 
it("returns 'true' for regular collections", () => { + const collection = COLLECTION.REGULAR; + expect(isRegularCollection(collection)).toBe(true); + }); + + it("returns 'false' for official collections", () => { + const collection = COLLECTION.OFFICIAL; + expect(isRegularCollection(collection)).toBe(false); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/actions.js b/enterprise/frontend/src/metabase-enterprise/moderation/actions.js new file mode 100644 index 000000000000..3d51a9b36bb3 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/actions.js @@ -0,0 +1,31 @@ +import { createThunkAction } from "metabase/lib/redux"; +import { verifyItem, removeReview } from "./service"; +import { softReloadCard } from "metabase/query_builder/actions"; + +export const VERIFY_CARD = "metabase-enterprise/moderation/VERIFY_CARD"; +export const verifyCard = createThunkAction( + VERIFY_CARD, + (cardId, text) => async (dispatch, getState) => { + await verifyItem({ + itemId: cardId, + itemType: "card", + text, + }); + + return dispatch(softReloadCard()); + }, +); + +export const REMOVE_CARD_REVIEW = + "metabase-enterprise/moderation/REMOVE_CARD_REVIEW"; +export const removeCardReview = createThunkAction( + REMOVE_CARD_REVIEW, + cardId => async (dispatch, getState) => { + await removeReview({ + itemId: cardId, + itemType: "card", + }); + + return dispatch(softReloadCard()); + }, +); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.jsx new file mode 100644 index 000000000000..ee9502b85309 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.jsx @@ -0,0 +1,30 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; + +import { isItemVerified } from 
"metabase-enterprise/moderation/service"; + +import { Container, VerifyButton } from "./ModerationActions.styled"; + +export default ModerationActions; + +ModerationActions.propTypes = { + className: PropTypes.string, + onVerify: PropTypes.func, + moderationReview: PropTypes.object, +}; + +function ModerationActions({ moderationReview, className, onVerify }) { + const isVerified = isItemVerified(moderationReview); + const hasActions = !!onVerify; + + return hasActions ? ( + + {!isVerified && ( + + {t`Verify this question`} + + )} + + ) : null; +} diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.styled.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.styled.jsx new file mode 100644 index 000000000000..dd17fc80d11b --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.styled.jsx @@ -0,0 +1,38 @@ +import styled from "styled-components"; + +import { color } from "metabase/lib/colors"; +import { + MODERATION_STATUS, + getStatusIcon, +} from "metabase-enterprise/moderation/service"; + +const { name: verifiedIconName, color: verifiedIconColor } = getStatusIcon( + MODERATION_STATUS.verified, +); + +import Button from "metabase/components/Button"; + +export const Container = styled.div` + display: flex; + align-items: center; + justify-content: space-between; +`; + +export const Label = styled.h5` + font-size: 14px; + color: ${color("text-medium")}; + flex: 1; +`; + +export const VerifyButton = styled(Button).attrs({ + icon: verifiedIconName, + iconSize: 20, +})` + border: none; + color: ${color(verifiedIconColor)}; + padding: 8px; + + &:disabled { + color: ${color("text-medium")}; + } +`; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.unit.spec.js 
b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.unit.spec.js new file mode 100644 index 000000000000..2110349e7834 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationActions/ModerationActions.unit.spec.js @@ -0,0 +1,28 @@ +import React from "react"; +import ModerationActions from "./ModerationActions"; +import { render, screen } from "@testing-library/react"; + +describe("ModerationActions", () => { + describe("when the user is not a moderator", () => { + it("should not render", () => { + const { queryByTestId } = render( + , + ); + expect(queryByTestId("moderation-verify-action")).toBeNull(); + expect(screen.queryByText("Moderation")).toBeNull(); + }); + }); + + describe("when a moderator clicks on the verify button", () => { + it("should call the onVerify prop", () => { + const onVerify = jest.fn(); + const { getByTestId } = render( + , + ); + + getByTestId("moderation-verify-action").click(); + + expect(onVerify).toHaveBeenCalled(); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.jsx new file mode 100644 index 000000000000..1cfefdf6e491 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.jsx @@ -0,0 +1,100 @@ +import React from "react"; +import PropTypes from "prop-types"; +import _ from "underscore"; +import { connect } from "react-redux"; + +import { color, alpha } from "metabase/lib/colors"; +import { getUser } from "metabase/selectors/user"; +import { getRelativeTimeAbbreviated } from "metabase/lib/time"; +import { + getTextForReviewBanner, + getIconForReview, +} from "metabase-enterprise/moderation/service"; +import User from "metabase/entities/users"; + +import { + 
Container, + Text, + Time, + IconButton, + StatusIcon, +} from "./ModerationReviewBanner.styled"; +import Tooltip from "metabase/components/Tooltip"; + +const ICON_BUTTON_SIZE = 20; +const TOOLTIP_X_OFFSET = ICON_BUTTON_SIZE / 4; + +const mapStateToProps = (state, props) => ({ + currentUser: getUser(state), +}); + +export default _.compose( + User.load({ + id: (state, props) => props.moderationReview.moderator_id, + loadingAndErrorWrapper: false, + }), + connect(mapStateToProps), +)(ModerationReviewBanner); + +ModerationReviewBanner.propTypes = { + moderationReview: PropTypes.object.isRequired, + user: PropTypes.object, + currentUser: PropTypes.object.isRequired, + onRemove: PropTypes.func, +}; + +export function ModerationReviewBanner({ + moderationReview, + user: moderator, + currentUser, + onRemove, +}) { + const [isHovering, setIsHovering] = React.useState(false); + const [isActive, setIsActive] = React.useState(false); + + const { bannerText, tooltipText } = getTextForReviewBanner( + moderationReview, + moderator, + currentUser, + ); + const relativeCreationTime = getRelativeTimeAbbreviated( + moderationReview.created_at, + ); + const { name: iconName, color: iconColor } = getIconForReview( + moderationReview, + ); + const showClose = isHovering || isActive; + + return ( + setIsHovering(true)} + onMouseLeave={() => setIsHovering(false)} + > + + {onRemove ? ( + setIsActive(true)} + onBlur={() => setIsActive(false)} + icon={showClose ? "close" : iconName} + color={color(showClose ? 
"text-medium" : iconColor)} + onClick={onRemove} + iconSize={ICON_BUTTON_SIZE} + /> + ) : ( + + )} + + {bannerText} + + + ); +} diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.styled.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.styled.jsx new file mode 100644 index 000000000000..60439334ad7a --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.styled.jsx @@ -0,0 +1,40 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import Button from "metabase/components/Button"; +import Icon from "metabase/components/Icon"; + +export const Container = styled.div` + padding: 1rem 1rem 1rem 0.5rem; + background-color: ${props => props.backgroundColor}; + display: flex; + justify-content: space-between; + align-items: center; + column-gap: 0.5rem; + border-radius: 8px; +`; + +export const Text = styled.span` + flex: 1; + font-size: 14px; + font-weight: 700; +`; + +export const Time = styled.time` + color: ${color("text-medium")}; + font-size: 12px; +`; + +export const IconButton = styled(Button)` + padding: 0 0 0 0.5rem !important; + border: none; + background-color: transparent; + + &:hover { + background-color: transparent; + color: ${color("danger")}; + } +`; + +export const StatusIcon = styled(Icon)` + padding: 0 0.5rem; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.unit.spec.js new file mode 100644 index 000000000000..3a53401a3c16 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationReviewBanner/ModerationReviewBanner.unit.spec.js @@ -0,0 +1,104 @@ +import React 
from "react"; +import { ModerationReviewBanner } from "./ModerationReviewBanner"; +import { render, fireEvent } from "@testing-library/react"; + +const VERIFIED_ICON_SELECTOR = ".Icon-verified"; +const CLOSE_ICON_SELECTOR = ".Icon-close"; + +const moderationReview = { + status: "verified", + moderator_id: 1, + created_at: Date.now(), +}; +const moderator = { id: 1, common_name: "Foo" }; +const currentUser = { id: 2, common_name: "Bar" }; + +describe("ModerationReviewBanner", () => { + it("should show text concerning the given review", () => { + const { getByText } = render( + , + ); + expect(getByText("Foo verified this")).toBeTruthy(); + }); + + describe("when not provided an onRemove prop", () => { + let getByRole; + let container; + beforeEach(() => { + const wrapper = render( + , + ); + + getByRole = wrapper.getByRole; + container = wrapper.container; + }); + + it("should render a status icon, not a button", () => { + expect(() => getByRole("button")).toThrow(); + }); + + it("should render with the icon relevant to the review's status", () => { + expect(container.querySelector(VERIFIED_ICON_SELECTOR)).toBeTruthy(); + }); + }); + + describe("when provided an onRemove callback prop", () => { + let onRemove; + let container; + let getByRole; + beforeEach(() => { + onRemove = jest.fn(); + const wrapper = render( + , + ); + + container = wrapper.container; + getByRole = wrapper.getByRole; + }); + + it("should render a button", () => { + expect(getByRole("button")).toBeTruthy(); + }); + + it("should render the button with the icon relevant to the review's status", () => { + expect(container.querySelector(VERIFIED_ICON_SELECTOR)).toBeTruthy(); + }); + + it("should render the button as a close icon when the user is hovering their mouse over the banner", () => { + const banner = container.firstChild; + fireEvent.mouseEnter(banner); + expect(container.querySelector(CLOSE_ICON_SELECTOR)).toBeTruthy(); + fireEvent.mouseLeave(banner); + 
expect(container.querySelector(VERIFIED_ICON_SELECTOR)).toBeTruthy(); + }); + + it("should render the button as a close icon when the user focuses the button", () => { + fireEvent.focus(getByRole("button")); + expect(container.querySelector(CLOSE_ICON_SELECTOR)).toBeTruthy(); + fireEvent.blur(getByRole("button")); + expect(container.querySelector(VERIFIED_ICON_SELECTOR)).toBeTruthy(); + }); + + it("should render the button as a close icon when focused, even when the mouse leaves the banner", () => { + const banner = container.firstChild; + fireEvent.mouseEnter(banner); + fireEvent.focus(getByRole("button")); + expect(container.querySelector(CLOSE_ICON_SELECTOR)).toBeTruthy(); + fireEvent.mouseLeave(banner); + expect(container.querySelector(CLOSE_ICON_SELECTOR)).toBeTruthy(); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.jsx new file mode 100644 index 000000000000..6c9cfff6796f --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.jsx @@ -0,0 +1,20 @@ +import React from "react"; +import PropTypes from "prop-types"; + +import { color } from "metabase/lib/colors"; +import { getStatusIcon } from "metabase-enterprise/moderation/service"; + +import Icon from "metabase/components/Icon"; + +ModerationStatusIcon.propTypes = { + status: PropTypes.string, +}; + +function ModerationStatusIcon({ status, ...iconProps }) { + const { name: iconName, color: iconColor } = getStatusIcon(status); + return iconName ? 
( + + ) : null; +} + +export default ModerationStatusIcon; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.unit.spec.js new file mode 100644 index 000000000000..e0616400c2dd --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/ModerationStatusIcon/ModerationStatusIcon.unit.spec.js @@ -0,0 +1,25 @@ +import React from "react"; +import ModerationStatusIcon from "./ModerationStatusIcon"; +import { render } from "@testing-library/react"; + +const VERIFIED_ICON_SELECTOR = ".Icon-verified"; + +describe("ModerationReviewBanner", () => { + it("should show an icon when given a real moderation status", () => { + render(); + + expect(document.querySelector(VERIFIED_ICON_SELECTOR)).toBeTruthy(); + }); + + it("should not show an icon when given an undefined status", () => { + render(); + + expect(document.querySelector(VERIFIED_ICON_SELECTOR)).toBeNull(); + }); + + it("should not show an icon when given a status that does not match any existing moderation status", () => { + render(); + + expect(document.querySelector(VERIFIED_ICON_SELECTOR)).toBeNull(); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.jsx new file mode 100644 index 000000000000..aafe91711f88 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.jsx @@ -0,0 +1,69 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { connect } from "react-redux"; + +import { getLatestModerationReview } from "metabase-enterprise/moderation/service"; +import { getIsModerator } from 
"metabase-enterprise/moderation/selectors"; +import { + verifyCard, + removeCardReview, +} from "metabase-enterprise/moderation/actions"; + +import { BorderedModerationActions } from "./QuestionModerationSection.styled"; +import ModerationReviewBanner from "../ModerationReviewBanner/ModerationReviewBanner"; + +const mapStateToProps = (state, props) => ({ + isModerator: getIsModerator(state, props), +}); +const mapDispatchToProps = { + verifyCard, + removeCardReview, +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(QuestionModerationSection); + +QuestionModerationSection.propTypes = { + question: PropTypes.object.isRequired, + verifyCard: PropTypes.func.isRequired, + removeCardReview: PropTypes.func.isRequired, + isModerator: PropTypes.bool.isRequired, +}; + +function QuestionModerationSection({ + question, + verifyCard, + removeCardReview, + isModerator, +}) { + const latestModerationReview = getLatestModerationReview( + question.getModerationReviews(), + ); + + const onVerify = () => { + const id = question.id(); + verifyCard(id); + }; + + const onRemoveModerationReview = () => { + const id = question.id(); + removeCardReview(id); + }; + + return ( + + + {latestModerationReview && ( + + )} + + ); +} diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.styled.jsx b/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.styled.jsx new file mode 100644 index 000000000000..373a7c285356 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/components/QuestionModerationSection/QuestionModerationSection.styled.jsx @@ -0,0 +1,8 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import ModerationActions from "../ModerationActions/ModerationActions"; + +export const BorderedModerationActions = styled(ModerationActions)` + border-top: 1px solid 
${color("border")}; + padding-top: 1rem; +`; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/constants.js b/enterprise/frontend/src/metabase-enterprise/moderation/constants.js new file mode 100644 index 000000000000..3e4417c28b91 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/constants.js @@ -0,0 +1,14 @@ +export const MODERATION_STATUS = { + verified: "verified", +}; + +export const MODERATION_STATUS_ICONS = { + verified: { + name: "verified", + color: "brand", + }, + null: { + name: "close", + color: "text-light", + }, +}; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/index.js b/enterprise/frontend/src/metabase-enterprise/moderation/index.js new file mode 100644 index 000000000000..f471a7f5bcc1 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/index.js @@ -0,0 +1,17 @@ +import { PLUGIN_MODERATION } from "metabase/plugins"; +import QuestionModerationSection from "./components/QuestionModerationSection/QuestionModerationSection"; +import ModerationStatusIcon from "./components/ModerationStatusIcon/ModerationStatusIcon"; + +import { + getStatusIconForQuestion, + getStatusIcon, + getModerationTimelineEvents, +} from "./service"; + +Object.assign(PLUGIN_MODERATION, { + QuestionModerationSection, + ModerationStatusIcon, + getStatusIconForQuestion, + getStatusIcon, + getModerationTimelineEvents, +}); diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/selectors.js b/enterprise/frontend/src/metabase-enterprise/moderation/selectors.js new file mode 100644 index 000000000000..c0b8f5778727 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/selectors.js @@ -0,0 +1,5 @@ +import { getUserIsAdmin } from "metabase/selectors/user"; + +export const getIsModerator = (state, props) => { + return getUserIsAdmin(state, props); +}; diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/service.js 
b/enterprise/frontend/src/metabase-enterprise/moderation/service.js new file mode 100644 index 000000000000..2d6639f5b2ac --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/service.js @@ -0,0 +1,128 @@ +import { t } from "ttag"; +import _ from "underscore"; + +import { ModerationReviewApi } from "metabase/services"; +import { MODERATION_STATUS_ICONS } from "./constants"; + +export { MODERATION_STATUS } from "./constants"; + +export function verifyItem({ text, itemId, itemType }) { + return ModerationReviewApi.create({ + status: "verified", + moderated_item_id: itemId, + moderated_item_type: itemType, + text, + }); +} + +export function removeReview({ itemId, itemType }) { + return ModerationReviewApi.create({ + status: null, + moderated_item_id: itemId, + moderated_item_type: itemType, + }); +} + +const noIcon = {}; +export function getStatusIcon(status) { + if (isRemovedReviewStatus(status)) { + return noIcon; + } + + return MODERATION_STATUS_ICONS[status] || noIcon; +} + +export function getIconForReview(review, options) { + return getStatusIcon(review?.status, options); +} + +// we only want the icon that represents the removal of a review in special cases, +// so you must ask for the icon explicitly +export function getRemovedReviewStatusIcon() { + return MODERATION_STATUS_ICONS[null]; +} + +export function getLatestModerationReview(reviews) { + const maybeReview = _.findWhere(reviews, { + most_recent: true, + }); + + // since we can't delete reviews, consider a most recent review with a status of null to mean there is no review + return isRemovedReviewStatus(maybeReview?.status) ? 
undefined : maybeReview; +} + +export function getStatusIconForQuestion(question) { + const reviews = question.getModerationReviews(); + const review = getLatestModerationReview(reviews); + return getIconForReview(review); +} + +export function getTextForReviewBanner( + moderationReview, + moderator, + currentUser, +) { + const moderatorName = getModeratorDisplayName(moderator, currentUser); + const { status } = moderationReview; + + if (status === "verified") { + const bannerText = t`${moderatorName} verified this`; + const tooltipText = t`Remove verification`; + return { bannerText, tooltipText }; + } + + return {}; +} + +function getModeratorDisplayName(user, currentUser) { + const { id: userId, common_name } = user || {}; + const { id: currentUserId } = currentUser || {}; + + if (currentUserId != null && userId === currentUserId) { + return t`You`; + } else if (userId != null) { + return common_name; + } else { + return t`A moderator`; + } +} + +// a `status` of `null` represents the removal of a review, since we can't delete reviews +export function isRemovedReviewStatus(status) { + return String(status) === "null"; +} + +export function isItemVerified(review) { + return review != null && review.status === "verified"; +} + +function getModerationReviewEventText(review, moderatorDisplayName) { + switch (review.status) { + case "verified": + return t`${moderatorDisplayName} verified this`; + case null: + return t`${moderatorDisplayName} removed verification`; + default: + return t`${moderatorDisplayName} changed status to ${review.status}`; + } +} + +export function getModerationTimelineEvents(reviews, usersById, currentUser) { + return reviews.map((review, index) => { + const moderator = usersById[review.moderator_id]; + const moderatorDisplayName = getModeratorDisplayName( + moderator, + currentUser, + ); + const text = getModerationReviewEventText(review, moderatorDisplayName); + const icon = isRemovedReviewStatus(review.status) + ? 
getRemovedReviewStatusIcon() + : getIconForReview(review); + + return { + timestamp: new Date(review.created_at).valueOf(), + icon, + title: text, + }; + }); +} diff --git a/enterprise/frontend/src/metabase-enterprise/moderation/service.unit.spec.js b/enterprise/frontend/src/metabase-enterprise/moderation/service.unit.spec.js new file mode 100644 index 000000000000..5016f46befee --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/moderation/service.unit.spec.js @@ -0,0 +1,275 @@ +import { + verifyItem, + removeReview, + getIconForReview, + getTextForReviewBanner, + isItemVerified, + getLatestModerationReview, + getStatusIconForQuestion, + getModerationTimelineEvents, + getStatusIcon, + getRemovedReviewStatusIcon, +} from "./service"; + +jest.mock("metabase/services", () => ({ + ModerationReviewApi: { + create: jest.fn(() => Promise.resolve({ id: 123 })), + }, +})); + +import { ModerationReviewApi } from "metabase/services"; + +describe("moderation/service", () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe("verifyItem", () => { + it("should create a new moderation review", async () => { + const review = await verifyItem({ + itemId: 123, + itemType: "card", + text: "bar", + }); + + expect(ModerationReviewApi.create).toHaveBeenCalledWith({ + status: "verified", + moderated_item_id: 123, + moderated_item_type: "card", + text: "bar", + }); + + expect(review).toEqual({ id: 123 }); + }); + }); + + describe("removeReview", () => { + it("should create a new moderation review with a null status", async () => { + const review = await removeReview({ + itemId: 123, + itemType: "card", + }); + + expect(ModerationReviewApi.create).toHaveBeenCalledWith({ + status: null, + moderated_item_id: 123, + moderated_item_type: "card", + }); + + expect(review).toEqual({ id: 123 }); + }); + }); + + describe("getStatusIcon", () => { + it("should return an empty icon if there is no matching status", () => { + expect(getStatusIcon("foo")).toEqual({}); + }); + 
+ it("should return an icon if there is a matching status", () => { + expect(getStatusIcon("verified")).toEqual({ + name: "verified", + color: "brand", + }); + }); + + it("should not return an icon for a status of null, which represents the removal of a review and is a special case", () => { + const removedReviewStatus = null; + const accidentallyStringCoercedRemvovedReviewStatus = "null"; + expect(getStatusIcon(removedReviewStatus)).toEqual({}); + expect( + getStatusIcon(accidentallyStringCoercedRemvovedReviewStatus), + ).toEqual({}); + }); + }); + + describe("getRemovedReviewStatusIcon", () => { + it("should return an icon for a removed review", () => { + expect(getRemovedReviewStatusIcon()).toEqual({ + name: "close", + color: "text-light", + }); + }); + }); + + describe("getIconForReview", () => { + it("should return icon name/color for given review", () => { + expect(getIconForReview({ status: "verified" })).toEqual( + getStatusIcon("verified"), + ); + }); + }); + + describe("getTextForReviewBanner", () => { + it("should return text for a verified review", () => { + expect(getTextForReviewBanner({ status: "verified" })).toEqual({ + bannerText: "A moderator verified this", + tooltipText: "Remove verification", + }); + }); + + it("should include the moderator name", () => { + expect( + getTextForReviewBanner( + { status: "verified" }, + { + common_name: "Foo", + id: 1, + }, + { id: 2 }, + ), + ).toEqual({ + bannerText: "Foo verified this", + tooltipText: "Remove verification", + }); + }); + + it("should handle the moderator being the current user", () => { + expect( + getTextForReviewBanner( + { status: "verified" }, + { + common_name: "Foo", + id: 1, + }, + { id: 1 }, + ), + ).toEqual({ + bannerText: "You verified this", + tooltipText: "Remove verification", + }); + }); + }); + + describe("isItemVerified", () => { + it("should return true for a verified review", () => { + expect(isItemVerified({ status: "verified" })).toBe(true); + }); + + it("should return 
false for a null review", () => { + expect(isItemVerified({ status: null })).toBe(false); + }); + + it("should return false for no review", () => { + expect(isItemVerified()).toBe(false); + }); + }); + + describe("getLatestModerationReview", () => { + it("should return the review flagged as most recent", () => { + const reviews = [ + { id: 1, status: "verified" }, + { id: 2, status: "verified", most_recent: true }, + { id: 3, status: null }, + ]; + + expect(getLatestModerationReview(reviews)).toEqual({ + id: 2, + status: "verified", + most_recent: true, + }); + }); + + it("should return undefined when there is no review flagged as most recent", () => { + const reviews = [ + { id: 1, status: "verified" }, + { id: 2, status: "verified" }, + { id: 3, status: null }, + ]; + + expect(getLatestModerationReview(reviews)).toEqual(undefined); + expect(getLatestModerationReview([])).toEqual(undefined); + }); + + it("should return undefined when there is a review with a status of null flagged as most recent", () => { + const reviews = [ + { id: 1, status: "verified" }, + { id: 2, status: "verified" }, + { id: 3, status: null, most_recent: true }, + ]; + + expect(getLatestModerationReview(reviews)).toEqual(undefined); + }); + }); + + describe("getStatusIconForQuestion", () => { + it('should return the status icon for the most recent "real" review', () => { + const questionWithReviews = { + getModerationReviews: () => [ + { id: 1, status: "verified" }, + { id: 2, status: "verified", most_recent: true }, + { id: 3, status: null }, + ], + }; + + expect(getStatusIconForQuestion(questionWithReviews)).toEqual( + getStatusIcon("verified"), + ); + }); + + it("should return undefined vals for no review", () => { + const questionWithNoMostRecentReview = { + getModerationReviews: () => [ + { id: 1, status: "verified" }, + { id: 2, status: "verified" }, + { id: 3, status: null, most_recent: true }, + ], + }; + + const questionWithNoReviews = { + getModerationReviews: () => [], + }; + + 
const questionWithUndefinedReviews = { + getModerationReviews: () => undefined, + }; + + const noIcon = { name: undefined, color: undefined }; + + expect(getStatusIconForQuestion(questionWithNoMostRecentReview)).toEqual( + noIcon, + ); + expect(getStatusIconForQuestion(questionWithNoReviews)).toEqual(noIcon); + expect(getStatusIconForQuestion(questionWithUndefinedReviews)).toEqual( + noIcon, + ); + }); + }); + + describe("getModerationTimelineEvents", () => { + it("should return the moderation timeline events", () => { + const reviews = [ + { + id: 1, + status: "verified", + created_at: "2018-01-01T00:00:00.000Z", + moderator_id: 1, + }, + { + id: 2, + status: null, + created_at: "2018-01-02T00:00:00.000Z", + moderator_id: 123, + }, + ]; + const usersById = { + 1: { + id: 1, + common_name: "Foo", + }, + }; + + expect(getModerationTimelineEvents(reviews, usersById)).toEqual([ + { + timestamp: expect.any(Number), + icon: getStatusIcon("verified"), + title: "Foo verified this", + }, + { + timestamp: expect.any(Number), + icon: getRemovedReviewStatusIcon(), + title: "A moderator removed verification", + }, + ]); + }); + }); +}); diff --git a/enterprise/frontend/src/metabase-enterprise/plugins.js b/enterprise/frontend/src/metabase-enterprise/plugins.js index 8feaedee4ea1..ab5d012de734 100644 --- a/enterprise/frontend/src/metabase-enterprise/plugins.js +++ b/enterprise/frontend/src/metabase-enterprise/plugins.js @@ -5,15 +5,20 @@ import MetabaseSettings from "metabase/lib/settings"; // NOTE: temporarily use "latest" for Enterprise Edition docs MetabaseSettings.docsTag = () => "latest"; MetabaseSettings.isEnterprise = () => true; -// PLUGINS: -// import "./management"; +// PLUGINS: -import "./audit_app"; +import "./tools"; import "./sandboxes"; import "./auth"; +import "./caching"; +import "./collections"; import "./whitelabel"; import "./embedding"; import "./store"; import "./snippets"; import "./sharing"; +import "./moderation"; +import "./advanced_config"; +import 
"./advanced_permissions"; +import "./audit_app"; diff --git a/enterprise/frontend/src/metabase-enterprise/sandboxes/actions.js b/enterprise/frontend/src/metabase-enterprise/sandboxes/actions.js new file mode 100644 index 000000000000..6e31d23599c8 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/sandboxes/actions.js @@ -0,0 +1,19 @@ +import { updateDataPermission } from "metabase/admin/permissions/permissions"; +import { createThunkAction } from "metabase/lib/redux"; + +export const UPDATE_TABLE_SANDBOXING_PERMISSION = + "metabase-enterprise/sandboxes/UPDATE_TABLE_SANDBOXING_PERMISSION"; +export const updateTableSandboxingPermission = createThunkAction( + UPDATE_TABLE_SANDBOXING_PERMISSION, + params => async dispatch => { + const { groupId, ...entityId } = params; + return dispatch( + updateDataPermission({ + groupId, + permission: { name: "access" }, + value: "controlled", + entityId, + }), + ); + }, +); diff --git a/enterprise/frontend/src/metabase-enterprise/sandboxes/components/GTAPModal.jsx b/enterprise/frontend/src/metabase-enterprise/sandboxes/components/GTAPModal.jsx index c7588543fc85..6c7e994aa2ea 100644 --- a/enterprise/frontend/src/metabase-enterprise/sandboxes/components/GTAPModal.jsx +++ b/enterprise/frontend/src/metabase-enterprise/sandboxes/components/GTAPModal.jsx @@ -1,6 +1,7 @@ /* eslint-disable react/prop-types */ import React from "react"; - +import _ from "underscore"; +import { jt, t } from "ttag"; import { withRouter } from "react-router"; import { connect } from "react-redux"; import { push } from "react-router-redux"; @@ -25,12 +26,13 @@ import QuestionLoader from "metabase/containers/QuestionLoader"; import Dimension from "metabase-lib/lib/Dimension"; -import _ from "underscore"; -import { jt, t } from "ttag"; +import { getParentPath } from "metabase/hoc/ModalRoute"; +import { updateTableSandboxingPermission } from "../actions"; const mapStateToProps = () => ({}); const mapDispatchToProps = { push, + 
updateTableSandboxingPermission, }; type GTAP = { @@ -91,15 +93,8 @@ export default class GTAPModal extends React.Component { } close = () => { - const { - push, - params: { databaseId, schemaName }, - } = this.props; - push( - `/admin/permissions/databases/${databaseId}` + - (schemaName ? `/schemas/${encodeURIComponent(schemaName)}` : ``) + - `/tables`, - ); + const { push, route, location } = this.props; + return push(getParentPath(route, location)); }; _getCanonicalGTAP() { @@ -128,6 +123,8 @@ export default class GTAPModal extends React.Component { } else { await GTAPApi.create(gtap); } + this.props.updateTableSandboxingPermission(this.props.params); + this.close(); } catch (error) { console.error("Error saving GTAP", error); const message = error @@ -138,7 +135,6 @@ export default class GTAPModal extends React.Component { this.setState({ error: message }); throw new Error(message); } - this.close(); }; isValid() { @@ -483,7 +479,7 @@ const AttributeMappingEditor = ({ - + } diff --git a/enterprise/frontend/src/metabase-enterprise/sandboxes/index.js b/enterprise/frontend/src/metabase-enterprise/sandboxes/index.js index cd73c894d2e2..cd998bebc355 100644 --- a/enterprise/frontend/src/metabase-enterprise/sandboxes/index.js +++ b/enterprise/frontend/src/metabase-enterprise/sandboxes/index.js @@ -1,6 +1,7 @@ import { PLUGIN_ADMIN_USER_FORM_FIELDS, PLUGIN_ADMIN_PERMISSIONS_TABLE_ROUTES, + PLUGIN_ADMIN_PERMISSIONS_TABLE_GROUP_ROUTES, PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_OPTIONS, PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_ACTIONS, PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_POST_ACTION, @@ -12,43 +13,42 @@ import { push } from "react-router-redux"; import { t } from "ttag"; import { hasPremiumFeature } from "metabase-enterprise/settings"; -import { color, alpha } from "metabase/lib/colors"; - +import { + getDatabaseFocusPermissionsUrl, + getGroupFocusPermissionsUrl, +} from "metabase/admin/permissions/utils/urls"; import { ModalRoute } from "metabase/hoc/ModalRoute"; + import 
LoginAttributesWidget from "./components/LoginAttributesWidget"; import GTAPModal from "./components/GTAPModal"; -const OPTION_BLUE = { - iconColor: color("brand"), - bgColor: alpha(color("brand"), 0.15), -}; - const OPTION_SEGMENTED = { - ...OPTION_BLUE, + label: t`Sandboxed`, value: "controlled", - title: t`Grant sandboxed access`, - tooltip: t`Sandboxed access`, icon: "permissions_limited", + iconColor: "brand", +}; + +const getDatabaseViewSandboxModalUrl = (entityId, groupId) => { + const baseUrl = getDatabaseFocusPermissionsUrl(entityId, groupId); + return `${baseUrl}/segmented/group/${groupId}`; }; -const getEditSegementedAccessUrl = ( - groupId, - { databaseId, schemaName, tableId }, -) => - `/admin/permissions` + - `/databases/${databaseId}` + - (schemaName ? `/schemas/${encodeURIComponent(schemaName)}` : "") + - `/tables/${tableId}/segmented/group/${groupId}`; +const getGroupViewSandboxModalUrl = (entityId, groupId) => { + const baseUrl = getGroupFocusPermissionsUrl(groupId, { + ...entityId, + tableId: null, + }); + return `${baseUrl}/${entityId.tableId}/segmented`; +}; -const getEditSegementedAccessAction = (groupId, entityId) => ({ - ...OPTION_BLUE, - title: t`Edit sandboxed access`, - icon: "pencil", - value: push(getEditSegementedAccessUrl(groupId, entityId)), -}); +const getEditSegementedAccessUrl = (entityId, groupId, view) => + view === "database" + ? 
getDatabaseViewSandboxModalUrl(entityId, groupId) + : getGroupViewSandboxModalUrl(entityId, groupId); -const getEditSegmentedAcessPostAction = (groupId, entityId) => - push(getEditSegementedAccessUrl(groupId, entityId)); +const getEditSegmentedAcessPostAction = (entityId, groupId, view) => + push(getEditSegementedAccessUrl(entityId, groupId, view)); if (hasPremiumFeature("sandboxes")) { PLUGIN_ADMIN_USER_FORM_FIELDS.push({ @@ -57,12 +57,27 @@ if (hasPremiumFeature("sandboxes")) { type: LoginAttributesWidget, }); PLUGIN_ADMIN_PERMISSIONS_TABLE_ROUTES.push( - , + , ); - PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_OPTIONS.push(OPTION_SEGMENTED); - PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_ACTIONS["controlled"].push( - getEditSegementedAccessAction, + PLUGIN_ADMIN_PERMISSIONS_TABLE_GROUP_ROUTES.push( + , ); + PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_OPTIONS.push(OPTION_SEGMENTED); + PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_ACTIONS["controlled"].push({ + label: t`Edit sandboxed access`, + iconColor: "brand", + icon: "pencil", + actionCreator: (entityId, groupId, view) => + push(getEditSegementedAccessUrl(entityId, groupId, view)), + }); PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_POST_ACTION[ "controlled" ] = getEditSegmentedAcessPostAction; diff --git a/enterprise/frontend/src/metabase-enterprise/sharing/components/MutableParametersSection.jsx b/enterprise/frontend/src/metabase-enterprise/sharing/components/MutableParametersSection.jsx index bb37b3fc8ffb..ceb9e8b40b90 100644 --- a/enterprise/frontend/src/metabase-enterprise/sharing/components/MutableParametersSection.jsx +++ b/enterprise/frontend/src/metabase-enterprise/sharing/components/MutableParametersSection.jsx @@ -10,7 +10,7 @@ import { t } from "ttag"; import CollapseSection from "metabase/components/CollapseSection"; import ParametersList from "metabase/parameters/components/ParametersList"; -import { collateParametersWithValues } from "metabase/meta/Parameter"; +import { getValuePopulatedParameters } from 
"metabase/meta/Parameter"; import { getPulseParameters, getActivePulseParameters, @@ -31,7 +31,7 @@ function MutableParametersSection({ return map; }, {}); - const collatedParameters = collateParametersWithValues( + const valuePopulatedParameters = getValuePopulatedParameters( parameters, pulseParamValuesById, ); @@ -63,7 +63,7 @@ function MutableParametersSection({ className="align-stretch row-gap-1" vertical dashboard={dashboard} - parameters={collatedParameters} + parameters={valuePopulatedParameters} setParameterValue={setParameterValue} /> diff --git a/enterprise/frontend/src/metabase-enterprise/snippets/index.js b/enterprise/frontend/src/metabase-enterprise/snippets/index.js index cd795ee973cc..ec09fca5febf 100644 --- a/enterprise/frontend/src/metabase-enterprise/snippets/index.js +++ b/enterprise/frontend/src/metabase-enterprise/snippets/index.js @@ -9,7 +9,7 @@ import { } from "metabase/plugins"; import MetabaseSettings from "metabase/lib/settings"; -import CollectionPermissionsModal from "metabase/admin/permissions/containers/CollectionPermissionsModal"; +import CollectionPermissionsModal from "metabase/admin/permissions/components/CollectionPermissionsModal/CollectionPermissionsModal"; import Modal from "metabase/components/Modal"; import CollectionRow from "./components/CollectionRow"; @@ -51,7 +51,7 @@ PLUGIN_SNIPPET_SIDEBAR_MODALS.push( > snippetSidebar.setState({ permissionsModalCollectionId: null }) diff --git a/enterprise/frontend/src/metabase-enterprise/store/components/StoreIcon.jsx b/enterprise/frontend/src/metabase-enterprise/store/components/StoreIcon.jsx index 5136282a7b0d..b70fc33cd933 100644 --- a/enterprise/frontend/src/metabase-enterprise/store/components/StoreIcon.jsx +++ b/enterprise/frontend/src/metabase-enterprise/store/components/StoreIcon.jsx @@ -14,7 +14,7 @@ const StoreIconWrapper = ({ children, color }) => ( p={2} bg={color || colors["brand"]} color="white" - w={WRAPPER_SIZE} + width={WRAPPER_SIZE} style={{ borderRadius: 99, 
height: WRAPPER_SIZE }} > {children} diff --git a/enterprise/frontend/src/metabase-enterprise/store/containers/StoreAccount.jsx b/enterprise/frontend/src/metabase-enterprise/store/containers/StoreAccount.jsx index bdacf1690381..ebd9add4f70a 100644 --- a/enterprise/frontend/src/metabase-enterprise/store/containers/StoreAccount.jsx +++ b/enterprise/frontend/src/metabase-enterprise/store/containers/StoreAccount.jsx @@ -209,7 +209,7 @@ const AccountStatus = ({ flexDirection="column" className={className} p={[2, 4]} - w="100%" + width="100%" >

{title}

@@ -219,7 +219,7 @@ const AccountStatus = ({ {subtitle}
)} - + {featuresOrdered.map(([id, feature]) => ( ( ); const Feature = ({ feature, included, expired, preview }) => ( - + + diff --git a/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorDetail.jsx b/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorDetail.jsx new file mode 100644 index 000000000000..e43f26d25324 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorDetail.jsx @@ -0,0 +1,184 @@ +import React from "react"; + +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import { getMetadata } from "metabase/selectors/metadata"; + +import { t } from "ttag"; +import PropTypes from "prop-types"; +import { getIn } from "icepick"; + +import { formatColumn, formatValue } from "metabase/lib/formatting"; +import { CardApi } from "metabase/services"; +import Button from "metabase/components/Button"; +import Link from "metabase/components/Link"; +import Question from "metabase-lib/lib/Question"; +import { QuestionResultLoader } from "metabase/containers/QuestionResultLoader"; +import { columnNameToUrl } from "../../audit_app/lib/mode"; + +function idxToUrl(resRow, resCols, nameToResCol, colName) { + const idVal = resRow[nameToResCol[colName]]; + const urlVal = colName && idVal ? columnNameToUrl[colName](idVal) : ""; + const linkClass = urlVal === "" ? 
"" : "text-brand"; + return [urlVal, linkClass]; +} + +function ErrorDetailDisplay(props) { + const { result } = props; + const resRow = getIn(result, ["data", "rows", 0]); + const resCols = getIn(result, ["data", "cols"]); + if (resRow && resCols) { + const nameToResCol = resCols.reduce( + (obj, x, idx) => Object.assign(obj, { [x.name]: idx }), + {}, + ); + + const linkColumns = [ + null, + "collection_id", + "database_id", + null, + "table_id", + null, + "user_id", + null, + ]; + + const ordinaryRows = [ + "last_run_at", + "collection_name", + "database_name", + "schema_name", + "table_name", + "total_runs", + "user_name", + "updated_at", + ].map((x, idx) => { + const [urlVal, linkClass] = idxToUrl( + resRow, + resCols, + nameToResCol, + linkColumns[idx], + ); + const formattedVal = formatValue(resRow[nameToResCol[x]], { + column: resCols[nameToResCol[x]], + jsx: true, + rich: true, + type: "cell", + local: true, + }); + return ( +
+ + + + ); + }); + + const dashIdRows = resRow[nameToResCol.dash_name_str] + ?.split("|") + ?.map((x, idx) => ( + + + + + )); + + const [cardUrlVal, cardLinkClass] = idxToUrl( + resRow, + resCols, + nameToResCol, + "card_id", + ); + + return [ +

+ { + + {resRow[nameToResCol.card_name]} + + } +

, +
+ {resRow[nameToResCol.error_str]} +
, +
+ elem)} + onChange={e => this.handleAllSelectClick(e, rows)} + /> +
+ + this.handleRowSelectClick( + { ...e, originRow: rowIndex }, + row, + rowIndex, + ) + } + /> + onVisualizationClick(clicked) : null } > - {formatValue(value, { - ...columnSettings, - type: "cell", - jsx: true, - rich: true, - clicked: clicked, - // always show timestamps in local time for the audit app - local: true, - })} +
+ {formatValue(value, { + ...columnSettings, + type: "cell", + jsx: true, + rich: true, + clicked: clicked, + // always show timestamps in local time for the audit app + local: true, + })} +
+ this.handleRemoveRowClick(row, cols)} + > + + +
+ {formatColumn(resCols[nameToResCol[x]])} + + { + + {formattedVal} + + } +
+ {idx === 0 && formatColumn(resCols[nameToResCol.dash_name_str])} + + {formatValue(x, { column: resCols[nameToResCol.dash_name_str] })} +
+ {[ordinaryRows, dashIdRows]} +
, + ]; + } else { + return null; + } +} + +function ErrorDetail(props) { + const { params, errorRetry } = props; + const cardId = parseInt(params.cardId); + + // below card is not the card in question, but + // the card we're creating to query for the error details + const card = { + name: "Card Errors", + dataset_query: { + type: "internal", + fn: "metabase-enterprise.audit-app.pages.query-detail/bad-card", + args: [cardId], + }, + }; + const question = new Question(card, null); + + return ( +
+ + + {({ rawSeries, result }) => } + +
+ ); +} + +const mapStateToProps = (state, props) => ({ + metadata: getMetadata(state), +}); + +const mapDispatchToProps = { + errorRetry: async cardId => { + await CardApi.query({ cardId: cardId }); + // we're imagining that we successfully reran, in which case we want to go back to overall table + return push("/admin/tools/errors/"); + }, +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(ErrorDetail); + +ErrorDetail.propTypes = { + params: PropTypes.object, + errorRetry: PropTypes.func, +}; +ErrorDetailDisplay.propTypes = { + result: PropTypes.object, +}; diff --git a/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorOverview.jsx b/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorOverview.jsx new file mode 100644 index 000000000000..eddf2d8108d4 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/tools/containers/ErrorOverview.jsx @@ -0,0 +1,114 @@ +import React, { useState, useRef } from "react"; +import { t } from "ttag"; + +import _ from "underscore"; + +import { CardApi } from "metabase/services"; + +import * as Queries from "../../audit_app/lib/cards/queries"; +import AuditTable from "../../audit_app/containers/AuditTable"; +import AuditParameters from "../../audit_app/components/AuditParameters"; +import { ErrorMode } from "../mode"; + +const getSortOrder = isAscending => (isAscending ? 
"asc" : "desc"); + +const CARD_ID_COL = 0; + +export default function ErrorOverview(props) { + const reloadRef = useRef(null); + // TODO: use isReloading to display a loading overlay + // eslint-disable-next-line no-unused-vars + const [isReloading, setIsReloading] = useState(false); + const [hasResults, setHasResults] = useState(false); + const [sorting, setSorting] = useState({ + column: "last_run_at", + isAscending: false, + }); + + const [rowChecked, setRowChecked] = useState({}); + + const handleAllSelectClick = e => { + const newRowChecked = { ...rowChecked }; + const noRowChecked = Object.values(rowChecked).every(v => !v); + for (const rowIndex of Array(e.rows.length).keys()) { + const cardIndex = e.rows[rowIndex][CARD_ID_COL]; + if (noRowChecked) { + newRowChecked[cardIndex] = true; + } else { + newRowChecked[cardIndex] = false; + } + } + setRowChecked(newRowChecked); + }; + + const handleRowSelectClick = e => { + const newRowChecked = { ...rowChecked }; + const cardIndex = e.row[CARD_ID_COL]; + newRowChecked[cardIndex] = !(rowChecked[cardIndex] || false); + setRowChecked(newRowChecked); + }; + + const handleReloadSelected = async () => { + const checkedCardIds = Object.keys(_.pick(rowChecked, _.identity)); + + await Promise.all( + checkedCardIds.map( + async member => await CardApi.query({ cardId: member }), + ), + ); + setRowChecked({}); + setIsReloading(true); + reloadRef.current?.(); + }; + + const handleSortingChange = sorting => setSorting(sorting); + + const handleLoad = result => { + setHasResults(result[0].row_count !== 0); + setIsReloading(false); + }; + + return ( + !isChecked), + onClick: handleReloadSelected, + }, + ]} + hasResults={hasResults} + > + {({ errorFilter, dbFilter, collectionFilter }) => ( + + )} + + ); +} diff --git a/enterprise/frontend/src/metabase-enterprise/tools/containers/ToolsApp.jsx b/enterprise/frontend/src/metabase-enterprise/tools/containers/ToolsApp.jsx new file mode 100644 index 000000000000..4048d5a3c6f2 --- /dev/null 
+++ b/enterprise/frontend/src/metabase-enterprise/tools/containers/ToolsApp.jsx @@ -0,0 +1,20 @@ +import React, { Component } from "react"; +import PropTypes from "prop-types"; + +import AdminLayout from "metabase/components/AdminLayout"; + +export default class ToolsApp extends Component { + static propTypes = { + children: PropTypes.node, + }; + + render() { + const { children } = this.props; + return ( + +

Questions that errored when last run

+ {children} +
+ ); + } +} diff --git a/enterprise/frontend/src/metabase-enterprise/tools/index.js b/enterprise/frontend/src/metabase-enterprise/tools/index.js new file mode 100644 index 000000000000..4220bb437927 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/tools/index.js @@ -0,0 +1,10 @@ +import { t } from "ttag"; + +import { PLUGIN_ADMIN_NAV_ITEMS, PLUGIN_ADMIN_ROUTES } from "metabase/plugins"; +import { hasPremiumFeature } from "metabase-enterprise/settings"; +import getToolsRoutes from "./routes"; + +if (hasPremiumFeature("audit_app")) { + PLUGIN_ADMIN_NAV_ITEMS.push({ name: t`Tools`, path: "/admin/tools" }); + PLUGIN_ADMIN_ROUTES.push(getToolsRoutes); +} diff --git a/enterprise/frontend/src/metabase-enterprise/tools/mode.js b/enterprise/frontend/src/metabase-enterprise/tools/mode.js new file mode 100644 index 000000000000..f59162335c32 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/tools/mode.js @@ -0,0 +1,28 @@ +import { t } from "ttag"; +import { push } from "react-router-redux"; + +const CARD_ID_ROW_IDX = 0; + +const ErrorDrill = ({ clicked }) => { + if (!clicked) { + return []; + } + + const cardId = clicked.origin.row[CARD_ID_ROW_IDX]; + + return [ + { + name: "detail", + title: t`View this`, + default: true, + action() { + return push(`/admin/tools/errors/${cardId}`); + }, + }, + ]; +}; + +export const ErrorMode = { + name: "error", + drills: () => [ErrorDrill], +}; diff --git a/enterprise/frontend/src/metabase-enterprise/tools/routes.jsx b/enterprise/frontend/src/metabase-enterprise/tools/routes.jsx new file mode 100644 index 000000000000..18533bb63bd6 --- /dev/null +++ b/enterprise/frontend/src/metabase-enterprise/tools/routes.jsx @@ -0,0 +1,22 @@ +import React from "react"; + +import { Route } from "metabase/hoc/Title"; +import { IndexRedirect } from "react-router"; +import { t } from "ttag"; +import ToolsApp from "./containers/ToolsApp"; +import ErrorOverview from "./containers/ErrorOverview"; +import ErrorDetail from 
"./containers/ErrorDetail"; + +const getRoutes = (store: any) => ( + + + + + +); + +export default getRoutes; diff --git a/enterprise/frontend/src/metabase-enterprise/whitelabel/components/ColorSchemeWidget.jsx b/enterprise/frontend/src/metabase-enterprise/whitelabel/components/ColorSchemeWidget.jsx index 91134f9acb05..fcbaec193674 100644 --- a/enterprise/frontend/src/metabase-enterprise/whitelabel/components/ColorSchemeWidget.jsx +++ b/enterprise/frontend/src/metabase-enterprise/whitelabel/components/ColorSchemeWidget.jsx @@ -1,6 +1,8 @@ /* eslint-disable react/prop-types */ import React from "react"; +import { t } from "ttag"; + import ColorPicker from "metabase/components/ColorPicker"; import Icon from "metabase/components/Icon"; import { humanize } from "metabase/lib/formatting"; @@ -15,39 +17,35 @@ const THEMEABLE_COLORS = [ const COLOR_DISPLAY_PROPERTIES = { brand: { - name: "Primary color", - description: - "The main color used throughout the app for buttons, links, and the default chart color.", + name: t`Primary color`, + description: t`The main color used throughout the app for buttons, links, and the default chart color.`, }, nav: { - name: "Navigation bar color", - description: - "The top nav bar of Metabase. Defaults to the Primary Color if not set.", + name: t`Navigation bar color`, + description: t`The top nav bar of Metabase. 
Defaults to the Primary Color if not set.`, }, accent1: { - name: "Accent 1", - description: - "The color of aggregations and breakouts in the graphical query builder.", + name: t`Accent 1`, + description: t`The color of aggregations and breakouts in the graphical query builder.`, }, accent2: { - name: "Accent 2", - description: - "The color of filters in the query builder and buttons and links in filter widgets.", + name: t`Accent 2`, + description: t`The color of filters in the query builder and buttons and links in filter widgets.`, }, accent3: { - name: "Additional chart color", + name: t`Additional chart color`, }, accent4: { - name: "Additional chart color", + name: t`Additional chart color`, }, accent5: { - name: "Additional chart color", + name: t`Additional chart color`, }, accent6: { - name: "Additional chart color", + name: t`Additional chart color`, }, accent7: { - name: "Additional chart color", + name: t`Additional chart color`, }, }; diff --git a/enterprise/frontend/src/metabase-enterprise/whitelabel/lib/whitelabel.js b/enterprise/frontend/src/metabase-enterprise/whitelabel/lib/whitelabel.js index 66eff9931ef9..30b5c45b2acb 100644 --- a/enterprise/frontend/src/metabase-enterprise/whitelabel/lib/whitelabel.js +++ b/enterprise/frontend/src/metabase-enterprise/whitelabel/lib/whitelabel.js @@ -49,7 +49,9 @@ function walkStyleSheets(sheets, fn) { } if (rule.style) { for (const prop of rule.style) { - fn(rule.style, prop, rule.style[prop]); + const cssValue = rule.style.getPropertyValue(prop); + const cssPriority = rule.style.getPropertyPriority(prop); + fn(rule.style, prop, cssValue, cssPriority); } } } @@ -74,12 +76,15 @@ const replaceColors = (cssValue, matchColor, replacementColor) => { const getColorStyleProperties = memoize(function() { const properties = []; - walkStyleSheets(document.styleSheets, (style, cssProperty, cssValue) => { - // don't bother with checking if there are no colors - if (COLOR_REGEX.test(cssValue)) { - properties.push({ style, 
cssProperty, cssValue }); - } - }); + walkStyleSheets( + document.styleSheets, + (style, cssProperty, cssValue, cssPriority) => { + // don't bother with checking if there are no colors + if (COLOR_REGEX.test(cssValue)) { + properties.push({ style, cssProperty, cssValue, cssPriority }); + } + }, + ); return properties; }); @@ -96,11 +101,17 @@ function initColorCSS(colorName) { const originalColor = Color(originalColors[colorName]); // look for CSS rules which have colors matching the brand colors or very light or desaturated - for (const { style, cssProperty, cssValue } of getColorStyleProperties()) { + for (const { + style, + cssProperty, + cssValue, + cssPriority, + } of getColorStyleProperties()) { // try replacing with a random color to see if we actually need to if (cssValue !== replaceColors(cssValue, originalColor, RANDOM_COLOR)) { CSS_COLOR_UPDATORS_BY_COLOR_NAME[colorName].push(themeColor => { - style[cssProperty] = replaceColors(cssValue, originalColor, themeColor); + const newCssValue = replaceColors(cssValue, originalColor, themeColor); + style.setProperty(cssProperty, newCssValue, cssPriority); }); } } diff --git a/frontend/interfaces/underscore.js b/frontend/interfaces/underscore.js index ca19feec88e9..27e7fe0ed5df 100755 --- a/frontend/interfaces/underscore.js +++ b/frontend/interfaces/underscore.js @@ -56,6 +56,8 @@ declare module "underscore" { declare function any(a: Array, pred: (val: T) => boolean): boolean; declare function contains(a: Array, val: T): boolean; + declare function head(a: Array, n?: number): Array; + declare function tail(a: Array, n?: number): Array; declare function initial(a: Array, n?: number): Array; declare function rest(a: Array, index?: number): Array; diff --git a/frontend/parse-deps.js b/frontend/parse-deps.js new file mode 100644 index 000000000000..0524c39d9ea3 --- /dev/null +++ b/frontend/parse-deps.js @@ -0,0 +1,258 @@ +#!/usr/bin/env node + +const fs = require("fs"); +const path = require("path"); + +const glob = 
require("glob"); +const minimatch = require("minimatch"); +const parser = require("@babel/parser"); +const traverse = require("@babel/traverse").default; +const readline = require("readline"); + +const PATTERN = "{enterprise/,}frontend/src/**/*.{js,jsx}"; + +// after webpack.config.js +const ALIAS = { + metabase: "frontend/src/metabase", + "metabase-lib": "frontend/src/metabase-lib", + "metabase-enterprise": "enterprise/frontend/src/metabase-enterprise", + "metabase-types": "frontend/src/metabase-types", +}; + +function files() { + return glob.sync(PATTERN); +} + +function dependencies() { + const deps = files().map(fileName => { + const contents = fs.readFileSync(fileName, "utf-8"); + const options = { + allowImportExportEverywhere: true, + allowReturnOutsideFunction: true, + decoratorsBeforeExport: true, + sourceType: "unambiguous", + plugins: ["jsx", "flow", "decorators-legacy", "exportDefaultFrom"], + }; + const importList = []; + try { + const ast = parser.parse(contents, options); + traverse(ast, { + enter(path) { + if (path.node.type === "ImportDeclaration") { + importList.push(path.node.source.value); + } + if (path.node.type === "CallExpression") { + const callee = path.node.callee; + const args = path.node.arguments; + if (callee.type === "Identifier" && callee.name === "require") { + if (args.length === 1 && args[0].type === "StringLiteral") { + importList.push(args[0].value); + } + } + } + }, + }); + } catch (e) { + console.error(fileName, e.toString()); + process.exit(-1); + n; + } + const base = path.dirname(fileName) + path.sep; + const absoluteImportList = importList + .map(name => { + const absName = name[0] === "." ? path.normalize(base + name) : name; + const parts = absName.split(path.sep); + const realPath = ALIAS[parts[0]]; + parts[0] = realPath ? 
realPath : parts[0]; + const realName = parts.join(path.sep); + return realName; + }) + .map(name => { + if (fs.existsSync(name)) { + if ( + fs.lstatSync(name).isDirectory() && + fs.existsSync(name + "/index.js") + ) { + return name + "/index.js"; + } + return name; + } else if (fs.existsSync(name + ".js")) { + return name + ".js"; + } else if (fs.existsSync(name + ".jsx")) { + return name + ".jsx"; + } + return name; + }) + .filter(name => minimatch(name, PATTERN)); + + return { source: fileName, dependencies: absoluteImportList.sort() }; + }); + return deps; +} + +function dependents() { + let dependents = {}; + dependencies().forEach(dep => { + const { source, dependencies } = dep; + dependencies.forEach(d => { + if (!dependents[d]) { + dependents[d] = []; + } + dependents[d].push(source); + }); + }); + return dependents; +} + +function getDependents(sources) { + const allDependents = dependents(); + let filteredDependents = []; + + sources.forEach(name => { + const list = allDependents[name]; + if (list && Array.isArray(list) && list.length > 0) { + filteredDependents.push(...list); + } + }); + + return Array.from(new Set(filteredDependents)).sort(); // unique +} + +function filterDependents() { + const rl = readline.createInterface({ input: process.stdin }); + + const start = async () => { + let sources = []; + for await (const line of rl) { + const name = line.trim(); + if (name.length > 0) { + sources.push(name); + } + } + const filteredDependents = getDependents(sources); + console.log(filteredDependents.join("\n")); + }; + start(); +} + +function filterAllDependents() { + const rl = readline.createInterface({ input: process.stdin }); + + const start = async () => { + let sources = []; + for await (const line of rl) { + const name = line.trim(); + if (name.length > 0) { + sources.push(name); + } + } + let filteredDependents = getDependents(sources); + + const allDependents = dependents(); + for (let i = 0; i < filteredDependents.length; ++i) { + const name 
= filteredDependents[i]; + const list = allDependents[name]; + if (list && Array.isArray(list) && list.length > 0) { + const newAddition = list.filter(e => filteredDependents.indexOf(e) < 0); + filteredDependents.push(...newAddition); + } + } + console.log(filteredDependents.sort().join("\n")); + }; + start(); +} + +function countDependents() { + const allDependents = dependents(); + const sources = Object.keys(allDependents).sort(); + const tally = sources.map(name => { + return { name, count: allDependents[name].length }; + }); + console.log(tally.map(({ name, count }) => `${count} ${name}`).join("\n")); +} + +function countAllDependents() { + const allDependents = dependents(); + const sources = Object.keys(allDependents).sort(); + const tally = sources.map(name => { + const list = allDependents[name]; + for (let i = 0; i < list.length; ++i) { + const deps = allDependents[list[i]]; + if (deps && Array.isArray(deps) && deps.length > 1) { + const newAddition = deps.filter(e => list.indexOf(e) < 0); + list.push(...newAddition); + } + } + return { name, count: list.length }; + }); + console.log(tally.map(({ name, count }) => `${count} ${name}`).join("\n")); +} + +function matrix() { + const allDependents = dependents(); + const sources = Object.keys(allDependents).sort(); + const width = Math.max(...sources.map(s => s.length)); + const rows = sources.map(name => { + const list = allDependents[name]; + const checks = sources.map(dep => (list.indexOf(dep) < 0 ? 
" " : "x")); + return name.padEnd(width) + " | " + checks.join(""); + }); + console.log(rows.join("\n")); +} + +const USAGE = ` +parse-deps cmd + +cmd must be one of: + + files Display list of source files + dependencies Show the dependencies of each source file + dependents Show the dependents of each source file + filter-dependents Filter direct dependents based on stdin +filter-all-dependents Filter all indirect and direct dependents based on stdin + count-dependents List the total count of direct dependents + count-all-dependents List the total count of its direct and indirect dependents + matrix Display 2-D matrix of dependent relationship +`; + +function main(args) { + const cmd = args[0]; + if (cmd) { + switch (cmd.toLowerCase()) { + case "files": + console.log(files().join("\n")); + break; + case "dependencies": + console.log(JSON.stringify(dependencies(), null, 2)); + break; + case "dependents": + console.log(JSON.stringify(dependents(), null, 2)); + break; + case "filter-dependents": + filterDependents(); + break; + case "filter-all-dependents": + filterAllDependents(); + break; + case "count-dependents": + countDependents(); + break; + case "count-all-dependents": + countAllDependents(); + break; + case "matrix": + matrix(); + break; + default: + console.log(USAGE); + break; + } + } else { + console.log(USAGE); + } +} + +let args = process.argv; +args.shift(); +args.shift(); +main(args); diff --git a/frontend/src/metabase-lib/lib/Question.js b/frontend/src/metabase-lib/lib/Question.js index d7ec233f69c2..6927ab47c5d9 100644 --- a/frontend/src/metabase-lib/lib/Question.js +++ b/frontend/src/metabase-lib/lib/Question.js @@ -1,5 +1,5 @@ import _ from "underscore"; -import { chain, assoc, dissoc, assocIn } from "icepick"; +import { chain, assoc, dissoc, assocIn, getIn } from "icepick"; // NOTE: the order of these matters due to circular dependency issues import StructuredQuery, { @@ -35,7 +35,10 @@ import { findColumnIndexForColumnSetting, 
syncTableColumnsToQuery, } from "metabase/lib/dataset"; -import { getParametersWithExtras, isTransientId } from "metabase/meta/Card"; +import { + getValueAndFieldIdPopulatedParametersFromCard, + isTransientId, +} from "metabase/meta/Card"; import { parameterToMBQLFilter, normalizeParameterValue, @@ -301,10 +304,16 @@ export default class Question { return this._card && this._card.displayIsLocked; } - // If we're locked to a display that is no longer "sensible", unlock it. - maybeUnlockDisplay(sensibleDisplays): Question { - const locked = - this.displayIsLocked() && sensibleDisplays.includes(this.display()); + // If we're locked to a display that is no longer "sensible", unlock it + // unless it was locked in unsensible + maybeUnlockDisplay(sensibleDisplays, previousSensibleDisplays): Question { + const wasSensible = + previousSensibleDisplays == null || + previousSensibleDisplays.includes(this.display()); + const isSensible = sensibleDisplays.includes(this.display()); + + const shouldUnlock = wasSensible && !isSensible; + const locked = this.displayIsLocked() && !shouldUnlock; return this.setDisplayIsLocked(locked); } @@ -535,13 +544,38 @@ export default class Question { drillPK(field: Field, value: Value): ?Question { const query = this.query(); - if (query instanceof StructuredQuery) { - return query - .reset() - .setTable(field.table) - .filter(["=", ["field", field.id, null], value]) - .question(); + + if (!(query instanceof StructuredQuery)) { + return; } + + const otherPKFilters = query + .filters() + ?.filter(filter => { + const filterField = filter?.field(); + if (!filterField) { + return false; + } + + const isNotSameField = filterField.id !== field.id; + const isPKEqualsFilter = + filterField.isPK() && filter.operatorName() === "="; + const isFromSameTable = filterField.table.id === field.table.id; + + return isPKEqualsFilter && isNotSameField && isFromSameTable; + }) + .map(filter => filter.raw()); + + const filtersToApply = [ + ["=", ["field", 
field.id, null], value], + ...otherPKFilters, + ]; + + const resultedQuery = filtersToApply.reduce((query, filter) => { + return query.addFilter(filter); + }, query.reset().setTable(field.table)); + + return resultedQuery.question(); } _syncStructuredQueryColumnsAndSettings(previousQuestion, previousQuery) { @@ -624,8 +658,11 @@ export default class Question { const validVizSettings = vizSettings.filter(colSetting => { const hasColumn = findColumnIndexForColumnSetting(cols, colSetting) >= 0; - return hasColumn; + const isMutatingColumn = + findColumnIndexForColumnSetting(addedColumns, colSetting) >= 0; + return hasColumn && !isMutatingColumn; }); + const noColumnsRemoved = validVizSettings.length === vizSettings.length; if (noColumnsRemoved && addedColumns.length === 0) { @@ -645,7 +682,8 @@ export default class Question { syncColumnsAndSettings(previous, queryResults) { const query = this.query(); - if (query instanceof NativeQuery && queryResults) { + const isQueryResultValid = queryResults && !queryResults.error; + if (query instanceof NativeQuery && isQueryResultValid) { return this._syncNativeQuerySettings(queryResults); } const previousQuery = previous && previous.query(); @@ -699,6 +737,11 @@ export default class Question { return Mode.forQuestion(this); } + /** + * Returns true if, based on filters and table columns, the expected result is a single row. + * However, it might not be true when a PK column is not unique, leading to multiple rows. + * Because of that, always check query results in addition to this property. + */ isObjectDetail(): boolean { const mode = this.mode(); return mode ? 
mode.name() === "object" : false; @@ -781,16 +824,22 @@ export default class Question { originalQuestion, clean = true, query, + includeDisplayIsLocked, }: { originalQuestion?: Question, clean?: boolean, query?: { [string]: any }, + includeDisplayIsLocked?: boolean, } = {}): string { if ( !this.id() || (originalQuestion && this.isDirtyComparedTo(originalQuestion)) ) { - return Urls.question(null, this._serializeForUrl({ clean }), query); + return Urls.question( + null, + this._serializeForUrl({ clean, includeDisplayIsLocked }), + query, + ); } else { return Urls.question(this.card(), "", query); } @@ -965,7 +1014,10 @@ export default class Question { // TODO: Fix incorrect Flow signature parameters(): ParameterObject[] { - return getParametersWithExtras(this.card(), this._parameterValues); + return getValueAndFieldIdPopulatedParametersFromCard( + this.card(), + this._parameterValues, + ); } parametersList(): ParameterObject[] { @@ -999,7 +1051,11 @@ export default class Question { } // Internal methods - _serializeForUrl({ includeOriginalCardId = true, clean = true } = {}) { + _serializeForUrl({ + includeOriginalCardId = true, + clean = true, + includeDisplayIsLocked = false, + } = {}) { const query = clean ? this.query().clean() : this.query(); const cardCopy = { @@ -1015,6 +1071,11 @@ export default class Question { ...(includeOriginalCardId ? { original_card_id: this._card.original_card_id } : {}), + ...(includeDisplayIsLocked + ? { + displayIsLocked: this._card.displayIsLocked, + } + : {}), }; return Card_DEPRECATED.utf8_to_b64url(JSON.stringify(sortObject(cardCopy))); @@ -1035,11 +1096,17 @@ export default class Question { } getUrlWithParameters() { - const question = this.query().isEditable() - ? this.convertParametersToFilters() - : this.markDirty(); // forces use of serialized question url + const question = this.convertParametersToFilters().markDirty(); const query = this.isNative() ? 
this._parameterValues : undefined; - return question.getUrl({ originalQuestion: this, query }); + return question.getUrl({ + originalQuestion: this, + query, + includeDisplayIsLocked: true, + }); + } + + getModerationReviews() { + return getIn(this, ["_card", "moderation_reviews"]) || []; } } diff --git a/frontend/src/metabase-lib/lib/metadata/Base.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Base.unit.spec.js new file mode 100644 index 000000000000..b7d32dd87636 --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Base.unit.spec.js @@ -0,0 +1,53 @@ +import Base from "./Base"; + +describe("Base", () => { + describe("instantiation", () => { + it("should set properties from `object` on the Base instance", () => { + const instance = new Base({ abc: 123 }); + expect(instance.abc).toEqual(123); + }); + + it("should set ALL enumerable properties of `object`, including properties down the prototype chain", () => { + const object = { + abc: 123, + }; + + Object.defineProperty(object, "anEnumerableProperty", { + enumerable: false, + value: false, + }); + + object.__proto__ = { + secretPrototypeValue: true, + }; + + const instance = new Base(object); + + expect(instance.abc).toEqual(123); + expect(instance.secretPrototypeValue).toBe(true); + expect(instance.anEnumerableProperty).toBeUndefined(); + }); + }); + + describe("getPlainObject", () => { + it("returns whatever `object` was provided during instantiation", () => { + const obj = { + abc: 123, + }; + + const instance = new Base(obj); + + expect(instance.getPlainObject()).toBe(obj); + }); + + it("returns whatever `_plainObject` is set to", () => { + const obj1 = {}; + const obj2 = {}; + + const instance = new Base(obj1); + instance._plainObject = obj2; + + expect(instance.getPlainObject()).toBe(obj2); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Database.js b/frontend/src/metabase-lib/lib/metadata/Database.js index ef5d61ec437c..697d2d4475e0 100644 --- 
a/frontend/src/metabase-lib/lib/metadata/Database.js +++ b/frontend/src/metabase-lib/lib/metadata/Database.js @@ -1,17 +1,14 @@ import Question from "../Question"; import Base from "./Base"; -import Table from "./Table"; -import Schema from "./Schema"; import { memoize, createLookupByProperty } from "metabase-lib/lib/utils"; import { generateSchemaId } from "metabase/schema"; -import type { SchemaName } from "metabase-types/types/Table"; -import type { DatabaseFeature } from "metabase-types/types/Database"; - -type VirtualDatabaseFeature = "join"; +/** + * @typedef { import("./metadata").SchemaName } SchemaName + */ /** * Wrapper class for database metadata objects. Contains {@link Schema}s, {@link Table}s, {@link Metric}s, {@link Segment}s. @@ -21,28 +18,35 @@ type VirtualDatabaseFeature = "join"; export default class Database extends Base { // TODO Atte Keinänen 6/11/17: List all fields here (currently only in types/Database) - name: string; - description: ?string; - - tables: Table[]; - schemas: Schema[]; - - auto_run_queries: boolean; - - displayName(): string { + displayName() { return this.name; } // SCHEMAS - schema(schemaName: ?SchemaName) { + /** + * @param {SchemaName} [schemaName] + */ + schema(schemaName) { return this.metadata.schema(generateSchemaId(this.id, schemaName)); } - schemaNames(): SchemaName[] { + schemaNames() { return this.schemas.map(s => s.name).sort((a, b) => a.localeCompare(b)); } + getSchemas() { + return this.schemas; + } + + schemasCount() { + return this.schemas.length; + } + + getTables() { + return this.tables; + } + // TABLES @memoize @@ -57,9 +61,12 @@ export default class Database extends Base { // FEATURES - hasFeature( - feature: null | DatabaseFeature | VirtualDatabaseFeature, - ): boolean { + /** + * @typedef {import("./metadata").DatabaseFeature} DatabaseFeature + * @typedef {"join"} VirtualDatabaseFeature + * @param {DatabaseFeature | VirtualDatabaseFeature} [feature] + */ + hasFeature(feature) { if (!feature) { return 
true; } @@ -82,13 +89,13 @@ export default class Database extends Base { // QUESTIONS - newQuestion(): Question { + newQuestion() { return this.question() .setDefaultQuery() .setDefaultDisplay(); } - question(query = { "source-table": null }): Question { + question(query = { "source-table": null }) { return Question.create({ metadata: this.metadata, dataset_query: { @@ -99,7 +106,7 @@ export default class Database extends Base { }); } - nativeQuestion(native = {}): Question { + nativeQuestion(native = {}) { return Question.create({ metadata: this.metadata, dataset_query: { @@ -119,7 +126,36 @@ export default class Database extends Base { } /** Returns a database containing only the saved questions from the same database, if any */ - savedQuestionsDatabase(): ?Database { + savedQuestionsDatabase() { return this.metadata.databasesList().find(db => db.is_saved_questions); } + + /** + * @private + * @param {number} id + * @param {string} name + * @param {?string} description + * @param {Table[]} tables + * @param {Schema[]} schemas + * @param {Metadata} metadata + * @param {boolean} auto_run_queries + */ + /* istanbul ignore next */ + _constructor( + id, + name, + description, + tables, + schemas, + metadata, + auto_run_queries, + ) { + this.id = id; + this.name = name; + this.description = description; + this.tables = tables; + this.schemas = schemas; + this.metadata = metadata; + this.auto_run_queries = auto_run_queries; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Database.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Database.unit.spec.js new file mode 100644 index 000000000000..599eb61da842 --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Database.unit.spec.js @@ -0,0 +1,247 @@ +import Database from "./Database"; +import Schema from "./Schema"; +import Metadata from "./Metadata"; +import Table from "./Table"; +import Base from "./Base"; +import Question from "../Question"; + +describe("Database", () => { + describe("instantiation", 
() => { + it("should create an instance of Database", () => { + expect(new Database()).toBeInstanceOf(Database); + }); + + it("should add `object` props to the instance (because it extends Base)", () => { + expect(new Database()).toBeInstanceOf(Base); + expect(new Database({ foo: "bar" })).toHaveProperty("foo", "bar"); + }); + }); + + describe("displayName", () => { + it("should return the name prop", () => { + expect(new Database({ name: "foo" }).displayName()).toBe("foo"); + }); + }); + + describe("schema", () => { + let schema; + let database; + beforeEach(() => { + schema = new Schema({ id: "123:foo" }); + const metadata = new Metadata({ + schemas: { + "123:foo": schema, + }, + }); + database = new Database({ + id: 123, + metadata, + }); + }); + + it("should return the schema with the given name", () => { + expect(database.schema("foo")).toBe(schema); + }); + + it("should return null when the given schema name does not match a schema", () => { + expect(database.schema("bar")).toBe(null); + }); + }); + + describe("schemaNames", () => { + it("should return a list of schemaNames", () => { + const database = new Database({ + id: 123, + schemas: [ + new Schema({ id: "123:foo", name: "foo" }), + new Schema({ id: "123:bar", name: "bar" }), + ], + }); + expect(database.schemaNames()).toEqual(["bar", "foo"]); + }); + }); + + describe("tablesLookup", () => { + it("should return a map of tables keyed by id", () => { + const table1 = new Table({ id: 1 }); + const table2 = new Table({ id: 2 }); + + expect( + new Database({ + tables: [], + }).tablesLookup(), + ).toEqual({}); + + expect( + new Database({ + tables: [table1, table2], + }).tablesLookup(), + ).toEqual({ + 1: table1, + 2: table2, + }); + }); + }); + + describe("hasFeature", () => { + beforeEach(() => {}); + + it("returns true when given a falsy `feature`", () => { + expect(new Database({}).hasFeature(null)).toBe(true); + expect(new Database({}).hasFeature("")).toBe(true); + }); + + it("should return true when given 
`feature` is found within the `features` on the instance", () => { + expect(new Database({ features: ["foo"] }).hasFeature("foo")).toBe(true); + }); + + it("should return false when given `feature` is not found within the `features` on the instance", () => { + expect(new Database({ features: ["foo"] }).hasFeature("bar")).toBe(false); + }); + + it("should return false for 'join' even when it exists in `features`", () => { + expect(new Database({ features: ["join"] }).hasFeature("join")).toBe( + false, + ); + }); + + it("should return true for 'join' for a set of other values", () => { + ["left-join", "right-join", "inner-join", "full-join"].forEach( + feature => { + expect(new Database({ features: [feature] }).hasFeature("join")).toBe( + true, + ); + }, + ); + }); + }); + + describe("supportsPivots", () => { + it("returns true when `expressions` and `left-join` exist in `features`", () => { + expect( + new Database({ + features: ["foo", "left-join"], + }).supportsPivots(), + ).toBe(false); + + expect( + new Database({ + features: ["expressions", "right-join"], + }).supportsPivots(), + ).toBe(false); + + expect( + new Database({ + features: ["expressions", "left-join"], + }).supportsPivots(), + ).toBe(true); + }); + }); + + describe("question", () => { + it("should create a question using the `metadata` found on the Database instance", () => { + const metadata = new Metadata(); + const database = new Database({ + metadata, + }); + const question = database.question(); + expect(question.metadata()).toBe(metadata); + }); + + it("should create a question using the given Database instance's id in the question's query", () => { + const database = new Database({ + id: 123, + }); + + expect(database.question().datasetQuery()).toEqual({ + database: 123, + query: { + "source-table": null, + }, + type: "query", + }); + + expect(database.question({ foo: "bar" }).datasetQuery()).toEqual({ + database: 123, + query: { + foo: "bar", + }, + type: "query", + }); + }); + }); + + 
describe("nativeQuestion", () => { + it("should create a native question using the `metadata` found on the Database instance", () => { + const metadata = new Metadata(); + const database = new Database({ + metadata, + }); + const question = database.nativeQuestion(); + expect(question.metadata()).toBe(metadata); + }); + + it("should create a native question using the given Database instance's id in the question's query", () => { + const database = new Database({ + id: 123, + }); + + expect(database.nativeQuestion().datasetQuery()).toEqual({ + database: 123, + native: { + query: "", + "template-tags": {}, + }, + type: "native", + }); + + expect(database.nativeQuestion({ foo: "bar" }).datasetQuery()).toEqual({ + database: 123, + native: { + query: "", + "template-tags": {}, + foo: "bar", + }, + type: "native", + }); + }); + }); + + describe("newQuestion", () => { + it("should return new question with defaulted query and display", () => { + const database = new Database({ + id: 123, + }); + + Question.prototype.setDefaultQuery = jest.fn(function() { + return this; + }); + Question.prototype.setDefaultDisplay = jest.fn(function() { + return this; + }); + + const question = database.newQuestion(); + + expect(question).toBeInstanceOf(Question); + expect(Question.prototype.setDefaultDisplay).toHaveBeenCalled(); + expect(Question.prototype.setDefaultQuery).toHaveBeenCalled(); + }); + }); + + describe("savedQuestionsDatabase", () => { + it("should return the 'fake' saved questions database", () => { + const database1 = new Database({ id: 1 }); + const database2 = new Database({ id: 2, is_saved_questions: true }); + const metadata = new Metadata({ + databases: { + 1: database1, + 2: database2, + }, + }); + + database1.metadata = metadata; + + expect(database1.savedQuestionsDatabase()).toBe(database2); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Field.js b/frontend/src/metabase-lib/lib/metadata/Field.js index b8dd19b6336e..2c3cd484b44a 100644 --- 
a/frontend/src/metabase-lib/lib/metadata/Field.js +++ b/frontend/src/metabase-lib/lib/metadata/Field.js @@ -1,5 +1,4 @@ import Base from "./Base"; -import Table from "./Table"; import moment from "moment"; @@ -34,19 +33,14 @@ import { getFilterOperators, } from "metabase/lib/schema_metadata"; -import type { FieldValues } from "metabase-types/types/Field"; +/** + * @typedef { import("./metadata").FieldValues } FieldValues + */ /** * Wrapper class for field metadata objects. Belongs to a Table. */ export default class Field extends Base { - name: string; - display_name: string; - description: string; - - table: Table; - name_field: ?Field; - parent() { return this.metadata ? this.metadata.field(this.parent_id) : null; } @@ -153,7 +147,10 @@ export default class Field extends Base { return isEntityName(this); } - isCompatibleWith(field: Field) { + /** + * @param {Field} field + */ + isCompatibleWith(field) { return ( this.isDate() === field.isDate() || this.isNumeric() === field.isNumeric() || @@ -161,7 +158,10 @@ export default class Field extends Base { ); } - fieldValues(): FieldValues { + /** + * @returns {FieldValues} + */ + fieldValues() { return getFieldValues(this._object); } @@ -282,8 +282,9 @@ export default class Field extends Base { /** * Returns the remapped field, if any + * @return {?Field} */ - remappedField(): ?Field { + remappedField() { const displayFieldId = this.dimensions && this.dimensions.human_readable_field_id; if (displayFieldId != null) { @@ -299,8 +300,9 @@ export default class Field extends Base { /** * Returns the human readable remapped value, if any + * @returns {?string} */ - remappedValue(value): ?string { + remappedValue(value) { // TODO: Ugh. Should this be handled further up by the parameter widget? 
if (this.isNumeric() && typeof value !== "number") { value = parseFloat(value); @@ -310,8 +312,9 @@ export default class Field extends Base { /** * Returns whether the field has a human readable remapped value for this value + * @returns {?string} */ - hasRemappedValue(value): ?string { + hasRemappedValue(value) { // TODO: Ugh. Should this be handled further up by the parameter widget? if (this.isNumeric() && typeof value !== "number") { value = parseFloat(value); @@ -321,8 +324,9 @@ export default class Field extends Base { /** * Returns true if this field can be searched, e.x. in filter or parameter widgets + * @returns {boolean} */ - isSearchable(): boolean { + isSearchable() { // TODO: ...? return this.isString(); } @@ -333,8 +337,39 @@ export default class Field extends Base { /** * Returns a FKDimension for this field and the provided field + * @param {Field} foreignField + * @return {Dimension} */ - foreign(foreignField: Field): Dimension { + foreign(foreignField) { return this.dimension().foreign(foreignField.dimension()); } + + /** + * @private + * @param {number} id + * @param {string} name + * @param {string} display_name + * @param {string} description + * @param {Table} table + * @param {?Field} name_field + * @param {Metadata} metadata + */ + /* istanbul ignore next */ + _constructor( + id, + name, + display_name, + description, + table, + name_field, + metadata, + ) { + this.id = id; + this.name = name; + this.display_name = display_name; + this.description = description; + this.table = table; + this.name_field = name_field; + this.metadata = metadata; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Field.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Field.unit.spec.js new file mode 100644 index 000000000000..6eaddf76dc27 --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Field.unit.spec.js @@ -0,0 +1,363 @@ +import Field from "./Field"; +import Table from "./Table"; +import Schema from "./Schema"; +import Metadata from 
"./Metadata"; +import Base from "./Base"; +import Dimension from "../Dimension"; + +describe("Field", () => { + describe("instantiation", () => { + it("should create an instance of Schema", () => { + expect(new Field()).toBeInstanceOf(Field); + }); + + it("should add `object` props to the instance (because it extends Base)", () => { + expect(new Field()).toBeInstanceOf(Base); + expect(new Field({ foo: "bar" })).toHaveProperty("foo", "bar"); + }); + }); + + describe("parent", () => { + it("should return null when `metadata` does not exist on instance", () => { + expect(new Field().parent()).toBeNull(); + }); + + it("should return the field that matches the instance's `parent_id` when `metadata` exists on the instance", () => { + const parentField = new Field({ + id: 1, + }); + + const metadata = new Metadata({ + fields: { + 1: parentField, + }, + }); + + const field = new Field({ + parent_id: 1, + id: 2, + metadata, + }); + + expect(field.parent()).toBe(parentField); + }); + }); + + describe("path", () => { + it("should return list of fields starting with instance, ending with root parent", () => { + const rootField = new Field({ + id: 1, + }); + + const parentField = new Field({ + id: 2, + parent_id: 1, + }); + + const metadata = new Metadata({ + fields: { + 1: rootField, + 2: parentField, + }, + }); + + parentField.metadata = metadata; + rootField.metadata = metadata; + + const field = new Field({ + parent_id: 2, + id: 3, + metadata, + }); + + expect(field.path()).toEqual([rootField, parentField, field]); + }); + }); + + describe("displayName", () => { + it("should return a field's display name", () => { + expect(new Field({ name: "foo" }).displayName()).toBe("foo"); + }); + + it("should prioritize the `display_name` field over `name`", () => { + expect( + new Field({ display_name: "bar", name: "foo" }).displayName(), + ).toBe("bar"); + }); + + it("should prioritize the name in the field's `dimensions` property if it has one", () => { + const field = new Field({ + 
dimensions: { name: "dimensions" }, + display_name: "display", + }); + + expect(field.displayName()).toBe("dimensions"); + }); + + describe("includePath flag", () => { + let field; + beforeEach(() => { + const rootField = new Field({ + id: 1, + name: "rootField", + }); + + const parentField = new Field({ + id: 2, + parent_id: 1, + name: "parentField", + }); + + const metadata = new Metadata({ + fields: { + 1: rootField, + 2: parentField, + }, + }); + + parentField.metadata = metadata; + rootField.metadata = metadata; + + field = new Field({ + parent_id: 2, + id: 3, + metadata, + name: "field", + }); + }); + + it("should add parent field display names to the field's display name when enabled", () => { + expect(field.displayName({ includePath: true })).toBe( + "rootField: parentField: field", + ); + }); + + it("should be enabled by default", () => { + expect(field.displayName({ includePath: true })).toBe( + field.displayName(), + ); + }); + + it("should exclude parent field display names when disabled", () => { + expect(field.displayName({ includePath: false })).toBe("field"); + }); + }); + + describe("includeTable flag", () => { + let field; + beforeEach(() => { + field = new Field({ + id: 1, + name: "field", + }); + }); + + it("should do nothing when there is no table on the field instance", () => { + expect(field.displayName({ includeTable: true })).toBe("field"); + }); + + it("should add the table name to the start of the field name", () => { + field.table = new Table({ + display_name: "table", + }); + + expect(field.displayName({ includeTable: true })).toBe("table → field"); + }); + }); + + describe("includeSchema flag", () => { + let field; + beforeEach(() => { + field = new Field({ + id: 1, + name: "field", + }); + }); + + it("won't do anything if enabled and includeTable is not enabled", () => { + expect(field.displayName({ includeSchema: true })).toBe("field"); + }); + + it("should add a combined schema + table display name to the start of the field name", 
() => { + field.table = new Table({ + display_name: "table", + schema: new Schema({ + name: "schema", + }), + }); + + expect( + field.displayName({ includeTable: true, includeSchema: true }), + ).toBe("Schema.table → field"); + }); + }); + }); + + describe("targetObjectName", () => { + it("should return the display name of the field stripped of an appended id", () => { + const field = new Field({ + name: "field id", + }); + + expect(field.targetObjectName()).toBe("field"); + }); + }); + + describe("dimension", () => { + it("should return the field's dimension when the id is an mbql field", () => { + const field = new Field({ + id: ["field", 123, null], + }); + + const dimension = field.dimension(); + + expect(dimension).toBeInstanceOf(Dimension); + expect(dimension.fieldIdOrName()).toBe(123); + }); + + it("should return the field's dimension when the id is not an mbql field", () => { + const field = new Field({ + id: 123, + }); + + const dimension = field.dimension(); + + expect(dimension).toBeInstanceOf(Dimension); + expect(dimension.fieldIdOrName()).toBe(123); + }); + }); + + describe("getDefaultDateTimeUnit", () => { + describe("when the field is of type `type/DateTime`", () => { + it("should return 'day'", () => { + const field = new Field({ + fingerprint: { + type: { + "type/Number": {}, + }, + }, + }); + + expect(field.getDefaultDateTimeUnit()).toBe("day"); + }); + }); + }); + + describe("when field is of type `type/DateTime`", () => { + it("should return a time unit depending on the number of days in the 'fingerprint'", () => { + const field = new Field({ + fingerprint: { + type: { + "type/DateTime": { + earliest: "2019-03-01T00:00:00Z", + latest: "2021-01-01T00:00:00Z", + }, + }, + }, + }); + + expect(field.getDefaultDateTimeUnit()).toBe("month"); + }); + }); + + describe("remappedField", () => { + it("should return the 'human readable' field tied to the field's dimension", () => { + const field1 = new Field({ id: 1 }); + const field2 = new Field({ id: 2 
}); + const metadata = new Metadata({ + fields: { + 1: field1, + 2: field2, + }, + }); + + const field = new Field({ + id: 3, + dimensions: { + human_readable_field_id: 1, + }, + }); + field.metadata = metadata; + + expect(field.remappedField()).toBe(field1); + }); + + it("should return the field's name_field", () => { + const nameField = new Field(); + const field = new Field({ + id: 3, + name_field: nameField, + }); + + expect(field.remappedField()).toBe(nameField); + }); + + it("should return null when the field has no name_field or no dimension with a 'human readable' field", () => { + expect(new Field().remappedField()).toBe(null); + }); + }); + + describe("remappedValue", () => { + it("should call a given value using the instance's remapping property", () => { + const field = new Field({ + remapping: { + get: () => 1, + }, + }); + + expect(field.remappedValue(2)).toBe(1); + }); + + it("should convert a numeric field into a number if it is not a number", () => { + const field = new Field({ + isNumeric: () => true, + remapping: { + get: num => num, + }, + }); + + expect(field.remappedValue("2.5rem")).toBe(2.5); + }); + }); + + describe("hasRemappedValue", () => { + it("should call a given value using the instance's remapping property", () => { + const field = new Field({ + remapping: { + has: () => true, + }, + }); + + expect(field.hasRemappedValue(2)).toBe(true); + }); + + it("should convert a numeric field into a number if it is not a number", () => { + const field = new Field({ + isNumeric: () => true, + remapping: { + has: num => typeof num === "number", + }, + }); + + expect(field.hasRemappedValue("2.5rem")).toBe(true); + }); + }); + + describe("isSearchable", () => { + it("should be true when the field is a string", () => { + const field = new Field({ + isString: () => true, + }); + + expect(field.isSearchable()).toBe(true); + }); + + it("should be false when the field is not a string", () => { + const field = new Field({ + isString: () => false, + }); + 
+ expect(field.isSearchable()).toBe(false); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Metadata.js b/frontend/src/metabase-lib/lib/metadata/Metadata.js index 6551d86603e1..03d16d05c7f7 100644 --- a/frontend/src/metabase-lib/lib/metadata/Metadata.js +++ b/frontend/src/metabase-lib/lib/metadata/Metadata.js @@ -2,33 +2,26 @@ import _ from "underscore"; import Base from "./Base"; -import Database from "./Database"; -import Table from "./Table"; -import Schema from "./Schema"; -import Field from "./Field"; -import Segment from "./Segment"; -import Metric from "./Metric"; - import Question from "../Question"; -import type { DatabaseId } from "metabase-types/types/Database"; -import type { TableId } from "metabase-types/types/Table"; -import type { FieldId } from "metabase-types/types/Field"; -import type { MetricId } from "metabase-types/types/Metric"; -import type { SegmentId } from "metabase-types/types/Segment"; +/** + * @typedef { import("./metadata").DatabaseId } DatabaseId + * @typedef { import("./metadata").SchemaId } SchemaId + * @typedef { import("./metadata").TableId } TableId + * @typedef { import("./metadata").FieldId } FieldId + * @typedef { import("./metadata").MetricId } MetricId + * @typedef { import("./metadata").SegmentId } SegmentId + */ /** * Wrapper class for the entire metadata store */ export default class Metadata extends Base { - databases: { [id: DatabaseId]: Database }; - tables: { [id: TableId]: Table }; - fields: { [id: FieldId]: Field }; - metrics: { [id: MetricId]: Metric }; - segments: { [id: SegmentId]: Segment }; - - // DEPRECATED: this won't be sorted or filtered in a meaningful way - databasesList({ savedQuestions = true } = {}): Database[] { + /** + * @deprecated this won't be sorted or filtered in a meaningful way + * @returns {Database[]} + */ + databasesList({ savedQuestions = true } = {}) { return _.chain(this.databases) .values() .filter(db => savedQuestions || !db.is_saved_questions) @@ -36,46 +29,98 @@ 
export default class Metadata extends Base { .value(); } - // DEPRECATED: this won't be sorted or filtered in a meaningful way - tablesList(): Database[] { - return (Object.values(this.tables): Database[]); + /** + * @deprecated this won't be sorted or filtered in a meaningful way + * @returns {Table[]} + */ + tablesList() { + return Object.values(this.tables); } - // DEPRECATED: this won't be sorted or filtered in a meaningful way - metricsList(): Metric[] { - return (Object.values(this.metrics): Metric[]); + /** + * @deprecated this won't be sorted or filtered in a meaningful way + * @returns {Metric[]} + */ + metricsList() { + return Object.values(this.metrics); } - // DEPRECATED: this won't be sorted or filtered in a meaningful way - segmentsList(): Metric[] { - return (Object.values(this.segments): Segment[]); + /** + * @deprecated this won't be sorted or filtered in a meaningful way + * @returns {Segment[]} + */ + segmentsList() { + return Object.values(this.segments); } - segment(segmentId): ?Segment { + /** + * @param {SegmentId} segmentId + * @returns {?Segment} + */ + + segment(segmentId) { return (segmentId != null && this.segments[segmentId]) || null; } - metric(metricId): ?Metric { + /** + * @param {MetricId} metricId + * @returns {?Metric} + */ + metric(metricId) { return (metricId != null && this.metrics[metricId]) || null; } - database(databaseId): ?Database { + /** + * @param {DatabaseId} databaseId + * @returns {?Database} + */ + database(databaseId) { return (databaseId != null && this.databases[databaseId]) || null; } - schema(schemaId): ?Schema { + /** + * @param {SchemaId} schemaId + * @returns {Schema} + */ + schema(schemaId) { return (schemaId != null && this.schemas[schemaId]) || null; } - table(tableId): ?Table { + /** + * + * @param {TableId} tableId + * @returns {?Table} + */ + table(tableId) { return (tableId != null && this.tables[tableId]) || null; } - field(fieldId): ?Field { + /** + * @param {FieldId} fieldId + * @returns {?Field} + 
*/ + field(fieldId) { return (fieldId != null && this.fields[fieldId]) || null; } question(card) { return new Question(card, this); } + + /** + * @private + * @param {Object.} databases + * @param {Object.} tables + * @param {Object.} fields + * @param {Object.} metrics + * @param {Object.} segments + */ + /* istanbul ignore next */ + _constructor(databases, tables, fields, metrics, segments) { + this.databases = databases; + this.tables = tables; + this.fields = fields; + this.metrics = metrics; + this.segments = segments; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Metadata.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Metadata.unit.spec.js new file mode 100644 index 000000000000..36475e355b6c --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Metadata.unit.spec.js @@ -0,0 +1,185 @@ +import Metadata from "./Metadata"; +import Base from "./Base"; + +import Database from "./Database"; +import Table from "./Table"; +import Schema from "./Schema"; +import Field from "./Field"; +import Segment from "./Segment"; +import Metric from "./Metric"; +import Question from "../Question"; + +describe("Metadata", () => { + describe("instantiation", () => { + it("should create an instance of Metadata", () => { + expect(new Metadata()).toBeInstanceOf(Metadata); + }); + + it("should add `object` props to the instance (because it extends Base)", () => { + expect(new Metadata()).toBeInstanceOf(Base); + expect(new Metadata({ foo: "bar" })).toHaveProperty("foo", "bar"); + }); + }); + + describe("databasesList (deprecated)", () => { + let databases; + let databaseA; + let databaseB; + let databaseC; + + beforeEach(() => { + databaseA = new Database({ id: 2, name: "A", is_saved_questions: true }); + databaseB = new Database({ id: 3, name: "B" }); + databaseC = new Database({ id: 1, name: "C" }); + + databases = { + 1: databaseC, + 2: databaseA, + 3: databaseB, + }; + }); + + it("should return a sorted list of database objects found on the metadata instance", 
() => { + const metadata = new Metadata({ + databases, + }); + + expect(metadata.databasesList()).toEqual([ + databaseA, + databaseB, + databaseC, + ]); + }); + + it("should return all databases when the `savedQuestions` flag is true", () => { + const metadata = new Metadata({ + databases, + }); + + expect( + metadata.databasesList({ + savedQuestions: true, + }), + ).toEqual(metadata.databasesList()); + }); + + it("should exclude the 'is_saved_questions' db when the `savedQuestions` flag is false", () => { + const metadata = new Metadata({ + databases, + }); + + expect( + metadata.databasesList({ + savedQuestions: false, + }), + ).toEqual([databaseB, databaseC]); + }); + }); + + describe("tablesList (deprecated)", () => { + it("should return a list of table objects found on the instance", () => { + const tableA = new Table({ id: 1, name: "A" }); + const tableB = new Table({ id: 2, name: "B" }); + + const tables = { + 1: tableA, + 2: tableB, + }; + + const metadata = new Metadata({ + tables, + }); + + expect(metadata.tablesList()).toEqual([tableA, tableB]); + }); + }); + + describe("metricsList (deprecated)", () => { + it("should return a list of metric objects found on the instance", () => { + const metricA = new Metric({ id: 1, name: "A" }); + const metricB = new Metric({ id: 2, name: "B" }); + + const metrics = { + 1: metricA, + 2: metricB, + }; + + const metadata = new Metadata({ + metrics, + }); + + expect(metadata.metricsList()).toEqual([metricA, metricB]); + }); + }); + + describe("segmentsList (deprecated)", () => { + it("should return a list of segment objects found on the instance", () => { + const segmentA = new Segment({ id: 1, name: "A" }); + const segmentB = new Segment({ id: 2, name: "B" }); + + const segments = { + 1: segmentA, + 2: segmentB, + }; + + const metadata = new Metadata({ + segments, + }); + + expect(metadata.segmentsList()).toEqual([segmentA, segmentB]); + }); + }); + + describe("question", () => { + it("should return a new question using 
the metadata instance", () => { + const card = { name: "Question", id: 1 }; + const metadata = new Metadata(); + const question = metadata.question(card); + + expect(question).toBeInstanceOf(Question); + expect(question.card()).toBe(card); + expect(question.metadata()).toBe(metadata); + }); + }); + + [ + ["segment", obj => new Segment(obj)], + ["metric", obj => new Metric(obj)], + ["database", obj => new Database(obj)], + ["schema", obj => new Schema(obj)], + ["table", obj => new Table(obj)], + ["field", obj => new Field(obj)], + ].forEach(([fnName, instantiate]) => { + describe(fnName, () => { + let instanceA; + let instanceB; + let metadata; + beforeEach(() => { + instanceA = instantiate({ id: 1, name: "A" }); + instanceB = instantiate({ id: 2, name: "B" }); + + const instances = { + 1: instanceA, + 2: instanceB, + }; + + metadata = new Metadata({ + [`${fnName}s`]: instances, + }); + }); + + it(`should retun the ${fnName} with the given id`, () => { + expect(metadata[fnName](1)).toBe(instanceA); + expect(metadata[fnName](2)).toBe(instanceB); + }); + + it("should return null when the id matches nothing", () => { + expect(metadata[fnName](3)).toBeNull(); + }); + + it("should return null when the id is nil", () => { + expect(metadata[fnName]()).toBeNull(); + }); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Metric.js b/frontend/src/metabase-lib/lib/metadata/Metric.js index 76a80de261a5..e7e221133755 100644 --- a/frontend/src/metabase-lib/lib/metadata/Metric.js +++ b/frontend/src/metabase-lib/lib/metadata/Metric.js @@ -1,23 +1,21 @@ import Base from "./Base"; -import Database from "./Database"; -import Table from "./Table"; -import type { Aggregation } from "metabase-types/types/Query"; + +/** + * @typedef { import("./metadata").Aggregation } Aggregation + */ /** * Wrapper class for a metric. 
Belongs to a {@link Database} and possibly a {@link Table} */ export default class Metric extends Base { - name: string; - description: string; - - database: Database; - table: Table; - - displayName(): string { + displayName() { return this.name; } - aggregationClause(): Aggregation { + /** + * @returns {Aggregation} + */ + aggregationClause() { return ["metric", this.id]; } @@ -49,7 +47,28 @@ export default class Metric extends Base { } } - isActive(): boolean { + isActive() { return !this.archived; } + + /** + * @private + * @param {string} name + * @param {string} description + * @param {Database} database + * @param {Table} table + * @param {number} id + * @param {StructuredQuery} definition + * @param {boolean} archived + */ + /* istanbul ignore next */ + _constructor(name, description, database, table, id, definition, archived) { + this.name = name; + this.description = description; + this.database = database; + this.table = table; + this.id = id; + this.definition = definition; + this.archived = archived; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Schema.js b/frontend/src/metabase-lib/lib/metadata/Schema.js index 3f7aea7d497f..db1b0bf90c57 100644 --- a/frontend/src/metabase-lib/lib/metadata/Schema.js +++ b/frontend/src/metabase-lib/lib/metadata/Schema.js @@ -1,6 +1,4 @@ import Base from "./Base"; -import Database from "./Database"; -import Table from "./Table"; import { titleize, humanize } from "metabase/lib/formatting"; @@ -8,10 +6,24 @@ import { titleize, humanize } from "metabase/lib/formatting"; * Wrapper class for a {@link Database} schema. Contains {@link Table}s. 
*/ export default class Schema extends Base { - database: Database; - tables: Table[]; - displayName() { return titleize(humanize(this.name)); } + + getTables() { + return this.tables; + } + + /** + * @private + * @param {string} name + * @param {Database} database + * @param {Table[]} tables + */ + /* istanbul ignore next */ + _constructor(name, database, tables) { + this.name = name; + this.database = database; + this.tables = tables; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Schema.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Schema.unit.spec.js new file mode 100644 index 000000000000..4a9afea2b1cd --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Schema.unit.spec.js @@ -0,0 +1,22 @@ +import Schema from "./Schema"; +import Base from "./Base"; + +describe("Schema", () => { + describe("instantiation", () => { + it("should create an instance of Schema", () => { + expect(new Schema()).toBeInstanceOf(Schema); + }); + + it("should add `object` props to the instance (because it extends Base)", () => { + expect(new Schema()).toBeInstanceOf(Base); + expect(new Schema({ foo: "bar" })).toHaveProperty("foo", "bar"); + }); + }); + + describe("displayName", () => { + it("should return a formatted `name` string", () => { + const schema = new Schema({ name: "foo_bar" }); + expect(schema.displayName()).toBe("Foo Bar"); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Segment.js b/frontend/src/metabase-lib/lib/metadata/Segment.js index 697f55edadc6..7a6ceb5cb07b 100644 --- a/frontend/src/metabase-lib/lib/metadata/Segment.js +++ b/frontend/src/metabase-lib/lib/metadata/Segment.js @@ -1,27 +1,44 @@ import Base from "./Base"; -import Database from "./Database"; -import Table from "./Table"; -import type { FilterClause } from "metabase-types/types/Query"; + +/** + * @typedef { import("./metadata").FilterClause } FilterClause + */ /** * Wrapper class for a segment. 
Belongs to a {@link Database} and possibly a {@link Table} */ export default class Segment extends Base { - name: string; - description: string; - - database: Database; - table: Table; - - displayName(): string { + displayName() { return this.name; } - filterClause(): FilterClause { + /** + * @returns {FilterClause} + */ + filterClause() { return ["segment", this.id]; } - isActive(): boolean { + isActive() { return !this.archived; } + + /** + * @private + * @param {string} name + * @param {string} description + * @param {Database} database + * @param {Table} table + * @param {number} id + * @param {boolean} archived + */ + /* istanbul ignore next */ + _constructor(name, description, database, table, id, archived) { + this.name = name; + this.description = description; + this.database = database; + this.table = table; + this.id = id; + this.archived = archived; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Segment.unit.spec.js b/frontend/src/metabase-lib/lib/metadata/Segment.unit.spec.js new file mode 100644 index 000000000000..d26d110654be --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/Segment.unit.spec.js @@ -0,0 +1,37 @@ +import Segment from "./Segment"; +import Base from "./Base"; + +describe("Segment", () => { + describe("instantiation", () => { + it("should create an instance of Segment", () => { + expect(new Segment()).toBeInstanceOf(Segment); + }); + + it("should add `object` props to the instance (because it extends Base)", () => { + expect(new Segment()).toBeInstanceOf(Base); + expect(new Segment({ foo: "bar" })).toHaveProperty("foo", "bar"); + }); + }); + + describe("displayName", () => { + it("should return the `name` property found on the instance", () => { + expect(new Segment({ name: "foo" }).displayName()).toBe("foo"); + }); + }); + + describe("filterClause", () => { + it("should return a filter clause", () => { + expect(new Segment({ id: 123 }).filterClause()).toEqual(["segment", 123]); + }); + }); + + describe("isActive", () 
=> { + it("should return true if the segment is not archived", () => { + expect(new Segment({ archived: false }).isActive()).toBe(true); + }); + + it("should return false if the segment is archived", () => { + expect(new Segment({ archived: true }).isActive()).toBe(false); + }); + }); +}); diff --git a/frontend/src/metabase-lib/lib/metadata/Table.js b/frontend/src/metabase-lib/lib/metadata/Table.js index 27cff3e45c71..628ed812128d 100644 --- a/frontend/src/metabase-lib/lib/metadata/Table.js +++ b/frontend/src/metabase-lib/lib/metadata/Table.js @@ -2,36 +2,20 @@ import Question from "../Question"; import Base from "./Base"; -import Database from "./Database"; -import Schema from "./Schema"; -import Field from "./Field"; - -import Dimension from "../Dimension"; import { singularize } from "metabase/lib/formatting"; import { getAggregationOperatorsWithFields } from "metabase/lib/schema_metadata"; import { memoize, createLookupByProperty } from "metabase-lib/lib/utils"; -import type { SchemaName } from "metabase-types/types/Table"; -import type StructuredQuery from "metabase-lib/lib/queries/StructuredQuery"; - -type EntityType = string; // TODO: move somewhere central +/** + * @typedef { import("./metadata").SchemaName } SchemaName + * @typedef { import("./metadata").EntityType } EntityType + * @typedef { import("./metadata").StructuredQuery } StructuredQuery + */ /** This is the primary way people interact with tables */ export default class Table extends Base { - description: string; - - db: Database; - - schema: ?Schema; - // @deprecated: use schema.name (all tables should have a schema object, in theory) - schema_name: ?SchemaName; - - fields: Field[]; - - entity_type: ?EntityType; - - hasSchema(): boolean { + hasSchema() { return (this.schema_name && this.db && this.db.schemas.length > 1) || false; } @@ -40,13 +24,13 @@ export default class Table extends Base { return this.db; } - newQuestion(): Question { + newQuestion() { return this.question() 
.setDefaultQuery() .setDefaultDisplay(); } - question(): Question { + question() { return Question.create({ databaseId: this.db && this.db.id, tableId: this.id, @@ -54,7 +38,7 @@ export default class Table extends Base { }); } - isSavedQuestion(): boolean { + isSavedQuestion() { return this.savedQuestionId() !== null; } @@ -63,13 +47,16 @@ export default class Table extends Base { return match ? parseInt(match[1]) : null; } - query(query = {}): StructuredQuery { + /** + * @returns {StructuredQuery} + */ + query(query = {}) { return this.question() .query() .updateQuery(q => ({ ...q, ...query })); } - dimensions(): Dimension[] { + dimensions() { return this.fields.map(field => field.dimension()); } @@ -89,7 +76,7 @@ export default class Table extends Base { return singularize(this.displayName()); } - dateFields(): Field[] { + dateFields() { return this.fields.filter(field => field.isDate()); } @@ -130,4 +117,23 @@ export default class Table extends Base { get fields_lookup() { return this.fieldsLookup(); } + + /** + * @private + * @param {string} description + * @param {Database} db + * @param {Schema?} schema + * @param {SchemaName} [schema_name] + * @param {Field[]} fields + * @param {EntityType} entity_type + */ + /* istanbul ignore next */ + _constructor(description, db, schema, schema_name, fields, entity_type) { + this.description = description; + this.db = db; + this.schema = schema; + this.schema_name = schema_name; + this.fields = fields; + this.entity_type = entity_type; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/metadata.d.ts b/frontend/src/metabase-lib/lib/metadata/metadata.d.ts new file mode 100644 index 000000000000..d5df84f67ca0 --- /dev/null +++ b/frontend/src/metabase-lib/lib/metadata/metadata.d.ts @@ -0,0 +1,39 @@ +// to help declaring nominal types +interface Flavoring { + _type?: FlavorT; +} +export type Flavor = T & Flavoring; + + +export type EntityType = Flavor; +export type SchemaName = Flavor; + + +// TODO: move to types.d.ts 
+ +export type DatabaseId = Flavor; +export type TableId = Flavor; +export type FieldId = Flavor; +export type MetricId = Flavor; +export type SegmentId = Flavor; + +export type SchemaId = Flavor; + +export type DatabaseFeature = + | "basic-aggregations" + | "standard-deviation-aggregations" + | "expression-aggregations" + | "foreign-keys" + | "native-parameters" + | "nested-queries" + | "expressions" + | "case-sensitivity-string-filter-options" + | "binning"; + +export type FieldValues = Flavor; + + +// TODO: move to query.d.ts +export type Aggregation = Flavor; +export type FilterClause = Flavor; +export type StructuredQuery = Flavor; \ No newline at end of file diff --git a/frontend/src/metabase-lib/lib/queries/InternalQuery.js b/frontend/src/metabase-lib/lib/queries/InternalQuery.js index 3dac6aa7f40b..e4829a804c36 100644 --- a/frontend/src/metabase-lib/lib/queries/InternalQuery.js +++ b/frontend/src/metabase-lib/lib/queries/InternalQuery.js @@ -9,7 +9,7 @@ import AtomicQuery from "metabase-lib/lib/queries/AtomicQuery"; // args: [], // } export default class InternalQuery extends AtomicQuery { - static isDatasetQueryType(datasetQuery: DatasetQuery): boolean { + static isDatasetQueryType(datasetQuery: DatasetQuery) { return datasetQuery.type === "internal"; } } diff --git a/frontend/src/metabase-lib/lib/queries/NativeQuery.js b/frontend/src/metabase-lib/lib/queries/NativeQuery.js index 87b7592b20a0..94a1abb66055 100644 --- a/frontend/src/metabase-lib/lib/queries/NativeQuery.js +++ b/frontend/src/metabase-lib/lib/queries/NativeQuery.js @@ -24,7 +24,7 @@ import type { DatabaseEngine, DatabaseId } from "metabase-types/types/Database"; import AtomicQuery from "metabase-lib/lib/queries/AtomicQuery"; -import Dimension, { TemplateTagDimension } from "../Dimension"; +import Dimension, { TemplateTagDimension, FieldDimension } from "../Dimension"; import Variable, { TemplateTagVariable } from "../Variable"; import DimensionOptions from "../DimensionOptions"; @@ -76,7 
+76,7 @@ export default class NativeQuery extends AtomicQuery { this._nativeDatasetQuery = (datasetQuery: NativeDatasetQuery); } - static isDatasetQueryType(datasetQuery: DatasetQuery): boolean { + static isDatasetQueryType(datasetQuery: DatasetQuery) { return datasetQuery && datasetQuery.type === NATIVE_QUERY_TEMPLATE.type; } @@ -133,7 +133,7 @@ export default class NativeQuery extends AtomicQuery { /** * Returns true if the database metadata (or lack thererof indicates the user can modify and run this query */ - readOnly(): boolean { + readOnly() { const database = this.database(); return !database || database.native_permissions !== "write"; } @@ -168,12 +168,12 @@ export default class NativeQuery extends AtomicQuery { return this; } - hasWritePermission(): boolean { + hasWritePermission() { const database = this.database(); return database != null && database.native_permissions === "write"; } - supportsNativeParameters(): boolean { + supportsNativeParameters() { const database = this.database(); return ( database != null && _.contains(database.features, "native-parameters") @@ -270,7 +270,7 @@ export default class NativeQuery extends AtomicQuery { templateTagsMap(): TemplateTags { return getIn(this.datasetQuery(), ["native", "template-tags"]) || {}; } - allTemplateTagsAreValid(): boolean { + allTemplateTagsAreValid() { return this.templateTags().every( // field filters require a field t => !(t.type === "dimension" && t.dimension == null), @@ -458,4 +458,26 @@ export default class NativeQuery extends AtomicQuery { } return {}; } + + dependentMetadata() { + const templateTags = this.templateTags(); + + return templateTags + .filter( + tag => + tag.type === "dimension" && + FieldDimension.isFieldClause(tag.dimension), + ) + .map(tag => { + const dimension = FieldDimension.parseMBQL( + tag.dimension, + this.metadata(), + ); + + return { + type: "field", + id: dimension.field().id, + }; + }); + } } diff --git a/frontend/src/metabase-lib/lib/queries/StructuredQuery.js 
b/frontend/src/metabase-lib/lib/queries/StructuredQuery.js index 82d6f5c8d646..fc467a71254e 100644 --- a/frontend/src/metabase-lib/lib/queries/StructuredQuery.js +++ b/frontend/src/metabase-lib/lib/queries/StructuredQuery.js @@ -72,7 +72,7 @@ export const STRUCTURED_QUERY_TEMPLATE = { * A wrapper around an MBQL (`query` type @type {DatasetQuery}) object */ export default class StructuredQuery extends AtomicQuery { - static isDatasetQueryType(datasetQuery: DatasetQuery): boolean { + static isDatasetQueryType(datasetQuery: DatasetQuery) { return datasetQuery && datasetQuery.type === STRUCTURED_QUERY_TEMPLATE.type; } @@ -117,7 +117,7 @@ export default class StructuredQuery extends AtomicQuery { /** * @returns true if this query is in a state where it can be edited. Must have database and table set, and metadata for the table loaded. */ - isEditable(): boolean { + isEditable() { return this.hasMetadata(); } @@ -158,7 +158,7 @@ export default class StructuredQuery extends AtomicQuery { /** * Returns true if the database metadata (or lack thererof indicates the user can modify and run this query */ - readOnly(): boolean { + readOnly() { return !this.database(); } @@ -361,7 +361,11 @@ export default class StructuredQuery extends AtomicQuery { } cleanJoins(): StructuredQuery { - return this._cleanClauseList("joins"); + let query = this; + this.joins().forEach((join, index) => { + query = query.updateJoin(index, join.clean()); + }); + return query._cleanClauseList("joins"); } cleanExpressions(): StructuredQuery { @@ -404,7 +408,7 @@ export default class StructuredQuery extends AtomicQuery { } } - isValid(): boolean { + isValid() { if (!this.hasData()) { return false; } @@ -647,21 +651,21 @@ export default class StructuredQuery extends AtomicQuery { /** * @returns true if the aggregation can be removed */ - canRemoveAggregation(): boolean { + canRemoveAggregation() { return this.aggregations().length > 1; } /** * @returns true if the query has no aggregation */ - 
isBareRows(): boolean { + isBareRows() { return !this.hasAggregations(); } /** * @returns true if the query has no aggregation or breakouts */ - isRaw(): boolean { + isRaw() { return !this.hasAggregations() && !this.hasBreakouts(); } @@ -734,14 +738,14 @@ export default class StructuredQuery extends AtomicQuery { /** * @returns whether a new breakout can be added or not */ - canAddBreakout(): boolean { + canAddBreakout() { return this.breakoutOptions().count > 0; } /** * @returns whether the current query has a valid breakout */ - hasValidBreakout(): boolean { + hasValidBreakout() { const breakouts = this.breakouts(); return breakouts.length > 0 && breakouts[0].isValid(); } @@ -878,7 +882,7 @@ export default class StructuredQuery extends AtomicQuery { /** * @returns whether a new filter can be added or not */ - canAddFilter(): boolean { + canAddFilter() { return ( Q.canAddFilter(this.query()) && (this.filterDimensionOptions().count > 0 || @@ -952,7 +956,7 @@ export default class StructuredQuery extends AtomicQuery { return new DimensionOptions(sortOptions); } } - canAddSort(): boolean { + canAddSort() { const sorts = this.sorts(); return ( this.sortOptions().count > 0 && @@ -1087,6 +1091,32 @@ export default class StructuredQuery extends AtomicQuery { // DIMENSION OPTIONS + _keyForFK(source, destination) { + if (source && destination) { + return `${source.id},${destination.id}`; + } + return null; + } + + _getExplicitJoinsSet(joins) { + const joinDimensionPairs = joins.map(join => { + const dimensionPairs = join.getDimensions(); + return dimensionPairs.map(pair => { + const [parentDimension, joinDimension] = pair; + return this._keyForFK( + parentDimension && parentDimension.field(), + joinDimension && joinDimension.field(), + ); + }); + }); + + const flatJoinDimensions = _.flatten(joinDimensionPairs); + const explicitJoins = new Set(flatJoinDimensions); + explicitJoins.delete(null); + + return explicitJoins; + } + // TODO Atte Keinänen 6/18/17: Refactor to 
dimensionOptions which takes a dimensionFilter // See aggregationFieldOptions for an explanation why that covers more use cases dimensionOptions( @@ -1121,21 +1151,12 @@ export default class StructuredQuery extends AtomicQuery { } // de-duplicate explicit and implicit joined tables - const keyForFk = (src, dst) => - src && dst ? `${src.id},${dst.id}` : null; - const explicitJoins = new Set( - joins.map(join => { - const p = join.parentDimension(); - const j = join.joinDimension(); - return keyForFk(p && p.field(), j && j.field()); - }), - ); - explicitJoins.delete(null); + const explicitJoins = this._getExplicitJoinsSet(joins); const fkDimensions = this.dimensions().filter(dimensionIsFKReference); for (const dimension of fkDimensions) { const field = dimension.field(); - if (field && explicitJoins.has(keyForFk(field, field.target))) { + if (field && explicitJoins.has(this._keyForFK(field, field.target))) { continue; } diff --git a/frontend/src/metabase-lib/lib/queries/structured/Aggregation.js b/frontend/src/metabase-lib/lib/queries/structured/Aggregation.js index 06ddfc2f2c10..91fb2b7b1da1 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/Aggregation.js +++ b/frontend/src/metabase-lib/lib/queries/structured/Aggregation.js @@ -44,7 +44,7 @@ export default class Aggregation extends MBQLClause { return this._query.removeAggregation(this._index); } - canRemove(): boolean { + canRemove() { return this.remove() .clean() .isValid(); @@ -131,7 +131,7 @@ export default class Aggregation extends MBQLClause { /** * Predicate function to test if a given aggregation clause is valid */ - isValid(): boolean { + isValid() { if (this.hasOptions()) { return this.aggregation().isValid(); } else if (this.isStandard() && this.dimension()) { @@ -159,21 +159,21 @@ export default class Aggregation extends MBQLClause { /** * Returns true if this is a "standard" metric */ - isStandard(): boolean { + isStandard() { return AGGREGATION.isStandard(this); } /** * Returns true if 
this is a metric */ - isMetric(): boolean { + isMetric() { return AGGREGATION.isMetric(this); } /** * Returns true if this is custom expression created with the expression editor */ - isCustom(): boolean { + isCustom() { return AGGREGATION.isCustom(this); } diff --git a/frontend/src/metabase-lib/lib/queries/structured/Breakout.js b/frontend/src/metabase-lib/lib/queries/structured/Breakout.js index 3652e60533ab..50f0fd6b4c14 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/Breakout.js +++ b/frontend/src/metabase-lib/lib/queries/structured/Breakout.js @@ -43,7 +43,7 @@ export default class Breakout extends MBQLClause { /** * Predicate function to test if a given breakout clause is valid */ - isValid(): boolean { + isValid() { const query = this.query(); return !query || query.breakoutOptions(this).hasDimension(this.dimension()); } diff --git a/frontend/src/metabase-lib/lib/queries/structured/Filter.js b/frontend/src/metabase-lib/lib/queries/structured/Filter.js index 14136bec5bb4..51497cdc9e89 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/Filter.js +++ b/frontend/src/metabase-lib/lib/queries/structured/Filter.js @@ -115,28 +115,28 @@ export default class Filter extends MBQLClause { /** * Returns true if this is a "standard" filter */ - isStandard(): boolean { + isStandard() { return isStandard(this); } /** * Returns true if this is a segment */ - isSegment(): boolean { + isSegment() { return isSegment(this); } /** * Returns true if this is custom filter created with the expression editor */ - isCustom(): boolean { + isCustom() { return isCustom(this); } /** * Returns true for filters where the first argument is a field */ - isFieldFilter(): boolean { + isFieldFilter() { return isFieldFilter(this); } @@ -308,7 +308,7 @@ export default class Filter extends MBQLClause { } } - isDimension(otherDimension: Dimension): boolean { + isDimension(otherDimension: Dimension) { const dimension = this.dimension(); return dimension ? 
dimension.isEqual(otherDimension) : false; } diff --git a/frontend/src/metabase-lib/lib/queries/structured/Join.js b/frontend/src/metabase-lib/lib/queries/structured/Join.js index 255dbf21e92c..708f48eb0750 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/Join.js +++ b/frontend/src/metabase-lib/lib/queries/structured/Join.js @@ -6,6 +6,7 @@ import Dimension, { FieldDimension } from "metabase-lib/lib/Dimension"; import DimensionOptions from "metabase-lib/lib/DimensionOptions"; import { pluralize } from "metabase/lib/formatting"; +import { getDatetimeUnit, isDateTimeField } from "metabase/lib/query/field_ref"; import { TableId } from "metabase-types/types/Table"; import type { @@ -28,6 +29,9 @@ const JOIN_STRATEGY_OPTIONS = [ { value: "full-join", name: t`Full outer join`, icon: "join_full_outer" }, ]; +const PARENT_DIMENSION_INDEX = 1; +const JOIN_DIMENSION_INDEX = 2; + export default class Join extends MBQLObjectClause { strategy: ?JoinStrategy; alias: ?JoinAlias; @@ -131,23 +135,32 @@ export default class Join extends MBQLObjectClause { setAlias(alias: JoinAlias) { alias = this._uniqueAlias(alias); if (alias !== this.alias) { - const join = this.set({ ...this, alias }); + let join = this.set({ ...this, alias }); // propagate alias change to join dimension - const joinDimension = join.joinDimension(); - if ( - joinDimension instanceof FieldDimension && - joinDimension.joinAlias() && - joinDimension.joinAlias() === this.alias - ) { - const newDimension = joinDimension.withJoinAlias(alias); - return join.setJoinDimension(newDimension); - } else { - return join; - } + const joinDimensions = join.joinDimensions(); + + joinDimensions.forEach((joinDimension, i) => { + if ( + joinDimension instanceof FieldDimension && + joinDimension.joinAlias() && + joinDimension.joinAlias() === this.alias + ) { + const newDimension = joinDimension.withJoinAlias(alias); + join = join.setJoinDimension({ index: i, dimension: newDimension }); + } + }); + + return join; } return 
this; } + _getParentDimensionForAlias() { + return this.parentDimensions().find( + dimension => dimension && dimension.field().isFK(), + ); + } + setDefaultAlias() { // The Join alias should be "Table - FK Field" if possible. We need both to disamiguate sitatutions where we have // multiple FKs that point to the same Table -- see #8418 and #11452. @@ -164,11 +177,9 @@ export default class Join extends MBQLObjectClause { const tableName = table && table.display_name; - const parentDimension = this.parentDimension(); + const parentDimension = this._getParentDimensionForAlias(); const fieldName = - parentDimension && - parentDimension.field().isFK() && - parentDimension.field().targetObjectName(); + parentDimension && parentDimension.field().targetObjectName(); const similarTableAndFieldNames = tableName && @@ -192,6 +203,17 @@ export default class Join extends MBQLObjectClause { return this.setAlias(alias); } + getConditions() { + if (!this.condition) { + return []; + } + if (this.isSingleConditionJoin()) { + return [this.condition]; + } + const [, ...conditions] = this.condition; + return conditions; + } + // STRATEGY setStrategy(strategy: JoinStrategy) { return this.set({ ...this, strategy }); @@ -211,10 +233,71 @@ export default class Join extends MBQLObjectClause { ); } - // CONDITION + // CONDITIONS + + isSingleConditionJoin() { + const { condition } = this; + return Array.isArray(condition) && condition[0] === "="; + } + + isMultipleConditionsJoin() { + const { condition } = this; + return Array.isArray(condition) && condition[0] === "and"; + } + + getConditionByIndex(index) { + if (!this.condition) { + return null; + } + if (this.isSingleConditionJoin() && !index) { + return this.condition; + } + if (this.isMultipleConditionsJoin()) { + const [, ...conditions] = this.condition; + return conditions[index]; + } + return null; + } + setCondition(condition: JoinCondition): Join { return this.set({ ...this, condition }); } + + setConditionByIndex({ index = 0, 
condition }): Join { + if (!this.condition) { + return this.setCondition(condition); + } + if (this.isSingleConditionJoin()) { + if (index === 0) { + return this.setCondition(condition); + } else { + return this.setCondition(["and", this.condition, condition]); + } + } + const conditions = [...this.condition]; + conditions[index + 1] = condition; + return this.setCondition(conditions); + } + + removeCondition(index) { + if (index == null || !this.getConditionByIndex(index)) { + return this; + } + if (this.isSingleConditionJoin()) { + return this.setCondition(null); + } + const filteredCondition = this.condition.filter((_, i) => { + // Adding 1 because the first element of a condition is an operator ("and") + return i !== index + 1; + }); + const [, ...conditions] = filteredCondition; + const isSingleNewCondition = conditions.length === 1; + if (isSingleNewCondition) { + return this.setCondition(conditions[0]); + } + return this.setCondition(filteredCondition); + } + setDefaultCondition() { const { dimensions } = this.parentDimensionOptions(); // look for foreign keys linking the two tables @@ -225,26 +308,66 @@ export default class Join extends MBQLObjectClause { return target && target.table && target.table.id === joinedTable.id; }); if (fk) { - return this.setParentDimension(fk).setJoinDimension( - this.joinedDimension(fk.field().target.dimension()), - ); + return this.setParentDimension({ + index: 0, + dimension: fk, + }).setJoinDimension({ + index: 0, + dimension: this.joinedDimension(fk.field().target.dimension()), + }); } } return this; } + _convertDimensionIntoMBQL(dimension: Dimension | ConcreteField) { + return dimension instanceof Dimension ? 
dimension.mbql() : dimension; + } + + _getJoinDimensionFromCondition(condition) { + const [, , joinDimension] = condition; + const joinedQuery = this.joinedQuery(); + return ( + joinedQuery && + joinDimension && + joinedQuery.parseFieldReference(joinDimension) + ); + } + + _getJoinDimensionsFromMultipleConditions() { + const [, ...conditions] = this.condition; + return conditions.map(condition => + this._getJoinDimensionFromCondition(condition), + ); + } + // simplified "=" join condition helpers: // NOTE: parentDimension refers to the left-hand side of the join, // and joinDimension refers to the right-hand side // TODO: should we rename them to lhsDimension/rhsDimension etc? - parentDimension() { - const { condition } = this; - if (Array.isArray(condition) && condition[0] === "=" && condition[1]) { - return this.query().parseFieldReference(condition[1]); + _getParentDimensionFromCondition(condition) { + const [, parentDimension] = condition; + return parentDimension && this.query().parseFieldReference(parentDimension); + } + + _getParentDimensionsFromMultipleConditions() { + const [, ...conditions] = this.condition; + return conditions.map(condition => + this._getParentDimensionFromCondition(condition), + ); + } + + parentDimensions() { + if (!this.condition) { + return []; } + return this.isSingleConditionJoin() + ? [this._getParentDimensionFromCondition(this.condition)] + : this._getParentDimensionsFromMultipleConditions(); } + parentDimensionOptions() { const query = this.query(); const dimensions = query.dimensions(); @@ -262,37 +385,77 @@ export default class Join extends MBQLObjectClause { } return new DimensionOptions(options); } - // TODO -- in what way is this setting a "parent dimension"? 
These names make no sense - setParentDimension(dimension: Dimension | ConcreteField): Join { - if (dimension instanceof Dimension) { - dimension = dimension.mbql(); + + joinDimensions() { + if (!this.condition) { + return []; } - const joinDimension = this.joinDimension(); - return this.setCondition([ - "=", - dimension, - joinDimension instanceof Dimension ? joinDimension.mbql() : null, - ]); + + return this.isSingleConditionJoin() + ? [this._getJoinDimensionFromCondition(this.condition)] + : this._getJoinDimensionsFromMultipleConditions(); } - joinDimension() { - const { condition } = this; - if (Array.isArray(condition) && condition[0] === "=" && condition[2]) { - const joinedQuery = this.joinedQuery(); - return joinedQuery && joinedQuery.parseFieldReference(condition[2]); + addEmptyDimensionsPair() { + if (!this.condition) { + return this.setCondition([]); } - } - setJoinDimension(dimension: Dimension | ConcreteField): Join { - if (dimension instanceof Dimension) { - dimension = dimension.mbql(); + if (this.isSingleConditionJoin()) { + return this.setCondition(["and", this.condition, []]); + } else { + return this.setCondition([...this.condition, []]); } - const parentDimension = this.parentDimension(); - return this.setCondition([ - "=", - parentDimension instanceof Dimension ? parentDimension.mbql() : null, - dimension, - ]); } + + _isDateTimeDimensionsJoin(d1, d2) { + return d1 && d2 && isDateTimeField(d1) && isDateTimeField(d2); + } + + _getDateTimeFieldCondition( + parentDimension, + joinDimension, + temporalUnitSource, + ) { + const temporalUnit = getDatetimeUnit( + temporalUnitSource === "parent" ? 
parentDimension : joinDimension, + ); + const parent = setTemporalUnit(parentDimension, temporalUnit); + const join = setTemporalUnit(joinDimension, temporalUnit); + return ["=", parent, join]; + } + + setJoinDimension({ index = 0, dimension, overwriteTemporalUnit = false }) { + const condition = this.getConditionByIndex(index); + const join = this._convertDimensionIntoMBQL(dimension); + const parent = condition ? condition[PARENT_DIMENSION_INDEX] : null; + + const newCondition = this._isDateTimeDimensionsJoin(parent, join) + ? this._getDateTimeFieldCondition( + parent, + join, + overwriteTemporalUnit ? "join" : "parent", + ) + : ["=", parent, join]; + + return this.setConditionByIndex({ index, condition: newCondition }); + } + + setParentDimension({ index = 0, dimension, overwriteTemporalUnit = false }) { + const condition = this.getConditionByIndex(index); + const parent = this._convertDimensionIntoMBQL(dimension); + const join = condition ? condition[JOIN_DIMENSION_INDEX] : null; + + const newCondition = this._isDateTimeDimensionsJoin(parent, join) + ? this._getDateTimeFieldCondition( + parent, + join, + overwriteTemporalUnit ? "parent" : "join", + ) + : ["=", parent, join]; + + return this.setConditionByIndex({ index, condition: newCondition }); + } + joinDimensionOptions() { const dimensions = this.joinedDimensions(); return new DimensionOptions({ @@ -304,6 +467,19 @@ export default class Join extends MBQLObjectClause { // HELPERS + getDimensions() { + const conditions = this.getConditions(); + return conditions.map(condition => { + const [, parentDimension, joinDimension] = condition; + return [ + parentDimension + ? this.query().parseFieldReference(parentDimension) + : null, + joinDimension ? 
this.query().parseFieldReference(joinDimension) : null, + ]; + }); + } + joinedQuery() { const sourceTable = this.joinSourceTableId(); const sourceQuery = this.joinSourceQuery(); @@ -389,11 +565,65 @@ export default class Join extends MBQLObjectClause { return this._query.removeJoin(this._index); } - isValid(): boolean { - return !!( - this.joinedTable() && - this.parentDimension() && - this.joinDimension() + hasGaps() { + if (!this.joinedTable()) { + return true; + } + const parentDimensions = this.parentDimensions(); + const joinDimensions = this.joinDimensions(); + return ( + parentDimensions.length === 0 || + joinDimensions.length === 0 || + parentDimensions.length !== joinDimensions.length || + parentDimensions.some(dimension => dimension == null) || + joinDimensions.some(dimension => dimension == null) + ); + } + + isValid() { + if (this.hasGaps()) { + return false; + } + const dimensionOptions = this.parent().dimensionOptions(); + const dimensions = [...this.parentDimensions(), ...this.joinDimensions()]; + return dimensions.every(dimension => + dimensionOptions.hasDimension(dimension), + ); + } + + clean() { + const invalidAndCantFix = !this.condition || !this.joinedTable(); + if (invalidAndCantFix || this.isValid()) { + return this; + } + let join = this; + + let invalidDimensionIndex = this.parentDimensions().findIndex( + dimension => dimension == null, + ); + if (invalidDimensionIndex >= 0) { + join = this.removeCondition(invalidDimensionIndex); + } + + invalidDimensionIndex = this.joinDimensions().findIndex( + dimension => dimension == null, ); + if (invalidDimensionIndex >= 0) { + join = this.removeCondition(invalidDimensionIndex); + } + + return join.clean(); } } + +function setTemporalUnit(fieldRef, value) { + const [field, id, opts] = fieldRef; + return [ + field, + id, + { + ...opts, + "temporal-unit": value, + }, + ]; +} diff --git a/frontend/src/metabase-lib/lib/queries/structured/MBQLClause.js 
b/frontend/src/metabase-lib/lib/queries/structured/MBQLClause.js index b6515329d740..8152ad58bbdf 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/MBQLClause.js +++ b/frontend/src/metabase-lib/lib/queries/structured/MBQLClause.js @@ -10,6 +10,15 @@ export default class MBQLArrayClause extends Array { _private(this, "_query", query); } + // There is a mismatch between the constructor args for `MBQLArrayClause` and `Array` + // so we need to reconcile things in the MBQLArrayClause[Symbol.species] constructor function + // See https://stackoverflow.com/questions/54522949 + static get [Symbol.species]() { + return Object.assign(function(...items) { + return new MBQLArrayClause(new Array(...items), this._index, this._query); + }, MBQLArrayClause); + } + set(mbql: any[]) { return new this.constructor(mbql, this._index, this._query); } @@ -106,6 +115,14 @@ export class MBQLObjectClause { metadata() { return this._query.metadata(); } + + raw() { + const entriesWithDefinedValue = Object.entries(this).filter(entry => { + const [, value] = entry; + return value !== undefined; + }); + return Object.fromEntries(entriesWithDefinedValue); + } } function _private(object, key, value) { diff --git a/frontend/src/metabase-lib/lib/queries/structured/OrderBy.js b/frontend/src/metabase-lib/lib/queries/structured/OrderBy.js index e4162c4f11f5..fe6cd27e7e2e 100644 --- a/frontend/src/metabase-lib/lib/queries/structured/OrderBy.js +++ b/frontend/src/metabase-lib/lib/queries/structured/OrderBy.js @@ -43,7 +43,7 @@ export default class OrderBy extends MBQLClause { /** * Predicate function to test if a given order-by clause is valid */ - isValid(): boolean { + isValid() { const query = this.query(); return !query || query.sortOptions(this).hasDimension(this.dimension()); } diff --git a/frontend/src/metabase/App.jsx b/frontend/src/metabase/App.jsx index 6bacc9515ca9..dd176e986987 100644 --- a/frontend/src/metabase/App.jsx +++ b/frontend/src/metabase/App.jsx @@ -55,7 +55,6 @@ export 
default class App extends Component { } componentDidCatch(error, errorInfo) { - console.log("COMPONENT DID CATCH LOLE"); this.setState({ errorInfo }); } diff --git a/frontend/src/metabase/user/components/LoginHistoryList.jsx b/frontend/src/metabase/account/login-history/components/LoginHistory/LoginHistory.jsx similarity index 92% rename from frontend/src/metabase/user/components/LoginHistoryList.jsx rename to frontend/src/metabase/account/login-history/components/LoginHistory/LoginHistory.jsx index 82ab3dd4c747..4aa44e5fc737 100644 --- a/frontend/src/metabase/user/components/LoginHistoryList.jsx +++ b/frontend/src/metabase/account/login-history/components/LoginHistory/LoginHistory.jsx @@ -1,12 +1,8 @@ /* eslint-disable react/prop-types */ import React from "react"; import { Box, Flex } from "grid-styled"; - import _ from "underscore"; import moment from "moment"; - -import LoginHistory from "metabase/entities/loginHistory"; - import Card from "metabase/components/Card"; import Label from "metabase/components/type/Label"; import Text from "metabase/components/type/Text"; @@ -60,4 +56,4 @@ function LoginHistoryList({ loginHistory }) { return {_.map(groups, LoginHistoryGroup)}; } -export default LoginHistory.loadList()(LoginHistoryList); +export default LoginHistoryList; diff --git a/frontend/src/metabase/account/login-history/components/LoginHistory/index.js b/frontend/src/metabase/account/login-history/components/LoginHistory/index.js new file mode 100644 index 000000000000..cf0573759df0 --- /dev/null +++ b/frontend/src/metabase/account/login-history/components/LoginHistory/index.js @@ -0,0 +1 @@ +export { default } from "./LoginHistory"; diff --git a/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/LoginHistoryApp.jsx b/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/LoginHistoryApp.jsx new file mode 100644 index 000000000000..27b009e30143 --- /dev/null +++ 
b/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/LoginHistoryApp.jsx @@ -0,0 +1,4 @@ +import LoginHistory from "metabase/entities/loginHistory"; +import LoginHistoryList from "../../components/LoginHistory"; + +export default LoginHistory.loadList()(LoginHistoryList); diff --git a/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/index.js b/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/index.js new file mode 100644 index 000000000000..9df80a402fcd --- /dev/null +++ b/frontend/src/metabase/account/login-history/containers/LoginHistoryApp/index.js @@ -0,0 +1 @@ +export { default } from "./LoginHistoryApp"; diff --git a/frontend/src/metabase/account/notifications/actions.js b/frontend/src/metabase/account/notifications/actions.js new file mode 100644 index 000000000000..cae214bfde78 --- /dev/null +++ b/frontend/src/metabase/account/notifications/actions.js @@ -0,0 +1,16 @@ +import { push } from "react-router-redux"; + +const PREFIX = `/account/notifications`; + +export const navigateToUnsubscribe = (item, type) => { + return push(`${PREFIX}/${type}/${item.id}/unsubscribe`); +}; + +export const navigateToArchive = (item, type, hasUnsubscribed) => { + const query = hasUnsubscribed ? 
"?unsubscribed=true" : ""; + return push(`${PREFIX}/${type}/${item.id}/archive${query}`); +}; + +export const navigateToHelp = () => { + return push(`${PREFIX}/help`); +}; diff --git a/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.jsx b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.jsx new file mode 100644 index 000000000000..bb54bcd5cb6a --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.jsx @@ -0,0 +1,127 @@ +import React, { useCallback, useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Settings from "metabase/lib/settings"; +import { formatDateTimeWithUnit } from "metabase/lib/formatting"; +import { formatChannelRecipients } from "metabase/lib/notifications"; +import Button from "metabase/components/Button"; +import ModalContent from "metabase/components/ModalContent"; +import FormMessage from "metabase/components/form/FormMessage"; +import { ModalMessage } from "./ArchiveModal.styled"; + +const propTypes = { + item: PropTypes.object.isRequired, + type: PropTypes.oneOf(["alert", "pulse"]).isRequired, + user: PropTypes.object, + hasUnsubscribed: PropTypes.bool, + onArchive: PropTypes.func, + onClose: PropTypes.func, +}; + +const ArchiveModal = ({ + item, + type, + user, + hasUnsubscribed, + onArchive, + onClose, +}) => { + const [error, setError] = useState(); + + const handleArchiveClick = useCallback(async () => { + try { + await onArchive(item, true); + onClose(); + } catch (error) { + setError(error); + } + }, [item, onArchive, onClose]); + + return ( + : null, + , + , + ]} + onClose={onClose} + > + {isCreator(item, user) && hasUnsubscribed && ( + + {getCreatorMessage(type, user)} + {t`As the creator you can also choose to delete this if it’s no longer relevant to others as well.`} + + )} + + {getDateMessage(item, type)} + {getRecipientsMessage(item)} + + + ); +}; + +ArchiveModal.propTypes = 
propTypes; + +const isCreator = (item, user) => { + return user != null && user.id === item.creator?.id; +}; + +const getTitleMessage = (type, hasUnsubscribed) => { + switch (type) { + case "alert": + return hasUnsubscribed + ? t`You’re unsubscribed. Delete this alert as well?` + : t`Delete this alert?`; + case "pulse": + return hasUnsubscribed + ? t`You’re unsubscribed. Delete this subscription as well?` + : t`Delete this subscription?`; + } +}; + +const getSubmitMessage = (type, hasUnsubscribed) => { + switch (type) { + case "alert": + return hasUnsubscribed ? t`Delete this alert` : t`Yes, delete this alert`; + case "pulse": + return hasUnsubscribed + ? t`Delete this subscription` + : t`Yes, delete this subscription`; + } +}; + +const getCancelMessage = hasUnsubscribed => { + return hasUnsubscribed ? t`Keep it around` : t`I changed my mind`; +}; + +const getCreatorMessage = (type, user) => { + switch (type) { + case "alert": + return t`You won’t receive this alert at ${user.email} any more. `; + case "pulse": + return t`You won’t receive this subscription at ${user.email} any more. `; + } +}; + +const getDateMessage = (item, type) => { + const options = Settings.formattingOptions(); + const createdAt = formatDateTimeWithUnit(item.created_at, "day", options); + + switch (type) { + case "alert": + return t`You created this alert on ${createdAt}. `; + case "pulse": + return t`You created this subscription on ${createdAt}. 
`; + } +}; + +const getRecipientsMessage = item => { + return t`It’s currently being sent to ${formatChannelRecipients(item)}.`; +}; + +export default ArchiveModal; diff --git a/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.styled.jsx b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.styled.jsx new file mode 100644 index 000000000000..748448f0e643 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.styled.jsx @@ -0,0 +1,7 @@ +import styled from "styled-components"; + +export const ModalMessage = styled.div` + &:not(:last-child) { + margin-bottom: 1rem; + } +`; diff --git a/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.unit.spec.js b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.unit.spec.js new file mode 100644 index 000000000000..faf3ab1f667e --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/ArchiveModal/ArchiveModal.unit.spec.js @@ -0,0 +1,137 @@ +import React from "react"; +import { render, screen, waitFor } from "@testing-library/react"; +import ArchiveModal from "./ArchiveModal"; + +const getAlert = ({ creator = getUser(), channels = [getChannel()] } = {}) => ({ + creator, + channels, + created_at: "2021-05-08T02:02:07.441Z", +}); + +const getPulse = ({ creator = getUser(), channels = [getChannel()] } = {}) => ({ + creator, + channels, + created_at: "2021-05-08T02:02:07.441Z", +}); + +const getUser = ({ id = 1 } = {}) => ({ + id, + common_name: "John Doe", +}); + +const getChannel = ({ + channel_type = "email", + schedule_type = "hourly", + recipients = [getUser()], +} = {}) => { + return { + channel_type, + schedule_type, + recipients, + schedule_hour: 8, + schedule_day: "mon", + schedule_frame: "first", + }; +}; + +describe("ArchiveModal", () => { + it("should render an email alert", () => { + const alert = getAlert(); + + render(); + + 
screen.getByText("Delete this alert?"); + screen.getByText("Yes, delete this alert"); + screen.getByText("You created this alert on May 8, 2021", { exact: false }); + screen.getByText("It’s currently being sent to 1 email.", { exact: false }); + }); + + it("should render an email pulse", () => { + const pulse = getPulse(); + + render(); + + screen.getByText("Delete this subscription?"); + screen.getByText("Yes, delete this subscription"); + screen.getByText("May 8, 2021", { exact: false }); + screen.getByText("It’s currently being sent to 1 email.", { exact: false }); + }); + + it("should render a slack pulse", () => { + const pulse = getPulse({ + channels: [getChannel({ channel_type: "slack" })], + }); + + render(); + + screen.getByText("1 Slack channel", { exact: false }); + }); + + it("should render an alert with both email and slack channels", () => { + const alert = getAlert({ + channels: [ + getChannel({ + channel_type: "email", + recipients: [getUser(), getUser()], + }), + getChannel({ + channel_type: "slack", + recipients: [getUser(), getUser(), getUser()], + }), + ], + }); + + render(); + + screen.getByText("2 emails and 3 Slack channels", { exact: false }); + }); + + it("should close on submit", async () => { + const alert = getAlert(); + const onArchive = jest.fn(); + const onClose = jest.fn(); + + onArchive.mockResolvedValue(); + + render( + , + ); + + screen.getByText("Yes, delete this alert").click(); + + waitFor(() => { + expect(onArchive).toHaveBeenCalled(alert, true); + expect(onClose).toHaveBeenCalled(); + }); + }); + + it("should not close on a submit error", async () => { + const alert = getAlert(); + const onArchive = jest.fn(); + const onClose = jest.fn(); + + onArchive.mockRejectedValue({ data: { message: "An error occurred" } }); + + render( + , + ); + + screen.getByText("Yes, delete this alert").click(); + + waitFor(() => { + screen.getByText("An error occurred"); + expect(onArchive).toHaveBeenCalled(alert, true); + 
expect(onClose).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/frontend/src/metabase/account/notifications/components/ArchiveModal/index.js b/frontend/src/metabase/account/notifications/components/ArchiveModal/index.js new file mode 100644 index 000000000000..3eb77a0f1601 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/ArchiveModal/index.js @@ -0,0 +1 @@ +export { default } from "./ArchiveModal"; diff --git a/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.jsx b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.jsx new file mode 100644 index 000000000000..204857bb3156 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.jsx @@ -0,0 +1,50 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { jt, t } from "ttag"; +import Settings from "metabase/lib/settings"; +import Button from "metabase/components/Button"; +import ModalContent from "metabase/components/ModalContent"; +import { ModalLink, ModalMessage } from "./HelpModal.styled"; + +const propTypes = { + onClose: PropTypes.func, +}; + +const HelpModal = ({ onClose }) => { + const email = Settings.get("admin-email"); + + const handleClose = () => onClose(true); + + return ( + + {t`Got it`} + + } + onClose={handleClose} + > + + {t`It’s possible you may also receive emails from Metabase if you’re a member of an email distribution list, like “team@mycompany.com” and that list is used as the recipient for an alert or dashboard subscription instead of your individual email.`} + + + {getAdminMessage(email)} + {t`Hopefully they’ll be able to help you out!`} + + + ); +}; + +HelpModal.propTypes = propTypes; + +const getAdminLink = (email, text) => { + return email ? 
{text} : text; +}; + +const getAdminMessage = email => { + const adminLink = getAdminLink(email, t`your instance administrator`); + return jt`Metabase doesn’t manage those lists, so we’d recommend contacting ${adminLink}. `; +}; + +export default HelpModal; diff --git a/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.styled.jsx b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.styled.jsx new file mode 100644 index 000000000000..56ca70f898b0 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.styled.jsx @@ -0,0 +1,17 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import Link from "metabase/components/Link"; + +export const ModalLink = styled(Link)` + color: ${color("brand")}; + + &:hover { + text-decoration: underline; + } +`; + +export const ModalMessage = styled.div` + &:not(:last-child) { + margin-bottom: 1rem; + } +`; diff --git a/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.unit.spec.js b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.unit.spec.js new file mode 100644 index 000000000000..daea8aeda38f --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/HelpModal/HelpModal.unit.spec.js @@ -0,0 +1,32 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import Settings from "metabase/lib/settings"; +import HelpModal from "./HelpModal"; + +describe("HelpModal", () => { + it("should render with admin email", () => { + Settings.set("admin-email", "admin@example.com"); + + render(); + + const link = screen.getByRole("link"); + expect(link).toHaveProperty("href", "mailto:admin@example.com"); + }); + + it("should render without admin email", () => { + Settings.set("admin-email", null); + + render(); + + screen.getByText("administrator", { exact: false }); + }); + + it("should close on button click", () => { + const 
onClose = jest.fn(); + + render(); + + screen.getByText("Got it").click(); + expect(onClose).toHaveBeenCalled(); + }); +}); diff --git a/frontend/src/metabase/account/notifications/components/HelpModal/index.js b/frontend/src/metabase/account/notifications/components/HelpModal/index.js new file mode 100644 index 000000000000..cb16fb1a5562 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/HelpModal/index.js @@ -0,0 +1 @@ +export { default } from "./HelpModal"; diff --git a/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.jsx b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.jsx new file mode 100644 index 000000000000..f25fac3f66e7 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.jsx @@ -0,0 +1,106 @@ +import React, { useCallback } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Settings from "metabase/lib/settings"; +import { formatDateTimeWithUnit } from "metabase/lib/formatting"; +import { + canArchive, + formatChannel, + formatLink, + formatTitle, +} from "metabase/lib/notifications"; +import { + NotificationContent, + NotificationIcon, + NotificationDescription, + NotificationCardRoot, + NotificationMessage, + NotificationTitle, +} from "./NotificationCard.styled"; + +const propTypes = { + item: PropTypes.object.isRequired, + type: PropTypes.oneOf(["pulse", "alert"]).isRequired, + user: PropTypes.object.isRequired, + onUnsubscribe: PropTypes.func, + onArchive: PropTypes.func, +}; + +const NotificationCard = ({ item, type, user, onUnsubscribe, onArchive }) => { + const hasArchive = canArchive(item, user); + + const onUnsubscribeClick = useCallback(() => { + onUnsubscribe(item, type); + }, [item, type, onUnsubscribe]); + + const onArchiveClick = useCallback(() => { + onArchive(item, type); + }, [item, type, onArchive]); + + return ( + + + + {formatTitle(item, 
type)} + + + {item.channels.map((channel, index) => ( + + {getChannelMessage(channel)} + + ))} + + {getCreatorMessage(item, user)} + + + + {!hasArchive && ( + + )} + {hasArchive && ( + + )} + + ); +}; + +NotificationCard.propTypes = propTypes; + +const getChannelMessage = channel => { + return getCapitalizedMessage(formatChannel(channel)); +}; + +const getCapitalizedMessage = message => { + const [firstLetter, ...otherLetters] = message; + return [firstLetter.toUpperCase(), ...otherLetters].join(""); +}; + +const getCreatorMessage = (item, user) => { + let creatorString = ""; + const options = Settings.formattingOptions(); + + if (user.id === item.creator?.id) { + creatorString += t`Created by you`; + } else if (item.creator?.common_name) { + creatorString += t`Created by ${item.creator.common_name}`; + } else { + creatorString += t`Created`; + } + + if (item.created_at) { + const createdAt = formatDateTimeWithUnit(item.created_at, "day", options); + creatorString += t` on ${createdAt}`; + } + + return creatorString; +}; + +export default NotificationCard; diff --git a/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.styled.jsx b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.styled.jsx new file mode 100644 index 000000000000..7d84acae5991 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.styled.jsx @@ -0,0 +1,58 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import Icon from "metabase/components/Icon"; +import Link from "metabase/components/Link"; + +export const NotificationCardRoot = styled.div` + display: flex; + align-items: center; + padding: 1rem 1.5rem; + border: 1px solid ${color("border")}; + border-radius: 6px; + background-color: ${color("white")}; + + &:not(:last-child) { + margin-bottom: 1.25rem; + } +`; + +export const NotificationContent = styled.div` + flex: 1 1 
auto; +`; + +export const NotificationTitle = styled(Link)` + color: ${color("brand")}; + font-weight: bold; + + &:hover { + text-decoration: underline; + } +`; + +export const NotificationDescription = styled.div` + display: flex; + flex-wrap: wrap; + margin-top: 0.25rem; +`; + +export const NotificationMessage = styled.span` + color: ${color("text-medium")}; + font-size: 0.75rem; + line-height: 0.875rem; + + &:not(:last-child)::after { + content: " · "; + white-space: pre; + } +`; + +export const NotificationIcon = styled(Icon)` + color: ${color("text-light")}; + cursor: pointer; + width: 1rem; + height: 1rem; + + &:hover { + color: ${color("text-medium")}; + } +`; diff --git a/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.unit.spec.js b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.unit.spec.js new file mode 100644 index 000000000000..3f51fde7adbe --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationCard/NotificationCard.unit.spec.js @@ -0,0 +1,212 @@ +import React from "react"; +import { render, screen, fireEvent } from "@testing-library/react"; +import NotificationCard from "./NotificationCard"; + +const getAlert = ({ creator = getUser(), channels = [getChannel()] } = {}) => ({ + creator, + channels, + card: { + name: "Alert", + }, + created_at: "2021-05-08T02:02:07.441Z", +}); + +const getPulse = ({ creator = getUser(), channels = [getChannel()] } = {}) => ({ + name: "Pulse", + creator, + channels, + created_at: "2021-05-08T02:02:07.441Z", +}); + +const getUser = ({ id = 1 } = {}) => ({ + id, + common_name: "John Doe", +}); + +const getChannel = ({ + channel_type = "email", + schedule_type = "hourly", + recipients = [], +} = {}) => ({ + channel_type, + schedule_type, + recipients, + schedule_hour: 8, + schedule_day: "mon", + schedule_frame: "first", + details: { + channel: "@channel", + }, +}); + +describe("NotificationCard", () => { + 
it("should render an alert", () => { + const alert = getAlert(); + const user = getUser(); + + render(); + + screen.getByText("Alert"); + screen.getByText("Emailed hourly"); + screen.getByText("Created by you on May 8, 2021"); + }); + + it("should render a pulse", () => { + const pulse = getPulse(); + const user = getUser(); + + render(); + + screen.getByText("Pulse"); + screen.getByText("Emailed hourly"); + screen.getByText("Created by you on May 8, 2021"); + }); + + it("should render a slack alert", () => { + const alert = getAlert({ + channels: [getChannel({ channel_type: "slack" })], + }); + const user = getUser(); + + render(); + + screen.getByText("Slack’d hourly to @channel"); + }); + + it("should render a daily alert", () => { + const alert = getAlert({ + channels: [getChannel({ schedule_type: "daily" })], + }); + const user = getUser(); + + render(); + + screen.getByText("Emailed daily at 8:00 AM"); + }); + + it("should render a weekly alert", () => { + const alert = getAlert({ + channels: [getChannel({ schedule_type: "weekly" })], + }); + const user = getUser(); + + render(); + + screen.getByText("Emailed Monday at 8:00 AM"); + }); + + it("should render a monthly alert", () => { + const alert = getAlert({ + channels: [getChannel({ schedule_type: "monthly" })], + }); + const user = getUser(); + + render(); + + screen.getByText("Emailed monthly on the first Monday at 8:00 AM"); + }); + + it("should render an alert created by another user", () => { + const alert = getAlert(); + const user = getUser({ id: 2 }); + + render(); + + screen.getByText("Created by John Doe on May 8, 2021"); + }); + + it("should unsubscribe when the user is not the creator and subscribed", () => { + const creator = getUser({ id: 1 }); + const user = getUser({ id: 2 }); + const alert = getAlert({ + creator, + channels: [getChannel({ recipients: [user] })], + }); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + + render( + , + ); + + 
fireEvent.click(screen.getByLabelText("close icon")); + expect(onUnsubscribe).toHaveBeenCalledWith(alert, "alert"); + expect(onArchive).not.toHaveBeenCalled(); + }); + + it("should unsubscribe when user user is the creator and subscribed with another user", () => { + const creator = getUser({ id: 1 }); + const recipient = getUser({ id: 2 }); + const alert = getAlert({ + creator, + channels: [getChannel({ recipients: [creator, recipient] })], + }); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + + render( + , + ); + + fireEvent.click(screen.getByLabelText("close icon")); + expect(onUnsubscribe).toHaveBeenCalledWith(alert, "alert"); + expect(onArchive).not.toHaveBeenCalled(); + }); + + it("should archive when the user is the creator and not subscribed", () => { + const creator = getUser(); + const alert = getAlert({ creator }); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + + render( + , + ); + + fireEvent.click(screen.getByLabelText("close icon")); + expect(onUnsubscribe).not.toHaveBeenCalled(); + expect(onArchive).toHaveBeenCalledWith(alert, "alert"); + }); + + it("should archive when the user is the creator and is the only one subscribed", () => { + const creator = getUser(); + const alert = getAlert({ + creator, + channels: [getChannel({ recipients: [creator] })], + }); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + + render( + , + ); + + fireEvent.click(screen.getByLabelText("close icon")); + expect(onUnsubscribe).not.toHaveBeenCalled(); + expect(onArchive).toHaveBeenCalledWith(alert, "alert"); + }); +}); diff --git a/frontend/src/metabase/account/notifications/components/NotificationCard/index.js b/frontend/src/metabase/account/notifications/components/NotificationCard/index.js new file mode 100644 index 000000000000..8c5b82942811 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationCard/index.js @@ -0,0 +1 @@ +export { default } from "./NotificationCard"; diff --git 
a/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.jsx b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.jsx new file mode 100644 index 000000000000..7d14a2cfe770 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.jsx @@ -0,0 +1,71 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import NotificationCard from "../NotificationCard"; +import { + NotificationButton, + NotificationHeader, + NotificationIcon, + NotificationLabel, + NotificationMessage, + NotificationSection, +} from "./NotificationList.styled"; + +const propTypes = { + items: PropTypes.array.isRequired, + user: PropTypes.object.isRequired, + children: PropTypes.node, + onHelp: PropTypes.func, + onUnsubscribe: PropTypes.func, + onArchive: PropTypes.func, +}; + +const NotificationList = ({ + items, + user, + children, + onHelp, + onUnsubscribe, + onArchive, +}) => { + if (!items.length) { + return ; + } + + return ( +
+ + {t`You receive or created these`} + + {t`Not seeing one here?`} + + + {items.map(({ item, type }) => ( + + ))} + {children} +
+ ); +}; + +const NotificationEmptyState = () => { + return ( + + + + {t`If you subscribe or are added to dashboard subscriptions or alerts you’ll be able to manage those here.`} + + + ); +}; + +NotificationList.propTypes = propTypes; + +export default NotificationList; diff --git a/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.styled.jsx b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.styled.jsx new file mode 100644 index 000000000000..241279eb8dc7 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.styled.jsx @@ -0,0 +1,39 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import Label from "metabase/components/type/Label"; +import { TextButton } from "metabase/components/Button.styled"; +import Icon from "metabase/components/Icon"; + +export const NotificationHeader = styled.div` + display: flex; + align-items: center; + margin-bottom: 1.5rem; +`; + +export const NotificationLabel = styled(Label)` + flex: 1 1 auto; + margin: 0; +`; + +export const NotificationButton = styled(TextButton).attrs({ + size: "small", +})``; + +export const NotificationSection = styled.div` + display: flex; + flex-direction: column; + align-items: center; +`; + +export const NotificationIcon = styled(Icon)` + color: ${color("bg-dark")}; + width: 3.25rem; + height: 3.25rem; + margin-top: 4.875rem; + margin-bottom: 1.75rem; +`; + +export const NotificationMessage = styled.div` + max-width: 24rem; + text-align: center; +`; diff --git a/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.unit.spec.js b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.unit.spec.js new file mode 100644 index 000000000000..dc4184986d93 --- /dev/null +++ 
b/frontend/src/metabase/account/notifications/components/NotificationList/NotificationList.unit.spec.js @@ -0,0 +1,35 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import NotificationList from "./NotificationList"; + +const getPulse = () => ({ + name: "Pulse", + channels: [], + created_at: "2021-05-08T02:02:07.441Z", +}); + +const getUser = () => ({ + id: 1, + common_name: "John Doe", +}); + +describe("NotificationList", () => { + it("should render items", () => { + const pulse = getPulse(); + const user = getUser(); + + render( + , + ); + + screen.getByText("Pulse"); + }); + + it("should render empty state when there are no items", () => { + const user = getUser(); + + render(); + + screen.getByText("you’ll be able to manage those here", { exact: false }); + }); +}); diff --git a/frontend/src/metabase/account/notifications/components/NotificationList/index.js b/frontend/src/metabase/account/notifications/components/NotificationList/index.js new file mode 100644 index 000000000000..886c9d477a29 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/NotificationList/index.js @@ -0,0 +1 @@ +export { default } from "./NotificationList"; diff --git a/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.jsx b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.jsx new file mode 100644 index 000000000000..b78f58e32eb6 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.jsx @@ -0,0 +1,78 @@ +import React, { useCallback, useState } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Button from "metabase/components/Button"; +import ModalContent from "metabase/components/ModalContent"; +import FormMessage from "metabase/components/form/FormMessage"; + +const propTypes = { + item: PropTypes.object.isRequired, + type: PropTypes.oneOf(["alert", 
"pulse"]).isRequired, + user: PropTypes.object, + onUnsubscribe: PropTypes.func, + onArchive: PropTypes.func, + onClose: PropTypes.func, +}; + +const UnsubscribeModal = ({ + item, + type, + user, + onUnsubscribe, + onArchive, + onClose, +}) => { + const [error, setError] = useState(); + + const handleUnsubscribeClick = useCallback(async () => { + try { + await onUnsubscribe(item); + + if (isCreator(item, user)) { + onArchive(item, type, true); + } else { + onClose(); + } + } catch (error) { + setError(error); + } + }, [item, type, user, onUnsubscribe, onArchive, onClose]); + + return ( + : null, + , + , + ]} + onClose={onClose} + > +

+ {getUnsubscribeMessage(type)} + {t`Depending on your organization’s permissions you might need to ask a moderator to be re-added in the future.`} +

+
+ ); +}; + +UnsubscribeModal.propTypes = propTypes; + +const isCreator = (item, user) => { + return user != null && user.id === item.creator?.id; +}; + +const getUnsubscribeMessage = type => { + switch (type) { + case "alert": + return t`You’ll stop receiving this alert from now on. `; + case "pulse": + return t`You’ll stop receiving this subscription from now on. `; + } +}; + +export default UnsubscribeModal; diff --git a/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.unit.spec.js b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.unit.spec.js new file mode 100644 index 000000000000..4fd016450182 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/UnsubscribeModal.unit.spec.js @@ -0,0 +1,121 @@ +import React from "react"; +import { render, screen, waitFor } from "@testing-library/react"; +import UnsubscribeModal from "./UnsubscribeModal"; + +const getAlert = ({ creator = getUser({ id: 1 }) } = {}) => ({ + name: "Alert", + creator: creator, +}); + +const getPulse = ({ creator = getUser({ id: 1 }) } = {}) => ({ + name: "Pulse", + creator: creator, +}); + +const getUser = ({ id = 2 } = {}) => ({ + id, +}); + +describe("UnsubscribeModal", () => { + it("should render an alert", () => { + const alert = getAlert(); + + render(); + + screen.getByText("this alert", { exact: false }); + }); + + it("should render a pulse", () => { + const pulse = getPulse(); + + render(); + + screen.getByText("this subscription", { exact: false }); + }); + + it("should close if unsubscribed successfully", () => { + const alert = getAlert(); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + const onClose = jest.fn(); + + onUnsubscribe.mockResolvedValue(); + + render( + , + ); + + screen.getByText("Unsubscribe").click(); + + waitFor(() => { + expect(onUnsubscribe).toHaveBeenCalledWith(alert); + expect(onArchive).not.toHaveBeenCalled(); + 
expect(onClose).toHaveBeenCalled(); + }); + }); + + it("should proceed with archiving if the notification is created by the user", () => { + const user = getUser(); + const alert = getAlert({ creator: user }); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + const onClose = jest.fn(); + + onUnsubscribe.mockResolvedValue(); + + render( + , + ); + + screen.getByText("Unsubscribe").click(); + + waitFor(() => { + expect(onUnsubscribe).toHaveBeenCalledWith(alert); + expect(onArchive).toHaveBeenCalledWith(alert, "alert", true); + expect(onClose).not.toHaveBeenCalled(); + }); + }); + + it("should not close on a submit error", () => { + const user = getUser(); + const alert = getAlert(); + const onUnsubscribe = jest.fn(); + const onArchive = jest.fn(); + const onClose = jest.fn(); + + onUnsubscribe.mockRejectedValue({ data: { message: "An error occurred" } }); + + render( + , + ); + + screen.getByText("Unsubscribe").click(); + + waitFor(() => { + screen.getByText("An error occurred"); + expect(onUnsubscribe).toHaveBeenCalled(); + expect(onArchive).not.toHaveBeenCalled(); + expect(onClose).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/frontend/src/metabase/account/notifications/components/UnsubscribeModal/index.js b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/index.js new file mode 100644 index 000000000000..78ad8837f723 --- /dev/null +++ b/frontend/src/metabase/account/notifications/components/UnsubscribeModal/index.js @@ -0,0 +1 @@ +export { default } from "./UnsubscribeModal"; diff --git a/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/ArchiveAlertModal.jsx b/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/ArchiveAlertModal.jsx new file mode 100644 index 000000000000..4ed89bafaa2f --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/ArchiveAlertModal.jsx @@ -0,0 +1,27 @@ +import { connect } from "react-redux"; +import _ from 
"underscore"; +import Alerts from "metabase/entities/alerts"; +import { getUser } from "metabase/selectors/user"; +import { getAlertId } from "../../selectors"; +import ArchiveModal from "../../components/ArchiveModal"; + +const mapStateToProps = (state, { alert, location }) => ({ + item: alert, + type: "alert", + user: getUser(state), + hasUnsubscribed: location.query.unsubscribed, +}); + +const mapDispatchToProps = { + onArchive: Alerts.actions.setArchived, +}; + +export default _.compose( + Alerts.load({ + id: (state, props) => getAlertId(props), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(ArchiveModal); diff --git a/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/index.js b/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/index.js new file mode 100644 index 000000000000..950bf8843c0a --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/ArchiveAlertModal/index.js @@ -0,0 +1 @@ +export { default } from "./ArchiveAlertModal"; diff --git a/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/ArchivePulseModal.jsx b/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/ArchivePulseModal.jsx new file mode 100644 index 000000000000..e616155221e8 --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/ArchivePulseModal.jsx @@ -0,0 +1,27 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Pulses from "metabase/entities/pulses"; +import { getUser } from "metabase/selectors/user"; +import { getPulseId } from "../../selectors"; +import ArchiveModal from "../../components/ArchiveModal"; + +const mapStateToProps = (state, { pulse, location }) => ({ + item: pulse, + type: "pulse", + user: getUser(state), + hasUnsubscribed: location.query.unsubscribed, +}); + +const mapDispatchToProps = { + onArchive: Pulses.actions.setArchived, +}; + +export default _.compose( + Pulses.load({ + id: 
(state, props) => getPulseId(props), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(ArchiveModal); diff --git a/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/index.js b/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/index.js new file mode 100644 index 000000000000..7121b9f24cf0 --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/ArchivePulseModal/index.js @@ -0,0 +1 @@ +export { default } from "./ArchivePulseModal"; diff --git a/frontend/src/metabase/account/notifications/containers/NotificationsApp/NotificationsApp.jsx b/frontend/src/metabase/account/notifications/containers/NotificationsApp/NotificationsApp.jsx new file mode 100644 index 000000000000..fb99353193fd --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/NotificationsApp/NotificationsApp.jsx @@ -0,0 +1,38 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Alerts from "metabase/entities/alerts"; +import Pulses from "metabase/entities/pulses"; +import { getUser, getUserId } from "metabase/selectors/user"; +import { + navigateToArchive, + navigateToHelp, + navigateToUnsubscribe, +} from "../../actions"; +import { getNotifications } from "../../selectors"; +import NotificationList from "../../components/NotificationList"; + +const mapStateToProps = (state, props) => ({ + user: getUser(state), + items: getNotifications(props), +}); + +const mapDispatchToProps = { + onHelp: navigateToHelp, + onUnsubscribe: navigateToUnsubscribe, + onArchive: navigateToArchive, +}; + +export default _.compose( + Alerts.loadList({ + query: state => ({ user_id: getUserId(state) }), + reload: true, + }), + Pulses.loadList({ + query: state => ({ user_id: getUserId(state) }), + reload: true, + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(NotificationList); diff --git a/frontend/src/metabase/account/notifications/containers/NotificationsApp/index.js 
b/frontend/src/metabase/account/notifications/containers/NotificationsApp/index.js new file mode 100644 index 000000000000..219d08c4598d --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/NotificationsApp/index.js @@ -0,0 +1 @@ +export { default } from "./NotificationsApp"; diff --git a/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/UnsubscribeAlertModal.jsx b/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/UnsubscribeAlertModal.jsx new file mode 100644 index 000000000000..22858ef8230d --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/UnsubscribeAlertModal.jsx @@ -0,0 +1,28 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Alerts from "metabase/entities/alerts"; +import { getUser } from "metabase/selectors/user"; +import { navigateToArchive } from "../../actions"; +import { getAlertId } from "../../selectors"; +import UnsubscribeModal from "../../components/UnsubscribeModal"; + +const mapStateToProps = (state, { alert }) => ({ + item: alert, + type: "alert", + user: getUser(state), +}); + +const mapDispatchToProps = { + onUnsubscribe: Alerts.actions.unsubscribe, + onArchive: navigateToArchive, +}; + +export default _.compose( + Alerts.load({ + id: (state, props) => getAlertId(props), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(UnsubscribeModal); diff --git a/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/index.js b/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/index.js new file mode 100644 index 000000000000..473d470d0765 --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/UnsubscribeAlertModal/index.js @@ -0,0 +1 @@ +export { default } from "./UnsubscribeAlertModal"; diff --git a/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/UnsubscribePulseModal.jsx 
b/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/UnsubscribePulseModal.jsx new file mode 100644 index 000000000000..97650c451697 --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/UnsubscribePulseModal.jsx @@ -0,0 +1,28 @@ +import { connect } from "react-redux"; +import _ from "underscore"; +import Pulses from "metabase/entities/pulses"; +import { getUser } from "metabase/selectors/user"; +import { navigateToArchive } from "../../actions"; +import { getPulseId } from "../../selectors"; +import UnsubscribeModal from "../../components/UnsubscribeModal"; + +const mapStateToProps = (state, { pulse }) => ({ + item: pulse, + type: "pulse", + user: getUser(state), +}); + +const mapDispatchToProps = { + onUnsubscribe: Pulses.actions.unsubscribe, + onArchive: navigateToArchive, +}; + +export default _.compose( + Pulses.load({ + id: (state, props) => getPulseId(props), + }), + connect( + mapStateToProps, + mapDispatchToProps, + ), +)(UnsubscribeModal); diff --git a/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/index.js b/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/index.js new file mode 100644 index 000000000000..08537ea00a8c --- /dev/null +++ b/frontend/src/metabase/account/notifications/containers/UnsubscribePulseModal/index.js @@ -0,0 +1 @@ +export { default } from "./UnsubscribePulseModal"; diff --git a/frontend/src/metabase/account/notifications/routes.jsx b/frontend/src/metabase/account/notifications/routes.jsx new file mode 100644 index 000000000000..639a742d1b71 --- /dev/null +++ b/frontend/src/metabase/account/notifications/routes.jsx @@ -0,0 +1,27 @@ +import React from "react"; +import { Route } from "metabase/hoc/Title"; +import { ModalRoute } from "metabase/hoc/ModalRoute"; +import NotificationsApp from "./containers/NotificationsApp"; +import HelpModal from "./components/HelpModal"; +import ArchiveAlertModal from 
"./containers/ArchiveAlertModal"; +import ArchivePulseModal from "./containers/ArchivePulseModal"; +import UnsubscribeAlertModal from "./containers/UnsubscribeAlertModal"; +import UnsubscribePulseModal from "./containers/UnsubscribePulseModal"; + +const getRoutes = () => ( + + + + + + + +); + +export default getRoutes; diff --git a/frontend/src/metabase/account/notifications/selectors.js b/frontend/src/metabase/account/notifications/selectors.js new file mode 100644 index 000000000000..5558b68e79e6 --- /dev/null +++ b/frontend/src/metabase/account/notifications/selectors.js @@ -0,0 +1,32 @@ +import { createSelector } from "reselect"; +import { parseTimestamp } from "metabase/lib/time"; + +export const getAlertId = ({ params: { alertId } }) => { + return parseInt(alertId); +}; + +export const getPulseId = ({ params: { pulseId } }) => { + return parseInt(pulseId); +}; + +export const getNotifications = createSelector( + [({ alerts }) => alerts, ({ pulses }) => pulses], + (alerts, pulses) => { + const items = [ + ...alerts.map(alert => ({ + item: alert, + type: "alert", + })), + ...pulses.map(pulse => ({ + item: pulse, + type: "pulse", + })), + ]; + + return items.sort( + (a, b) => + parseTimestamp(b.item.created_at).unix() - + parseTimestamp(a.item.created_at).unix(), + ); + }, +); diff --git a/frontend/src/metabase/account/password/actions.js b/frontend/src/metabase/account/password/actions.js new file mode 100644 index 000000000000..7ed6425b9ec8 --- /dev/null +++ b/frontend/src/metabase/account/password/actions.js @@ -0,0 +1,41 @@ +import { t } from "ttag"; +import { UserApi, UtilApi } from "metabase/services"; +import { createThunkAction } from "metabase/lib/redux"; + +export const UPDATE_PASSWORD = "UPDATE_PASSWORD"; +export const VALIDATE_PASSWORD = "VALIDATE_PASSWORD"; + +export const validatePassword = createThunkAction(VALIDATE_PASSWORD, function( + password, +) { + return async function() { + return await UtilApi.password_check({ + password: password, + }); 
+ }; +}); + +export const updatePassword = createThunkAction(UPDATE_PASSWORD, function( + user_id, + password, + old_password, +) { + return async function() { + try { + await UserApi.update_password({ + id: user_id, + password, + old_password, + }); + + return { + success: true, + data: { + message: t`Password updated successfully!`, + }, + }; + } catch (error) { + return error; + } + }; +}); diff --git a/frontend/src/metabase/account/password/components/UserPasswordForm/UserPasswordForm.jsx b/frontend/src/metabase/account/password/components/UserPasswordForm/UserPasswordForm.jsx new file mode 100644 index 000000000000..8a7d9633e07c --- /dev/null +++ b/frontend/src/metabase/account/password/components/UserPasswordForm/UserPasswordForm.jsx @@ -0,0 +1,45 @@ +import React, { useCallback } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import User from "metabase/entities/users"; + +const propTypes = { + user: PropTypes.object, + validatePassword: PropTypes.func, + updatePassword: PropTypes.func, +}; + +const UserPasswordForm = ({ user, validatePassword, updatePassword }) => { + const handleAsyncValidate = useCallback( + async ({ password }) => { + try { + validatePassword(password); + return {}; + } catch (error) { + return error.data.errors; + } + }, + [validatePassword], + ); + + const handleSubmit = useCallback( + ({ password, old_password }) => { + updatePassword(user.id, password, old_password); + }, + [user, updatePassword], + ); + + return ( + + ); +}; + +UserPasswordForm.propTypes = propTypes; + +export default UserPasswordForm; diff --git a/frontend/src/metabase/account/password/components/UserPasswordForm/index.js b/frontend/src/metabase/account/password/components/UserPasswordForm/index.js new file mode 100644 index 000000000000..107c471a5505 --- /dev/null +++ b/frontend/src/metabase/account/password/components/UserPasswordForm/index.js @@ -0,0 +1 @@ +export { default } from "./UserPasswordForm"; diff --git 
a/frontend/src/metabase/account/password/containers/UserPasswordApp/UserPasswordApp.jsx b/frontend/src/metabase/account/password/containers/UserPasswordApp/UserPasswordApp.jsx new file mode 100644 index 000000000000..b0c222bc2cd1 --- /dev/null +++ b/frontend/src/metabase/account/password/containers/UserPasswordApp/UserPasswordApp.jsx @@ -0,0 +1,18 @@ +import { connect } from "react-redux"; +import { getUser } from "metabase/selectors/user"; +import { updatePassword, validatePassword } from "../../actions"; +import UserPasswordForm from "../../components/UserPasswordForm"; + +const mapStateToProps = state => ({ + user: getUser(state), +}); + +const mapDispatchToProps = { + validatePassword, + updatePassword, +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(UserPasswordForm); diff --git a/frontend/src/metabase/account/password/containers/UserPasswordApp/index.js b/frontend/src/metabase/account/password/containers/UserPasswordApp/index.js new file mode 100644 index 000000000000..e3bb2bbe3842 --- /dev/null +++ b/frontend/src/metabase/account/password/containers/UserPasswordApp/index.js @@ -0,0 +1 @@ +export { default } from "./UserPasswordApp"; diff --git a/frontend/src/metabase/account/profile/components/UserProfileForm/UserProfileForm.jsx b/frontend/src/metabase/account/profile/components/UserProfileForm/UserProfileForm.jsx new file mode 100644 index 000000000000..45a412908a44 --- /dev/null +++ b/frontend/src/metabase/account/profile/components/UserProfileForm/UserProfileForm.jsx @@ -0,0 +1,24 @@ +import React, { useCallback } from "react"; +import PropTypes from "prop-types"; +import User from "metabase/entities/users"; + +const propTypes = { + user: PropTypes.object, +}; + +const UserProfileForm = ({ user }) => { + const handleSaved = useCallback( + ({ locale }) => { + if (locale !== user.locale) { + window.location.reload(); + } + }, + [user], + ); + + return ; +}; + +UserProfileForm.propTypes = propTypes; + +export default 
UserProfileForm; diff --git a/frontend/src/metabase/account/profile/components/UserProfileForm/index.js b/frontend/src/metabase/account/profile/components/UserProfileForm/index.js new file mode 100644 index 000000000000..3ca451b87862 --- /dev/null +++ b/frontend/src/metabase/account/profile/components/UserProfileForm/index.js @@ -0,0 +1 @@ +export { default } from "./UserProfileForm"; diff --git a/frontend/src/metabase/account/profile/containers/UserProfileApp/UserProfileApp.jsx b/frontend/src/metabase/account/profile/containers/UserProfileApp/UserProfileApp.jsx new file mode 100644 index 000000000000..5b26c02d115d --- /dev/null +++ b/frontend/src/metabase/account/profile/containers/UserProfileApp/UserProfileApp.jsx @@ -0,0 +1,9 @@ +import { connect } from "react-redux"; +import { getUser } from "metabase/selectors/user"; +import UserProfileForm from "../../components/UserProfileForm"; + +const mapStateToProps = state => ({ + user: getUser(state), +}); + +export default connect(mapStateToProps)(UserProfileForm); diff --git a/frontend/src/metabase/account/profile/containers/UserProfileApp/index.js b/frontend/src/metabase/account/profile/containers/UserProfileApp/index.js new file mode 100644 index 000000000000..0f02a783b089 --- /dev/null +++ b/frontend/src/metabase/account/profile/containers/UserProfileApp/index.js @@ -0,0 +1 @@ +export { default } from "./UserProfileApp"; diff --git a/frontend/src/metabase/account/routes.jsx b/frontend/src/metabase/account/routes.jsx new file mode 100644 index 000000000000..48747092e1f2 --- /dev/null +++ b/frontend/src/metabase/account/routes.jsx @@ -0,0 +1,25 @@ +import React from "react"; +import { t } from "ttag"; +import { IndexRedirect } from "react-router"; +import { Route } from "metabase/hoc/Title"; +import AccountSettingsApp from "./settings/containers/AccountSettingsApp"; +import UserProfileApp from "./profile/containers/UserProfileApp"; +import UserPasswordApp from "./password/containers/UserPasswordApp"; +import 
LoginHistoryApp from "./login-history/containers/LoginHistoryApp"; +import getNotificationRoutes from "./notifications/routes"; + +const getRoutes = (store, IsAuthenticated) => { + return ( + + + + + + + {getNotificationRoutes()} + + + ); +}; + +export default getRoutes; diff --git a/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.jsx b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.jsx new file mode 100644 index 000000000000..9a0d6d9e6855 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.jsx @@ -0,0 +1,55 @@ +import React, { useMemo } from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; +import Radio from "metabase/components/Radio"; +import { PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS } from "metabase/plugins"; +import { + AccountHeaderRoot, + HeaderAvatar, + HeaderSection, + HeaderTitle, +} from "./AccountHeader.styled"; + +const propTypes = { + user: PropTypes.object.isRequired, + path: PropTypes.string, + onChangeLocation: PropTypes.func, +}; + +const AccountHeader = ({ user, path, onChangeLocation }) => { + const hasPasswordChange = useMemo( + () => PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.every(f => f(user)), + [user], + ); + + const tabs = useMemo( + () => [ + { name: t`Profile`, value: "/account/profile" }, + ...(hasPasswordChange + ? 
[{ name: t`Password`, value: "/account/password" }] + : []), + { name: t`Login History`, value: "/account/login-history" }, + { name: t`Notifications`, value: "/account/notifications" }, + ], + [hasPasswordChange], + ); + + return ( + + + + {t`Account settings`} + + + + ); +}; + +AccountHeader.propTypes = propTypes; + +export default AccountHeader; diff --git a/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.styled.jsx b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.styled.jsx new file mode 100644 index 000000000000..f24c3b1cd068 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.styled.jsx @@ -0,0 +1,55 @@ +import styled from "styled-components"; +import colors from "metabase/lib/colors"; + +import { + breakpointMinMedium, + breakpointMinSmall, + space, +} from "metabase/styled-components/theme"; +import UserAvatar from "metabase/components/UserAvatar"; + +export const AccountHeaderRoot = styled.div` + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + padding-top: ${space(1)}; + border-bottom: 1px solid ${colors["border"]}; + background-color: ${colors["white"]}; + + ${breakpointMinSmall} { + padding-top: ${space(2)}; + } +`; + +export const HeaderSection = styled.div` + display: flex; + flex-direction: column; + align-items: center; + padding: ${space(2)}; + + ${breakpointMinMedium} { + padding: ${space(4)}; + } +`; + +export const HeaderTitle = styled.h2` + text-align: center; +`; + +export const HeaderAvatar = styled(UserAvatar)` + width: 3em; + height: 3em; + margin-bottom: ${space(1)}; + + ${breakpointMinSmall} { + width: 4em; + height: 4em; + margin-bottom: ${space(2)}; + } + + ${breakpointMinMedium} { + width: 5em; + height: 5em; + } +`; diff --git a/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.unit.spec.js 
b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.unit.spec.js new file mode 100644 index 000000000000..8ac3be18980b --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountHeader/AccountHeader.unit.spec.js @@ -0,0 +1,69 @@ +import React from "react"; +import { fireEvent, render, screen } from "@testing-library/react"; +import AccountHeader from "./AccountHeader"; +import { PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS } from "metabase/plugins"; + +const getUser = () => ({ + id: 1, + first_name: "John", + last_name: "Doe", + email: "john@metabase.test", + google_auth: true, +}); + +describe("AccountHeader", () => { + const ORIGINAL_SHOW_CHANGE_PASSWORD_CONDITIONS = [ + ...PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS, + ]; + + beforeEach(() => { + PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.splice(0); + }); + + afterEach(() => { + PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.splice( + 0, + PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.length, + ...ORIGINAL_SHOW_CHANGE_PASSWORD_CONDITIONS, + ); + }); + + it("should show all tabs for a regular user", () => { + const user = getUser(); + + render(); + + screen.getByText("Profile"); + screen.getByText("Password"); + screen.getByText("Login History"); + screen.getByText("Notifications"); + }); + + it("should show the password tab if it is enabled by a plugin", () => { + const user = getUser(); + PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.push(user => user.google_auth); + + render(); + + screen.getByText("Password"); + }); + + it("should hide the password tab if it is disabled by a plugin", () => { + const user = getUser(); + PLUGIN_SHOW_CHANGE_PASSWORD_CONDITIONS.push(user => !user.google_auth); + + render(); + + expect(screen.queryByText("Password")).not.toBeInTheDocument(); + }); + + it("should change location when a tab is selected", () => { + const user = getUser(); + const onChangeLocation = jest.fn(); + + render(); + + fireEvent.click(screen.getByText("Profile")); + 
expect(onChangeLocation).toHaveBeenCalledWith("/account/profile"); + }); +}); diff --git a/frontend/src/metabase/account/settings/components/AccountHeader/index.js b/frontend/src/metabase/account/settings/components/AccountHeader/index.js new file mode 100644 index 000000000000..f763e2c973a4 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountHeader/index.js @@ -0,0 +1 @@ +export { default } from "./AccountHeader"; diff --git a/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.jsx b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.jsx new file mode 100644 index 000000000000..e52ed223f819 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.jsx @@ -0,0 +1,22 @@ +import React from "react"; +import PropTypes from "prop-types"; +import AccountHeader from "../AccountHeader"; +import { AccountContent } from "./AccountLayout.styled"; + +const propTypes = { + ...AccountHeader.propTypes, + children: PropTypes.node, +}; + +const AccountLayout = ({ children, ...props }) => { + return ( +
+ + {children} +
+ ); +}; + +AccountLayout.propTypes = propTypes; + +export default AccountLayout; diff --git a/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.styled.jsx b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.styled.jsx new file mode 100644 index 000000000000..1ff1dd6b6413 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.styled.jsx @@ -0,0 +1,12 @@ +import styled from "styled-components"; +import { breakpointMinSmall, space } from "metabase/styled-components/theme"; + +export const AccountContent = styled.div` + margin: 0 auto; + padding: ${space(1)}; + + ${breakpointMinSmall} { + width: 540px; + padding: ${space(3)} ${space(2)}; + } +`; diff --git a/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.unit.spec.js b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.unit.spec.js new file mode 100644 index 000000000000..772a833782f5 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountLayout/AccountLayout.unit.spec.js @@ -0,0 +1,21 @@ +import React from "react"; +import { render, screen } from "@testing-library/react"; +import AccountLayout from "./AccountLayout"; + +const getUser = () => ({ + id: 1, + first_name: "John", + last_name: "Doe", + email: "john@metabase.test", +}); + +describe("AccountLayout", () => { + it("should render header and content", () => { + const user = getUser(); + + render(Content); + + screen.getByText("Profile"); + screen.getByText("Content"); + }); +}); diff --git a/frontend/src/metabase/account/settings/components/AccountLayout/index.js b/frontend/src/metabase/account/settings/components/AccountLayout/index.js new file mode 100644 index 000000000000..e8b4574ee5a0 --- /dev/null +++ b/frontend/src/metabase/account/settings/components/AccountLayout/index.js @@ -0,0 +1 @@ +export { default } from "./AccountLayout"; diff --git 
a/frontend/src/metabase/account/settings/containers/AccountSettingsApp/AccountSettingsApp.jsx b/frontend/src/metabase/account/settings/containers/AccountSettingsApp/AccountSettingsApp.jsx new file mode 100644 index 000000000000..a65c7d59200c --- /dev/null +++ b/frontend/src/metabase/account/settings/containers/AccountSettingsApp/AccountSettingsApp.jsx @@ -0,0 +1,18 @@ +import { connect } from "react-redux"; +import { push } from "react-router-redux"; +import { getUser } from "metabase/selectors/user"; +import AccountLayout from "../../components/AccountLayout"; + +const mapStateToProps = (state, props) => ({ + user: getUser(state), + path: props.location.pathname, +}); + +const mapDispatchToProps = { + onChangeLocation: push, +}; + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(AccountLayout); diff --git a/frontend/src/metabase/account/settings/containers/AccountSettingsApp/index.js b/frontend/src/metabase/account/settings/containers/AccountSettingsApp/index.js new file mode 100644 index 000000000000..96cffd19015b --- /dev/null +++ b/frontend/src/metabase/account/settings/containers/AccountSettingsApp/index.js @@ -0,0 +1 @@ +export { default } from "./AccountSettingsApp"; diff --git a/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.jsx b/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.jsx new file mode 100644 index 000000000000..b242b5e240b0 --- /dev/null +++ b/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.jsx @@ -0,0 +1,97 @@ +import React, { useRef } from "react"; +import PropTypes from "prop-types"; +import { Box } from "grid-styled"; +import { t } from "ttag"; + +import DeleteDatabaseModal from "metabase/admin/databases/components/DeleteDatabaseModal.jsx"; +import ActionButton from "metabase/components/ActionButton"; +import ModalWithTrigger from "metabase/components/ModalWithTrigger"; +import ConfirmContent from 
"metabase/components/ConfirmContent"; + +const propTypes = { + database: PropTypes.object.isRequired, + deleteDatabase: PropTypes.func.isRequired, + syncDatabaseSchema: PropTypes.func.isRequired, + rescanDatabaseFields: PropTypes.func.isRequired, + discardSavedFieldValues: PropTypes.func.isRequired, +}; + +const DatabaseEditAppSidebar = ({ + database, + deleteDatabase, + syncDatabaseSchema, + rescanDatabaseFields, + discardSavedFieldValues, +}) => { + const discardSavedFieldValuesModal = useRef(); + const deleteDatabaseModal = useRef(); + + return ( + +
+
+ +
    +
  1. + syncDatabaseSchema(database.id)} + className="Button Button--syncDbSchema" + normalText={t`Sync database schema now`} + activeText={t`Starting…`} + failedText={t`Failed to sync`} + successText={t`Sync triggered!`} + /> +
  2. +
  3. + rescanDatabaseFields(database.id)} + className="Button Button--rescanFieldValues" + normalText={t`Re-scan field values now`} + activeText={t`Starting…`} + failedText={t`Failed to start scan`} + successText={t`Scan triggered!`} + /> +
  4. +
+
+ +
+ +
    +
  1. + + discardSavedFieldValuesModal.current.toggle()} + onAction={() => discardSavedFieldValues(database.id)} + /> + +
  2. + +
  3. + + deleteDatabaseModal.current.toggle()} + onDelete={() => deleteDatabase(database.id, true)} + /> + +
  4. +
+
+
+
+ ); +}; + +DatabaseEditAppSidebar.propTypes = propTypes; + +export default DatabaseEditAppSidebar; diff --git a/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.unit.spec.js b/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.unit.spec.js new file mode 100644 index 000000000000..3b1c82c3841e --- /dev/null +++ b/frontend/src/metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar.unit.spec.js @@ -0,0 +1,99 @@ +import React from "react"; +import { fireEvent, render, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; + +import Sidebar from "./Sidebar"; + +it("syncs database schema", () => { + const databaseId = 1; + const database = { id: databaseId }; + const syncDatabaseSchema = jest.fn(); + + render( + , + ); + + const syncButton = screen.getByText("Sync database schema now"); + + fireEvent.click(syncButton); + + expect(syncDatabaseSchema).toHaveBeenCalledWith(databaseId); +}); + +it("rescans database field values", () => { + const databaseId = 1; + const database = { id: databaseId }; + const rescanDatabaseFields = jest.fn(); + + render( + , + ); + + const rescanButton = screen.getByText("Re-scan field values now"); + + fireEvent.click(rescanButton); + + expect(rescanDatabaseFields).toHaveBeenCalledWith(databaseId); +}); + +it("discards saved field values", () => { + const databaseId = 1; + const database = { id: databaseId }; + const discardSavedFieldValues = jest.fn(); + + render( + , + ); + + const discardButton = screen.getByText("Discard saved field values"); + + fireEvent.click(discardButton); + + expect(screen.getAllByText("Discard saved field values").length).toBe(2); + + const cancelButton = screen.getByText("Cancel"); + + fireEvent.click(cancelButton); + + fireEvent.click(discardButton); + + const yesButton = screen.getByText("Yes"); + + fireEvent.click(yesButton); + + expect(discardSavedFieldValues).toHaveBeenCalledWith(databaseId); 
+}); + +it("removes database", () => { + const databaseId = 1; + const name = "DB Name"; + const database = { id: databaseId, name }; + const deleteDatabase = jest.fn(); + + render(); + + const removeDBButton = screen.getByText("Remove this database"); + + fireEvent.click(removeDBButton); + + screen.getByText(`Delete the ${name} database?`); + + const cancelButton = screen.getByText("Cancel"); + + fireEvent.click(cancelButton); + + fireEvent.click(removeDBButton); + + const input = screen.getByRole("textbox"); + + userEvent.type(input, name); + + const deleteButton = screen.getByText("Delete"); + + fireEvent.click(deleteButton); + + expect(deleteDatabase).toHaveBeenCalled(); +}); diff --git a/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx b/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx index 94a568d1e317..25584c48008b 100644 --- a/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx +++ b/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx @@ -10,13 +10,12 @@ import { Box, Flex } from "grid-styled"; import title from "metabase/hoc/Title"; -import DeleteDatabaseModal from "../components/DeleteDatabaseModal"; -import ActionButton from "metabase/components/ActionButton"; import AddDatabaseHelpCard from "metabase/components/AddDatabaseHelpCard"; import Button from "metabase/components/Button"; import Breadcrumbs from "metabase/components/Breadcrumbs"; +import DriverWarning from "metabase/components/DriverWarning"; import Radio from "metabase/components/Radio"; -import ModalWithTrigger from "metabase/components/ModalWithTrigger"; +import Sidebar from "metabase/admin/databases/components/DatabaseEditApp/Sidebar/Sidebar"; import Databases from "metabase/entities/databases"; @@ -37,7 +36,6 @@ import { deleteDatabase, selectEngine, } from "../database"; -import ConfirmContent from "metabase/components/ConfirmContent"; import LoadingAndErrorWrapper from "metabase/components/LoadingAndErrorWrapper"; 
import { getIn } from "icepick"; @@ -71,10 +69,7 @@ const mapDispatchToProps = { selectEngine, }; -type TabName = "connection" | "scheduling"; -type TabOption = { name: string, value: TabName }; - -const TABS: TabOption[] = [ +const TABS = [ { name: t`Connection`, value: "connection", @@ -91,19 +86,12 @@ const TABS: TabOption[] = [ ) @title(({ database }) => database && database.name) export default class DatabaseEditApp extends Component { - state: { - currentTab: TabName, - }; - constructor(props, context) { super(props, context); this.state = { currentTab: TABS[0].value, }; - - this.discardSavedFieldValuesModal = React.createRef(); - this.deleteDatabaseModal = React.createRef(); } static propTypes = { @@ -138,26 +126,30 @@ export default class DatabaseEditApp extends Component { render() { const { database, + deleteDatabase, + discardSavedFieldValues, selectedEngine, letUserControlSchedulingSaved, letUserControlSchedulingForm, initializeError, + rescanDatabaseFields, + syncDatabaseSchema, } = this.props; const { currentTab } = this.state; - const editingExistingDatabase = database && database.id != null; + const editingExistingDatabase = database?.id != null; const addingNewDatabase = !editingExistingDatabase; const showTabs = editingExistingDatabase && letUserControlSchedulingSaved; + const crumbs = [ + [t`Databases`, "/admin/databases"], + [addingNewDatabase ? t`Add Database` : database.name], + ]; + return ( - + +
@@ -171,134 +163,100 @@ export default class DatabaseEditApp extends Component { />
)} - - - + {() => ( + ( + + )) + } + submitButtonComponent={Button} > - {() => ( - ( - - )) - } - submitButtonComponent={Button} - /> - )} - - - {addingNewDatabase && ( - - - + {({ + Form, + FormField, + FormMessage, + FormSubmit, + formFields, + onChangeField, + submitTitle, + }) => { + return ( + + +
+ {formFields.map(formField => ( + + ))} + +
+ + {submitTitle} + +
+ +
+ + {addingNewDatabase && ( + + )} + { + onChangeField("engine", engine); + }} + data-testid="database-setup-driver-warning" + /> + +
+ ); + }} + )} -
+
- {/* Sidebar Actions */} {editingExistingDatabase && ( - -
-
- -
    -
  1. - - this.props.syncDatabaseSchema(database.id) - } - className="Button Button--syncDbSchema" - normalText={t`Sync database schema now`} - activeText={t`Starting…`} - failedText={t`Failed to sync`} - successText={t`Sync triggered!`} - /> -
  2. -
  3. - - this.props.rescanDatabaseFields(database.id) - } - className="Button Button--rescanFieldValues" - normalText={t`Re-scan field values now`} - activeText={t`Starting…`} - failedText={t`Failed to start scan`} - successText={t`Scan triggered!`} - /> -
  4. -
-
- -
- -
    -
  1. - - - this.discardSavedFieldValuesModal.current.toggle() - } - onAction={() => - this.props.discardSavedFieldValues(database.id) - } - /> - -
  2. - -
  3. - - - this.deleteDatabaseModal.current.toggle() - } - onDelete={() => - this.props.deleteDatabase(database.id, true) - } - /> - -
  4. -
-
-
-
+ )} diff --git a/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.unit.spec.js b/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.unit.spec.js new file mode 100644 index 000000000000..1e61ab53958f --- /dev/null +++ b/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.unit.spec.js @@ -0,0 +1,105 @@ +import React from "react"; +import { Provider } from "react-redux"; +import { reducer as form } from "redux-form"; +import { Router, Route } from "react-router"; +import { createMemoryHistory } from "history"; +import { + render, + screen, + waitForElementToBeRemoved, +} from "@testing-library/react"; +import admin from "metabase/admin/admin"; +import MetabaseSettings from "metabase/lib/settings"; +import { PLUGIN_CACHING } from "metabase/plugins"; +import { getStore } from "__support__/entities-store"; +import DatabaseEditApp from "./DatabaseEditApp"; + +const ENGINES_MOCK = { + h2: { + "details-fields": [ + { "display-name": "Connection String", name: "db", required: true }, + ], + "driver-name": "H2", + "superseded-by": null, + }, + sqlite: { + "details-fields": [ + { "display-name": "Filename", name: "db", required: true }, + ], + "driver-name": "SQLite", + "superseded-by": null, + }, +}; + +function mockSettings({ cachingEnabled = false }) { + const spy = jest.spyOn(MetabaseSettings, "get"); + spy.mockImplementation(key => { + if (key === "engines") { + return ENGINES_MOCK; + } + if (key === "enable-query-caching") { + return cachingEnabled; + } + if (key === "site-url") { + return "http://localhost:3333"; + } + }); +} + +async function setup({ cachingEnabled = false } = {}) { + mockSettings({ cachingEnabled }); + + render( + + + + + , + ); + + await waitForElementToBeRemoved(() => screen.queryByText("Loading...")); +} + +describe("DatabaseEditApp", () => { + describe("Cache TTL field", () => { + describe("OSS", () => { + it("is invisible", async () => { + await setup({ cachingEnabled: true }); + + expect( + 
screen.queryByText("Default result cache duration"), + ).not.toBeInTheDocument(); + }); + }); + + describe("EE", () => { + beforeEach(() => { + PLUGIN_CACHING.databaseCacheTTLFormField = { + name: "cache_ttl", + type: "integer", + title: "Default result cache duration", + }; + }); + + afterEach(() => { + PLUGIN_CACHING.databaseCacheTTLFormField = null; + }); + + it("is visible", async () => { + await setup({ cachingEnabled: true }); + + expect( + screen.queryByText("Default result cache duration"), + ).toBeInTheDocument(); + }); + + it("is invisible when caching disabled", async () => { + await setup({ cachingEnabled: false }); + + expect( + screen.queryByText("Default result cache duration"), + ).not.toBeInTheDocument(); + }); + }); + }); +}); diff --git a/frontend/src/metabase/admin/databases/database.js b/frontend/src/metabase/admin/databases/database.js index 17397dc3fdfb..72a4d9e1c11d 100644 --- a/frontend/src/metabase/admin/databases/database.js +++ b/frontend/src/metabase/admin/databases/database.js @@ -12,6 +12,8 @@ import MetabaseSettings from "metabase/lib/settings"; import { MetabaseApi } from "metabase/services"; import Databases from "metabase/entities/databases"; +import { editParamsForUserControlledScheduling } from "./editParamsForUserControlledScheduling"; + // Default schedules for db sync and deep analysis export const DEFAULT_SCHEDULES = { cache_field_values: { @@ -165,10 +167,12 @@ export const proceedWithDbCreation = function(database) { if (database.details["let-user-control-scheduling"]) { try { dispatch.action(VALIDATE_DATABASE_STARTED); + const { valid } = await MetabaseApi.db_validate({ details: database }); + if (valid) { dispatch.action(SET_DATABASE_CREATION_STEP, { - database: database, + database, step: DB_EDIT_FORM_SCHEDULING_TAB, }); } else { @@ -190,6 +194,8 @@ export const proceedWithDbCreation = function(database) { }; export const createDatabase = function(database) { + editParamsForUserControlledScheduling(database); + return 
async function(dispatch, getState) { try { dispatch.action(CREATE_DATABASE_STARTED, {}); diff --git a/frontend/src/metabase/admin/databases/editParamsForUserControlledScheduling.js b/frontend/src/metabase/admin/databases/editParamsForUserControlledScheduling.js new file mode 100644 index 000000000000..9b29a9e62549 --- /dev/null +++ b/frontend/src/metabase/admin/databases/editParamsForUserControlledScheduling.js @@ -0,0 +1,20 @@ +export function editParamsForUserControlledScheduling(database) { + editSyncParamsForUserControlledScheduling(database); + editScheduleParamsForUserControlledScheduling(database); +} + +function editSyncParamsForUserControlledScheduling(database) { + if (database.details["let-user-control-scheduling"]) { + database.is_full_sync = false; + } +} + +function editScheduleParamsForUserControlledScheduling(database) { + const { details, schedules } = database; + + if (details["let-user-control-scheduling"] && !schedules?.metadata_sync) { + database.schedules.metadata_sync = { + schedule_type: "daily", + }; + } +} diff --git a/frontend/src/metabase/admin/databases/editParamsForUserControlledScheduling.unit.spec.js b/frontend/src/metabase/admin/databases/editParamsForUserControlledScheduling.unit.spec.js new file mode 100644 index 000000000000..3b32538412c3 --- /dev/null +++ b/frontend/src/metabase/admin/databases/editParamsForUserControlledScheduling.unit.spec.js @@ -0,0 +1,56 @@ +import { editParamsForUserControlledScheduling } from "./editParamsForUserControlledScheduling"; + +it("adds full_sync param if user will control scheduling", () => { + const database = { + schedules: {}, + details: { "let-user-control-scheduling": true }, + }; + + editParamsForUserControlledScheduling(database); + + expect(database.is_full_sync).toBe(false); +}); + +it("does not add full_sync param if user will not control scheduling", () => { + const database = { + schedules: {}, + details: {}, + }; + + editParamsForUserControlledScheduling(database); + + 
expect(database.is_full_sync).toBe(undefined); +}); + +it("adds metadata_sync param if user will control scheduling and no metadata_sync data is present", () => { + const database = { + schedules: {}, + details: { "let-user-control-scheduling": true }, + }; + + editParamsForUserControlledScheduling(database); + + expect(database.schedules.metadata_sync.schedule_type).toBe("daily"); +}); + +it("does not add metadata_sync param if user will not control scheduling", () => { + const database = { + schedules: {}, + details: {}, + }; + + editParamsForUserControlledScheduling(database); + + expect(database.schedules).toStrictEqual({}); +}); + +it("does not add metadata_sync param if user will control scheduling and metadata_sync data is present", () => { + const database = { + schedules: { metadata_sync: { schedule_type: "hourly" } }, + details: { "let-user-control-scheduling": true }, + }; + + editParamsForUserControlledScheduling(database); + + expect(database.schedules.metadata_sync.schedule_type).toBe("hourly"); +}); diff --git a/frontend/src/metabase/admin/datamodel/components/MetricItem.jsx b/frontend/src/metabase/admin/datamodel/components/MetricItem.jsx index c9ae0e1899a5..ca4da250112c 100644 --- a/frontend/src/metabase/admin/datamodel/components/MetricItem.jsx +++ b/frontend/src/metabase/admin/datamodel/components/MetricItem.jsx @@ -24,11 +24,7 @@ export default class MetricItem extends Component { - + {metric.name} diff --git a/frontend/src/metabase/admin/datamodel/components/SegmentItem.jsx b/frontend/src/metabase/admin/datamodel/components/SegmentItem.jsx index d1925cdf7f00..7d06f6095d0b 100644 --- a/frontend/src/metabase/admin/datamodel/components/SegmentItem.jsx +++ b/frontend/src/metabase/admin/datamodel/components/SegmentItem.jsx @@ -25,8 +25,8 @@ export default class SegmentItem extends Component { {segment.name} diff --git a/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx 
b/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx index cba89771856a..8180734d084c 100644 --- a/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx +++ b/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx @@ -10,9 +10,10 @@ import Button from "metabase/components/Button"; import * as MetabaseCore from "metabase/lib/core"; import { isCurrency } from "metabase/lib/schema_metadata"; import { isFK } from "metabase/lib/types"; -import currency from "metabase/lib/currency"; import { getGlobalSettingsForColumn } from "metabase/visualizations/lib/settings/column"; +import { currency } from "cljs/metabase.shared.util.currency"; + import _ from "underscore"; import cx from "classnames"; @@ -238,7 +239,7 @@ export class SemanticTypeAndTargetPicker extends Component { searchProp="name" searchCaseSensitive={false} > - {Object.values(currency).map(c => ( + {currency.map(([_, c]) => (
)} diff --git a/frontend/src/metabase/nav/components/SearchBar.styled.jsx b/frontend/src/metabase/nav/components/SearchBar.styled.jsx new file mode 100644 index 000000000000..273b688aa444 --- /dev/null +++ b/frontend/src/metabase/nav/components/SearchBar.styled.jsx @@ -0,0 +1,39 @@ +import styled from "styled-components"; +import { space } from "styled-system"; + +import { DefaultSearchColor } from "metabase/nav/constants"; +import { color, lighten } from "metabase/lib/colors"; + +const ActiveSearchColor = lighten(color("nav"), 0.1); + +export const SearchWrapper = styled.div` + display: flex; + position: relative; + background-color: ${props => + props.active ? ActiveSearchColor : DefaultSearchColor}; + border-radius: 6px; + flex: 1 1 auto; + max-width: 50em; + align-items: center; + color: white; + transition: background 300ms ease-in; + &:hover { + background-color: ${ActiveSearchColor}; + } +`; + +export const SearchInput = styled.input` + ${space}; + background-color: transparent; + width: 100%; + border: none; + color: white; + font-size: 1em; + font-weight: 700; + &:focus { + outline: none; + } + &::placeholder { + color: ${color("text-white")}; + } +`; diff --git a/frontend/src/metabase/nav/components/SearchResults.jsx b/frontend/src/metabase/nav/components/SearchResults.jsx new file mode 100644 index 000000000000..e23d6458fe8b --- /dev/null +++ b/frontend/src/metabase/nav/components/SearchResults.jsx @@ -0,0 +1,46 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { Box } from "grid-styled"; +import { t } from "ttag"; + +import { DEFAULT_SEARCH_LIMIT } from "metabase/lib/constants"; +import Search from "metabase/entities/search"; +import SearchResult from "metabase/search/components/SearchResult"; +import EmptyState from "metabase/components/EmptyState"; + +const propTypes = { + searchText: PropTypes.string, +}; + +export const SearchResults = ({ searchText }) => { + return ( + + {({ list }) => { + const hasResults = list.length > 
0; + + return ( +
    + {hasResults ? ( + list.map(item => ( +
  • + +
  • + )) + ) : ( + + + + )} +
+ ); + }} +
+ ); +}; + +SearchResults.propTypes = propTypes; diff --git a/frontend/src/metabase/nav/components/StoreLink/StoreLink.jsx b/frontend/src/metabase/nav/components/StoreLink/StoreLink.jsx new file mode 100644 index 000000000000..2ee83e41a98c --- /dev/null +++ b/frontend/src/metabase/nav/components/StoreLink/StoreLink.jsx @@ -0,0 +1,18 @@ +import React from "react"; +import { t } from "ttag"; +import Tooltip from "metabase/components/Tooltip"; +import { StoreIcon, StoreIconRoot, StoreIconWrapper } from "./StoreLink.styled"; + +const StoreLink = () => { + return ( + + + + + + + + ); +}; + +export default StoreLink; diff --git a/frontend/src/metabase/nav/components/StoreLink/StoreLink.styled.jsx b/frontend/src/metabase/nav/components/StoreLink/StoreLink.styled.jsx new file mode 100644 index 000000000000..8df56a95afca --- /dev/null +++ b/frontend/src/metabase/nav/components/StoreLink/StoreLink.styled.jsx @@ -0,0 +1,25 @@ +import styled from "styled-components"; +import { color, darken } from "metabase/lib/colors"; +import { space } from "metabase/styled-components/theme"; +import Icon, { IconWrapper } from "metabase/components/Icon"; +import ExternalLink from "metabase/components/ExternalLink"; + +export const StoreIconRoot = styled(ExternalLink)` + margin-right: ${space(1)}; +`; + +export const StoreIconWrapper = styled(IconWrapper)` + color: ${color("white")}; + + &:hover { + color: ${color("white")}; + background-color: ${darken(color("accent7"))}; + } +`; + +export const StoreIcon = styled(Icon).attrs({ + name: "store", + size: 18, +})` + margin: ${space(1)}; +`; diff --git a/frontend/src/metabase/nav/components/StoreLink/index.js b/frontend/src/metabase/nav/components/StoreLink/index.js new file mode 100644 index 000000000000..59ef9cb93dbf --- /dev/null +++ b/frontend/src/metabase/nav/components/StoreLink/index.js @@ -0,0 +1 @@ +export { default } from "./StoreLink"; diff --git a/frontend/src/metabase/nav/components/utils.js 
b/frontend/src/metabase/nav/components/utils.js new file mode 100644 index 000000000000..36f3829f4b33 --- /dev/null +++ b/frontend/src/metabase/nav/components/utils.js @@ -0,0 +1,15 @@ +import { t } from "ttag"; + +const TRANSLATED_NAME_BY_MODEL_TYPE = { + card: t`Question`, + dashboard: t`Dashboard`, + table: t`Table`, + database: t`Database`, + collection: t`Collection`, + segment: t`Segment`, + metric: t`Metric`, + pulse: t`Pulse`, +}; + +export const getTranslatedEntityName = type => + TRANSLATED_NAME_BY_MODEL_TYPE[type] || null; diff --git a/frontend/src/metabase/nav/containers/Navbar.jsx b/frontend/src/metabase/nav/containers/Navbar.jsx index cc3bd4d8dc01..a42856c544cc 100644 --- a/frontend/src/metabase/nav/containers/Navbar.jsx +++ b/frontend/src/metabase/nav/containers/Navbar.jsx @@ -13,6 +13,7 @@ import { Flex, Box } from "grid-styled"; import * as Urls from "metabase/lib/urls"; import { color, darken } from "metabase/lib/colors"; +import MetabaseSettings from "metabase/lib/settings"; import Icon, { IconWrapper } from "metabase/components/Icon"; import EntityMenu from "metabase/components/EntityMenu"; @@ -43,6 +44,7 @@ const mapStateToProps = (state, props) => ({ }); import { DefaultSearchColor } from "metabase/nav/constants"; +import StoreLink from "metabase/nav/components/StoreLink"; const mapDispatchToProps = { onChangeLocation: push, @@ -156,6 +158,7 @@ export default class Navbar extends Component { /> + {!MetabaseSettings.isPaidPlan() && }
{this.renderModal()} @@ -216,7 +219,7 @@ export default class Navbar extends Component { - + ({ }); const mapDispatchToProps = { - prefetchTables: () => Database.actions.fetchList({ include: "tables" }), - prefetchDatabases: () => Database.actions.fetchList({ saved: true }), push, }; @@ -55,8 +49,6 @@ export default class NewQueryOptions extends Component { props: Props; UNSAFE_componentWillMount(props) { - this.props.prefetchTables(); - this.props.prefetchDatabases(); const { location, push } = this.props; if (Object.keys(location.query).length > 0) { const { database, table, ...options } = location.query; @@ -91,7 +83,7 @@ export default class NewQueryOptions extends Component { {hasDataAccess && ( - + )} {hasDataAccess && ( - + )} {hasNativeWrite && ( - + - {showTypeIcon && } - - - +
+ {showTypeIcon && } + + +
+ ); } else { const placeholderText = isEditing @@ -194,37 +208,46 @@ export default class ParameterValueWidget extends Component { : placeholder || t`Select…`; return ( - - {showTypeIcon && } -
- {hasValue ? WidgetDefinition.format(value) : placeholderText} -
- - - } - target={this.getTargetRef} - // make sure the full date picker will expand to fit the dual calendars - autoWidth={parameter.type === "date/all-options"} + - -
+ + {showTypeIcon && } +
+ {hasValue ? WidgetDefinition.format(value) : placeholderText} +
+ + + } + target={this.getTargetRef} + // make sure the full date picker will expand to fit the dual calendars + autoWidth={parameter.type === "date/all-options"} + > + +
+ ); } } @@ -234,9 +257,12 @@ function getFields(metadata, parameter) { if (!metadata) { return []; } - return getFieldIds(parameter) - .map(id => metadata.field(id)) - .filter(f => f != null); + return ( + parameter.fields ?? + getFieldIds(parameter) + .map(id => metadata.field(id)) + .filter(f => f != null) + ); } function getFieldIds(parameter) { @@ -258,9 +284,21 @@ function Widget({ onFocusChanged, parameters, dashboard, + disabled, }) { const DateWidget = DATE_WIDGETS[parameter.type]; const fields = getFields(metadata, parameter); + + if (disabled) { + return ( + + ); + } + if (DateWidget) { return ( @@ -294,6 +332,7 @@ function Widget({ ); } } + Widget.propTypes = { ...ParameterValueWidget.propTypes, onPopoverClose: PropTypes.func.isRequired, diff --git a/frontend/src/metabase/parameters/components/ParameterWidget.jsx b/frontend/src/metabase/parameters/components/ParameterWidget.jsx index c2bf020f7ddc..080c65497c06 100644 --- a/frontend/src/metabase/parameters/components/ParameterWidget.jsx +++ b/frontend/src/metabase/parameters/components/ParameterWidget.jsx @@ -28,15 +28,21 @@ export default class ParameterWidget extends Component { }; renderPopover(value, setValue, placeholder, isFullscreen) { - const { parameter, editingParameter, commitImmediately } = this.props; - const isEditingParameter = !!( - editingParameter && editingParameter.id === parameter.id - ); + const { + dashboard, + parameter, + editingParameter, + commitImmediately, + parameters, + } = this.props; + + const isEditingParameter = editingParameter?.id === parameter.id; + return ( ({ metadata: getMetadata(state) })) export default class Parameters extends Component { - defaultProps = { - syncQueryString: false, - }; - - constructor(props) { - super(props); - - syncQueryParamsWithURL(props); - } - componentDidUpdate() { - const { parameters, parameterValues } = this.props; + const { parameters, parameterValues, dashboard } = this.props; if (this.props.syncQueryString) { // sync parameters 
to URL query string - const queryParams = {}; - for (const parameter of collateParametersWithValues( + const parameterValuesBySlug = getParameterValuesBySlug( parameters, parameterValues, - )) { - if (parameter.value) { - queryParams[parameter.slug] = parameter.value; - } - } + dashboard && { preserveDefaultedParameters: true }, + ); - let search = querystring.stringify(queryParams); + let search = querystring.stringify(parameterValuesBySlug); search = search ? "?" + search : ""; if (search !== window.location.search) { diff --git a/frontend/src/metabase/parameters/components/Parameters/syncQueryParamsWithURL.js b/frontend/src/metabase/parameters/components/Parameters/syncQueryParamsWithURL.js deleted file mode 100644 index eb8f1082e0c6..000000000000 --- a/frontend/src/metabase/parameters/components/Parameters/syncQueryParamsWithURL.js +++ /dev/null @@ -1,101 +0,0 @@ -import Dimension from "metabase-lib/lib/Dimension"; - -export const syncQueryParamsWithURL = props => { - props.commitImmediately - ? 
syncForInternalQuestion(props) - : syncForPublicQuestion(props); -}; - -const syncForInternalQuestion = props => { - const { parameters, setParameterValue, query, metadata } = props; - - if (!setParameterValue) { - return; - } - - for (const parameter of parameters) { - const queryParam = query && query[parameter.slug]; - - if (queryParam != null || parameter.default != null) { - const parsedParam = parseQueryParams(queryParam, parameter, metadata); - - setParameterValue(parameter.id, parsedParam); - } - } -}; - -const syncForPublicQuestion = props => { - const { parameters, setParameterValue, query, metadata } = props; - - if (!setParameterValue) { - return; - } - - const parameterValues = parameters.reduce((acc, parameter) => { - const queryParam = query && query[parameter.slug]; - - if (queryParam != null || parameter.default != null) { - acc[parameter.id] = parseQueryParams(queryParam, parameter, metadata); - } - - return acc; - }, {}); - - setParameterValue(parameterValues); -}; - -const parseQueryParams = (queryParam, parameter, metadata) => { - const value = getValue(queryParam, parameter); - const fields = getFields(parameter, metadata); - - return getValueFromFields(value, fields); -}; - -const getValue = (queryParam, parameter) => { - const value = queryParam != null ? queryParam : parameter.default; - return treatValueForFieldValuesWidget(value, parameter); -}; - -const treatValueForFieldValuesWidget = (value, parameter) => { - // ParameterValueWidget uses FieldValuesWidget if there's no available - // date widget and all targets are fields. - const willUseFieldValuesWidget = - parameter.hasOnlyFieldTargets && !/^date\//.test(parameter.type); - - // If we'll use FieldValuesWidget, we should start with an array to match. 
- if (willUseFieldValuesWidget && !Array.isArray(value)) { - value = [value]; - } - - return value; -}; - -// field IDs can be either -// ["field", , ] or -// ["field", , ] -const getFields = (parameter, metadata) => { - const fieldIds = parameter.field_ids || []; - return fieldIds.map( - id => metadata.field(id) || Dimension.parseMBQL(id, metadata).field(), - ); -}; - -export const getValueFromFields = (value, fields) => { - if (Array.isArray(value)) { - return value.map(v => getValueFromFields(v, fields)); - } - - // [].every is always true, so only check if there are some fields - if (fields.length > 0) { - // unix dates fields are numeric but query params shouldn't be parsed as numbers - if (fields.every(f => f.isNumeric() && !f.isDate())) { - return parseFloat(value); - } - - if (fields.every(f => f.isBoolean())) { - return value === "true" ? true : value === "false" ? false : value; - } - } - - return value; -}; diff --git a/frontend/src/metabase/parameters/components/ParametersList.jsx b/frontend/src/metabase/parameters/components/ParametersList.jsx index 6975d04068f1..e316f53a06a6 100644 --- a/frontend/src/metabase/parameters/components/ParametersList.jsx +++ b/frontend/src/metabase/parameters/components/ParametersList.jsx @@ -1,15 +1,18 @@ /* eslint-disable react/prop-types */ import React from "react"; -import { - SortableContainer, - SortableElement, - SortableHandle, -} from "react-sortable-hoc"; import cx from "classnames"; import StaticParameterWidget from "./ParameterWidget"; import Icon from "metabase/components/Icon"; -import { collateParametersWithValues } from "metabase/meta/Parameter"; +import { + SortableContainer, + SortableElement, + SortableHandle, +} from "metabase/components/sortable"; +import { + getValuePopulatedParameters, + getVisibleParameters, +} from "metabase/meta/Parameter"; import type { ParameterId, @@ -105,15 +108,16 @@ function ParametersList({ } }; - const hiddenParameters = - typeof hideParameters === "string" - ? 
new Set(hideParameters.split(",")) - : new Set(); - const collatedParameters = collateParametersWithValues( + const valuePopulatedParameters = getValuePopulatedParameters( parameters, parameterValues, ); + const visibleValuePopulatedParameters = getVisibleParameters( + valuePopulatedParameters, + hideParameters, + ); + let ParameterWidget; let ParameterWidgetList; if (isEditing) { @@ -137,41 +141,42 @@ function ParametersList({ onSortStart={handleSortStart} onSortEnd={handleSortEnd} > - {collatedParameters - .filter(p => !hiddenParameters.has(p.slug)) - .map((parameter, index) => ( - setParameterName(parameter.id, name)) - } - setValue={ - setParameterValue && - (value => setParameterValue(parameter.id, value)) - } - setDefaultValue={ - setParameterDefaultValue && - (value => setParameterDefaultValue(parameter.id, value)) - } - remove={removeParameter && (() => removeParameter(parameter.id))} - commitImmediately={commitImmediately} - dragHandle={ - isEditing && setParameterIndex ? ( - - ) : null - } - /> - ))} + {visibleValuePopulatedParameters.map((valuePopulatedParameter, index) => ( + setParameterName(valuePopulatedParameter.id, name)) + } + setValue={ + setParameterValue && + (value => setParameterValue(valuePopulatedParameter.id, value)) + } + setDefaultValue={ + setParameterDefaultValue && + (value => + setParameterDefaultValue(valuePopulatedParameter.id, value)) + } + remove={ + removeParameter && + (() => removeParameter(valuePopulatedParameter.id)) + } + commitImmediately={commitImmediately} + dragHandle={ + isEditing && setParameterIndex ? 
: null + } + /> + ))} ); } diff --git a/frontend/src/metabase/parameters/components/widgets/DateMonthYearWidget.jsx b/frontend/src/metabase/parameters/components/widgets/DateMonthYearWidget.jsx index c9de5fc23c06..e15d09203f67 100644 --- a/frontend/src/metabase/parameters/components/widgets/DateMonthYearWidget.jsx +++ b/frontend/src/metabase/parameters/components/widgets/DateMonthYearWidget.jsx @@ -57,9 +57,9 @@ export default class DateMonthYearWidget extends React.Component { onChange={year => this.setState({ year: year })} /> - + {_.range(0, 12).map(m => ( - + void, - - isEditing: boolean, - - fields: Field[], - parentFocusChanged: boolean => void, - - operator?: FilterOperator, - dashboard?: DashboardWithCards, - parameter?: Parameter, - parameters?: Parameter[], - placeholder?: string, -}; - -type State = { - value: any[], - isFocused: boolean, - widgetWidth: ?number, +const propTypes = { + dashboard: PropTypes.object, + fields: PropTypes.array.isRequired, + isEditing: PropTypes.bool.isRequired, + operator: PropTypes.object.isRequired, + parameter: PropTypes.object.isRequired, + parameters: PropTypes.array.isRequired, + parentFocusChanged: PropTypes.bool, + placeholder: PropTypes.string.isRequired, + setValue: PropTypes.func.isRequired, + value: PropTypes.string, }; const BORDER_WIDTH = 1; -const normalizeValue = value => - Array.isArray(value) ? value : value != null ? 
[value] : []; - -// TODO: rename this something else since we're using it for more than searching and more than text -export default class ParameterFieldWidget extends Component<*, Props, State> { - props: Props; - state: State; - - _unfocusedElement: React.Component; - - constructor(props: Props) { +export default class ParameterFieldWidget extends Component { + constructor(props) { super(props); this.state = { isFocused: false, @@ -66,25 +46,7 @@ export default class ParameterFieldWidget extends Component<*, Props, State> { static noPopover = true; - static format(value, fields) { - value = normalizeValue(value); - if (value.length > 1) { - const n = value.length; - return ngettext(msgid`${n} selection`, `${n} selections`, n); - } else { - return ( - - ); - } - } - - UNSAFE_componentWillReceiveProps(nextProps: Props) { + UNSAFE_componentWillReceiveProps(nextProps) { if (this.props.value !== nextProps.value) { this.setState({ value: nextProps.value }); } @@ -130,7 +92,7 @@ export default class ParameterFieldWidget extends Component<*, Props, State> { const footerClassName = cx( "flex mt1 px1 pb1 PopoverFooter PopoverParameterFieldWidgetFooter", - isEqualsOp && "mr1 mb1", + isEqualsOp ? "mr1 mb1" : "PopoverFooterWhenIsNotEqualOps", ); const placeholder = isEditing @@ -145,7 +107,10 @@ export default class ParameterFieldWidget extends Component<*, Props, State> { onClick={() => focusChanged(true)} > {savedValue.length > 0 ? 
( - ParameterFieldWidget.format(savedValue, fields) + ) : ( {placeholder} )} @@ -216,3 +181,5 @@ export default class ParameterFieldWidget extends Component<*, Props, State> { } } } + +ParameterFieldWidget.propTypes = propTypes; diff --git a/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.jsx b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.jsx new file mode 100644 index 000000000000..5e566e567d2d --- /dev/null +++ b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.jsx @@ -0,0 +1,35 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { ngettext, msgid } from "ttag"; + +import Value from "metabase/components/Value"; +import { normalizeValue } from "../normalizeValue"; + +function renderNumberOfSelections(numberOfSelections) { + return ngettext( + msgid`${numberOfSelections} selection`, + `${numberOfSelections} selections`, + numberOfSelections, + ); +} + +export default function ParameterFieldWidgetValue({ savedValue, fields }) { + const values = normalizeValue(savedValue); + + const numberOfValues = values.length; + + // If there are multiple fields, turn off remapping since they might + // be remapped to different fields. + const shouldRemap = fields.length === 1; + + return numberOfValues > 1 ? 
( + renderNumberOfSelections(numberOfValues) + ) : ( + + ); +} + +ParameterFieldWidgetValue.propTypes = { + savedValue: PropTypes.array, + fields: PropTypes.array, +}; diff --git a/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.unit.spec.js b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.unit.spec.js new file mode 100644 index 000000000000..d6e802eb5156 --- /dev/null +++ b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/ParameterFieldWidgetValue/ParameterFieldWidgetValue.unit.spec.js @@ -0,0 +1,20 @@ +import React from "react"; + +import ParameterFieldWidgetValue from "./ParameterFieldWidgetValue"; +import { render, screen } from "@testing-library/react"; + +const value = "A value"; + +describe("when fields is empty array", () => { + it("renders savedValue if it is a single item", () => { + render(); + screen.getByText(value); + }); + + it("renders number of selections if multiple items", () => { + render( + , + ); + screen.getByText("2 selections"); + }); +}); diff --git a/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.js b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.js new file mode 100644 index 000000000000..f83230f4451e --- /dev/null +++ b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.js @@ -0,0 +1,7 @@ +export function normalizeValue(value) { + if (Array.isArray(value)) { + return value; + } + + return value ? 
[value] : []; +} diff --git a/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.unit.spec.js b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.unit.spec.js new file mode 100644 index 000000000000..72386cb63912 --- /dev/null +++ b/frontend/src/metabase/parameters/components/widgets/ParameterFieldWidget/normalizeValue.unit.spec.js @@ -0,0 +1,26 @@ +import { normalizeValue } from "./normalizeValue"; + +it("returns empty array if value is null", () => { + const value = null; + const expected = []; + + const normalized = normalizeValue(value); + + expect(normalized).toEqual(expected); +}); + +it("returns value if value is an array", () => { + const value = [1]; + + const normalized = normalizeValue(value); + + expect(normalized).toBe(value); +}); + +it("returns value as item of array if passed value is not an array", () => { + const value = 1; + + const normalized = normalizeValue(value); + + expect(normalized).toEqual([value]); +}); diff --git a/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx b/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx index fb7aec5f40e8..903e2fe90ea5 100644 --- a/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx +++ b/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx @@ -23,6 +23,7 @@ export default class TextWidget extends Component { commitImmediately: PropTypes.bool, placeholder: PropTypes.string, focusChanged: PropTypes.func, + disabled: PropTypes.bool, }; static defaultProps = { @@ -48,6 +49,7 @@ export default class TextWidget extends Component { className, isEditing, focusChanged: parentFocusChanged, + disabled, } = this.props; const defaultPlaceholder = this.state.isFocused ? "" @@ -89,6 +91,7 @@ export default class TextWidget extends Component { placeholder={ isEditing ? 
t`Enter a default value...` : defaultPlaceholder } + disabled={disabled} /> ); } diff --git a/frontend/src/metabase/plugins/builtin.js b/frontend/src/metabase/plugins/builtin.js index fd795f52b31f..4024ac3b9c6f 100644 --- a/frontend/src/metabase/plugins/builtin.js +++ b/frontend/src/metabase/plugins/builtin.js @@ -1,3 +1,4 @@ import "metabase/plugins/builtin/auth/password"; import "metabase/plugins/builtin/auth/google"; import "metabase/plugins/builtin/auth/ldap"; +import "metabase/plugins/builtin/settings/hosted"; diff --git a/frontend/src/metabase/plugins/builtin/settings/hosted.js b/frontend/src/metabase/plugins/builtin/settings/hosted.js new file mode 100644 index 000000000000..609ab7a6f072 --- /dev/null +++ b/frontend/src/metabase/plugins/builtin/settings/hosted.js @@ -0,0 +1,34 @@ +import _ from "underscore"; +import { updateIn } from "icepick"; +import { t } from "ttag"; +import MetabaseSettings from "metabase/lib/settings"; +import { PLUGIN_ADMIN_SETTINGS_UPDATES } from "metabase/plugins"; +import { SettingsCloudStoreLink } from "../../components/SettingsCloudStoreLink"; + +if (MetabaseSettings.isHosted()) { + PLUGIN_ADMIN_SETTINGS_UPDATES.push(sections => + _.omit(sections, ["email", "updates"]), + ); + + PLUGIN_ADMIN_SETTINGS_UPDATES.push(sections => + updateIn(sections, ["general", "settings"], settings => + _.reject(settings, setting => + ["site-url", "redirect-all-requests-to-https"].includes(setting.key), + ), + ), + ); + + PLUGIN_ADMIN_SETTINGS_UPDATES.push(sections => ({ + ...sections, + cloud: { + name: t`Cloud`, + settings: [ + { + key: "store-link", + display_name: t`Cloud Settings`, + widget: SettingsCloudStoreLink, + }, + ], + }, + })); +} diff --git a/frontend/src/metabase/plugins/components/PluginPlaceholder/PluginPlaceholder.jsx b/frontend/src/metabase/plugins/components/PluginPlaceholder/PluginPlaceholder.jsx new file mode 100644 index 000000000000..9f858ed6ec0b --- /dev/null +++ 
b/frontend/src/metabase/plugins/components/PluginPlaceholder/PluginPlaceholder.jsx @@ -0,0 +1,5 @@ +function PluginPlaceholder() { + return null; +} + +export default PluginPlaceholder; diff --git a/frontend/src/metabase/plugins/components/PluginPlaceholder/index.js b/frontend/src/metabase/plugins/components/PluginPlaceholder/index.js new file mode 100644 index 000000000000..aa21935936cc --- /dev/null +++ b/frontend/src/metabase/plugins/components/PluginPlaceholder/index.js @@ -0,0 +1 @@ +export { default } from "./PluginPlaceholder"; diff --git a/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.jsx b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.jsx new file mode 100644 index 000000000000..5cc2230510c5 --- /dev/null +++ b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.jsx @@ -0,0 +1,17 @@ +import React from "react"; +import { t } from "ttag"; +import MetabaseSettings from "metabase/lib/settings"; +import { Description, Link, LinkIcon } from "./SettingsCloudStoreLink.styled"; + +export function SettingsCloudStoreLink() { + const url = MetabaseSettings.storeUrl(); + return ( +
+ {t`Manage your Cloud account, including billing preferences and technical settings about this instance in your Metabase Store account.`} + + {t`Go to the Metabase Store`} + + +
+ ); +} diff --git a/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.styled.jsx b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.styled.jsx new file mode 100644 index 000000000000..0952a5b9ebd4 --- /dev/null +++ b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/SettingsCloudStoreLink.styled.jsx @@ -0,0 +1,30 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import ExternalLink from "metabase/components/ExternalLink"; +import Icon from "metabase/components/Icon"; + +export const Description = styled.p` + color: ${color("text-dark")}; + max-width: 360px; +`; + +export const Link = styled(ExternalLink)` + display: inline-flex; + align-items: center; + color: ${color("text-white")}; + font-weight: bold; + background-color: ${color("brand")}; + padding: 12px 18px; + border-radius: 6px; + + &:hover { + opacity: 0.88; + transition: all 200ms linear; + } +`; + +export const LinkIcon = styled(Icon)` + color: ${color("text-white")}; + opacity: 0.6; + margin-left: 8px; +`; diff --git a/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/index.js b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/index.js new file mode 100644 index 000000000000..6ae42da6388f --- /dev/null +++ b/frontend/src/metabase/plugins/components/SettingsCloudStoreLink/index.js @@ -0,0 +1 @@ +export * from "./SettingsCloudStoreLink"; diff --git a/frontend/src/metabase/plugins/index.js b/frontend/src/metabase/plugins/index.js index 33b1699ee26a..37f04eb47b8e 100644 --- a/frontend/src/metabase/plugins/index.js +++ b/frontend/src/metabase/plugins/index.js @@ -1,4 +1,9 @@ +import { t } from "ttag"; +import PluginPlaceholder from "metabase/plugins/components/PluginPlaceholder"; + // Plugin integration points. All exports must be objects or arrays so they can be mutated by plugins. 
+const object = () => ({}); +const array = () => []; // functions called when the application is started export const PLUGIN_APP_INIT_FUCTIONS = []; @@ -16,8 +21,9 @@ export const PLUGIN_ADMIN_ROUTES = []; // functions that update the sections export const PLUGIN_ADMIN_SETTINGS_UPDATES = []; -// admin permissions grid +// admin permissions export const PLUGIN_ADMIN_PERMISSIONS_TABLE_ROUTES = []; +export const PLUGIN_ADMIN_PERMISSIONS_TABLE_GROUP_ROUTES = []; export const PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_OPTIONS = []; export const PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_ACTIONS = { controlled: [], @@ -32,6 +38,10 @@ export const PLUGIN_ADMIN_PERMISSIONS_TABLE_FIELDS_PERMISSION_VALUE = { // user form fields, e.x. login attributes export const PLUGIN_ADMIN_USER_FORM_FIELDS = []; +// menu items in people management tab +export const PLUGIN_ADMIN_USER_MENU_ITEMS = []; +export const PLUGIN_ADMIN_USER_MENU_ROUTES = []; + // authentication providers export const PLUGIN_AUTH_PROVIDERS = []; @@ -44,6 +54,8 @@ export const PLUGIN_SELECTORS = { getLogoBackgroundClass: (state, props) => "bg-white", }; +export const PLUGIN_FORM_WIDGETS = {}; + // snippet sidebar export const PLUGIN_SNIPPET_SIDEBAR_PLUS_MENU_OPTIONS = []; export const PLUGIN_SNIPPET_SIDEBAR_ROW_RENDERERS = {}; @@ -53,3 +65,45 @@ export const PLUGIN_SNIPPET_SIDEBAR_HEADER_BUTTONS = []; export const PLUGIN_DASHBOARD_SUBSCRIPTION_PARAMETERS_SECTION_OVERRIDE = { Component: undefined, }; + +const AUTHORITY_LEVEL_REGULAR = { + type: null, + name: t`Regular`, + icon: "folder", +}; + +export const PLUGIN_COLLECTIONS = { + authorityLevelFormFields: [], + isRegularCollection: () => true, + REGULAR_COLLECTION: AUTHORITY_LEVEL_REGULAR, + AUTHORITY_LEVEL: { + [AUTHORITY_LEVEL_REGULAR.type]: AUTHORITY_LEVEL_REGULAR, + }, +}; + +export const PLUGIN_COLLECTION_COMPONENTS = { + CollectionAuthorityLevelIcon: PluginPlaceholder, +}; + +export const PLUGIN_MODERATION = { + QuestionModerationSection: PluginPlaceholder, + 
ModerationStatusIcon: PluginPlaceholder, + getStatusIconForQuestion: object, + getStatusIcon: object, + getModerationTimelineEvents: array, +}; + +export const PLUGIN_CACHING = { + dashboardCacheTTLFormField: null, + databaseCacheTTLFormField: null, + questionCacheTTLFormField: null, + getQuestionsImplicitCacheTTL: () => null, +}; + +export const PLUGIN_ADVANCED_PERMISSIONS = { + DataPermissionsHelp: null, + addDatabasePermissionOptions: (permissions, _value) => permissions, + addSchemaPermissionOptions: (permissions, _value) => permissions, + addTablePermissionOptions: (permissions, _value) => permissions, + isBlockPermission: _value => false, +}; diff --git a/frontend/src/metabase/public/components/EmbedFrame.jsx b/frontend/src/metabase/public/components/EmbedFrame.jsx index b8c37e5b34f9..01106250eed7 100644 --- a/frontend/src/metabase/public/components/EmbedFrame.jsx +++ b/frontend/src/metabase/public/components/EmbedFrame.jsx @@ -5,6 +5,7 @@ import { IFRAMED, initializeIframeResizer } from "metabase/lib/dom"; import { parseHashOptions } from "metabase/lib/browser"; import MetabaseSettings from "metabase/lib/settings"; +import { getValuePopulatedParameters } from "metabase/meta/Parameter"; import TitleAndDescription from "metabase/components/TitleAndDescription"; import Parameters from "metabase/parameters/components/Parameters/Parameters"; @@ -94,10 +95,10 @@ export default class EmbedFrame extends Component {
({ - ...p, - value: parameterValues && parameterValues[p.id], - }))} + parameters={getValuePopulatedParameters( + parameters, + parameterValues, + )} query={location.query} setParameterValue={setParameterValue} syncQueryString diff --git a/frontend/src/metabase/public/components/widgets/DisplayOptionsPane.jsx b/frontend/src/metabase/public/components/widgets/DisplayOptionsPane.jsx index ccf14cac8fc4..e21f1ca298e2 100644 --- a/frontend/src/metabase/public/components/widgets/DisplayOptionsPane.jsx +++ b/frontend/src/metabase/public/components/widgets/DisplayOptionsPane.jsx @@ -24,6 +24,7 @@ const DisplayOptionsPane = ({
onChangeDisplayOptions({ @@ -32,10 +33,10 @@ const DisplayOptionsPane = ({ }) } /> - {t`Border`}
onChangeDisplayOptions({ @@ -44,7 +45,6 @@ const DisplayOptionsPane = ({ }) } /> - {t`Title`}
embeddingParams[parameter.slug] === "locked", + ); + + return lockedParameters; + } + + getPreviewParamsBySlug() { const { resourceParameters } = this.props; const { embeddingParams, parameterValues } = this.state; - const params = {}; - for (const parameter of resourceParameters) { - if (embeddingParams[parameter.slug] === "locked") { - params[parameter.slug] = - parameter.id in parameterValues - ? parameterValues[parameter.id] - : null; - } - } - return params; + + const lockedParameters = this.getPreviewParameters( + resourceParameters, + embeddingParams, + ); + + const parameterSlugValuePairs = lockedParameters.map(parameter => { + const value = + parameter.id in parameterValues ? parameterValues[parameter.id] : null; + return [parameter.slug, value]; + }); + + return Object.fromEntries(parameterSlugValuePairs); } render() { @@ -164,10 +174,10 @@ export default class EmbedModalContent extends Component { displayOptions, } = this.state; - const params = this.getPreviewParams(); - - const previewParameters = resourceParameters.filter( - p => embeddingParams[p.slug] === "locked", + const previewParametersBySlug = this.getPreviewParamsBySlug(); + const previewParameters = this.getPreviewParameters( + resourceParameters, + embeddingParams, ); return ( @@ -229,7 +239,7 @@ export default class EmbedModalContent extends Component { token={getSignedToken( resourceType, resource.id, - params, + previewParametersBySlug, secretKey, embeddingParams, )} @@ -237,14 +247,14 @@ export default class EmbedModalContent extends Component { siteUrl, resourceType, resource.id, - params, + previewParametersBySlug, displayOptions, secretKey, embeddingParams, )} siteUrl={siteUrl} secretKey={secretKey} - params={params} + params={previewParametersBySlug} displayOptions={displayOptions} previewParameters={previewParameters} parameterValues={parameterValues} diff --git a/frontend/src/metabase/public/containers/PublicDashboard.jsx 
b/frontend/src/metabase/public/containers/PublicDashboard.jsx index e86080ff249e..70f7b26213a3 100644 --- a/frontend/src/metabase/public/containers/PublicDashboard.jsx +++ b/frontend/src/metabase/public/containers/PublicDashboard.jsx @@ -27,7 +27,7 @@ import { getParameterValues, } from "metabase/dashboard/selectors"; -import * as dashboardActions from "metabase/dashboard/dashboard"; +import * as dashboardActions from "metabase/dashboard/actions"; import { setPublicDashboardEndpoints, @@ -151,15 +151,7 @@ export default class PublicDashboard extends Component { parameterValues={parameterValues} setParameterValue={this.props.setParameterValue} actionButtons={ - buttons.length > 0 && ( -
- {buttons.map((button, index) => ( - - {button} - - ))} -
- ) + buttons.length > 0 &&
{buttons}
} > { + await this.run(); + this.setState({ initialized: true }); + }, + ); } catch (error) { console.error("error", error); setErrorPage(error); } } - setParameterValue = parameterValues => { + setParameterValue = (parameterId, value) => { this.setState( { parameterValues: { ...this.state.parameterValues, - ...parameterValues, + [parameterId]: value, }, }, this.run, @@ -146,7 +158,7 @@ export default class PublicQuestion extends Component { return; } - const parameters = getParameters(card); + const parameters = getParametersFromCard(card); try { this.setState({ result: null }); @@ -156,7 +168,7 @@ export default class PublicQuestion extends Component { // embeds apply parameter values server-side newResult = await maybeUsePivotEndpoint(EmbedApi.cardQuery, card)({ token, - ...getParametersBySlug(parameters, parameterValues), + ...getParameterValuesBySlug(parameters, parameterValues), }); } else if (uuid) { // public links currently apply parameters client-side @@ -180,7 +192,7 @@ export default class PublicQuestion extends Component { const { params: { uuid, token }, } = this.props; - const { card, result, parameterValues } = this.state; + const { card, result, initialized, parameterValues } = this.state; const actionButtons = result && ( ); - const parameters = card && getParametersWithExtras(card); + const parameters = + card && getValueAndFieldIdPopulatedParametersFromCard(card); return ( diff --git a/frontend/src/metabase/pulse/actions.js b/frontend/src/metabase/pulse/actions.js index ccbdbab7d448..d784e75130ae 100644 --- a/frontend/src/metabase/pulse/actions.js +++ b/frontend/src/metabase/pulse/actions.js @@ -7,13 +7,16 @@ import Pulses from "metabase/entities/pulses"; import { getEditingPulse, getPulseFormInput } from "./selectors"; import { setErrorPage } from "metabase/redux/app"; -import { getDefaultChannel, createChannel } from "metabase/lib/pulse"; +import { + getDefaultChannel, + createChannel, + NEW_PULSE_TEMPLATE, +} from "metabase/lib/pulse"; export 
const SET_EDITING_PULSE = "SET_EDITING_PULSE"; export const UPDATE_EDITING_PULSE = "UPDATE_EDITING_PULSE"; export const SAVE_PULSE = "SAVE_PULSE"; export const SAVE_EDITING_PULSE = "SAVE_EDITING_PULSE"; -export const DELETE_PULSE = "DELETE_PULSE"; export const TEST_PULSE = "TEST_PULSE"; export const FETCH_PULSE_FORM_INPUT = "FETCH_PULSE_FORM_INPUT"; @@ -43,12 +46,9 @@ export const setEditingPulse = createThunkAction(SET_EDITING_PULSE, function( (await PulseApi.form_input()).channels; const defaultChannelSpec = getDefaultChannel(channels); return { - name: null, - cards: [], + ...NEW_PULSE_TEMPLATE, channels: defaultChannelSpec ? [createChannel(defaultChannelSpec)] : [], - skip_if_empty: false, collection_id: initialCollectionId, - parameters: [], }; } }; diff --git a/frontend/src/metabase/pulse/components/PulseEdit.jsx b/frontend/src/metabase/pulse/components/PulseEdit.jsx index dc77ece0360a..64bb0919fe7b 100644 --- a/frontend/src/metabase/pulse/components/PulseEdit.jsx +++ b/frontend/src/metabase/pulse/components/PulseEdit.jsx @@ -146,7 +146,7 @@ export default class PulseEdit extends Component { const link = ( {t`dashboard subscriptions`} ); return ( diff --git a/frontend/src/metabase/pulse/components/PulseEditCards.jsx b/frontend/src/metabase/pulse/components/PulseEditCards.jsx index 6dcac2c3fec5..253b16ea9fb9 100644 --- a/frontend/src/metabase/pulse/components/PulseEditCards.jsx +++ b/frontend/src/metabase/pulse/components/PulseEditCards.jsx @@ -103,7 +103,7 @@ export default class PulseEditCards extends Component { notices.push({ type: "warning", head: t`Heads up`, - body: t`We'll show the first 10 columns and 20 rows of this table in your Pulse. If you email this, we'll add a file attachment with all columns and up to 2,000 rows.`, + body: t`We'll show the first 10 rows of this table in your Pulse. 
If you email this, we'll add a file attachment with all columns and up to 2,000 rows.`, }); } if (cardPreview.pulse_card_type == null && !hasAttachment) { diff --git a/frontend/src/metabase/query_builder/actions.js b/frontend/src/metabase/query_builder/actions.js index d8d88b8182c8..fbbe84c87931 100644 --- a/frontend/src/metabase/query_builder/actions.js +++ b/frontend/src/metabase/query_builder/actions.js @@ -29,7 +29,12 @@ import Utils from "metabase/lib/utils"; import { defer } from "metabase/lib/promise"; import Question from "metabase-lib/lib/Question"; import { FieldDimension } from "metabase-lib/lib/Dimension"; -import { cardIsEquivalent, cardQueryIsEquivalent } from "metabase/meta/Card"; +import { + cardIsEquivalent, + cardQueryIsEquivalent, + getValueAndFieldIdPopulatedParametersFromCard, +} from "metabase/meta/Card"; +import { getParameterValuesByIdFromQueryParams } from "metabase/meta/Parameter"; import { normalize } from "cljs/metabase.mbql.js"; import { @@ -49,6 +54,7 @@ import { getNativeEditorCursorOffset, getNativeEditorSelectedText, getSnippetCollectionId, + getQueryResults, } from "./selectors"; import { MetabaseApi, CardApi, UserApi } from "metabase/services"; @@ -123,6 +129,19 @@ export const onCloseChartSettings = createAction( "metabase/qb/CLOSE_CHART_SETTINGS", ); export const onOpenChartType = createAction("metabase/qb/OPEN_CHART_TYPE"); +export const onOpenQuestionDetails = createAction( + "metabase/qb/OPEN_QUESTION_DETAILS", +); +export const onCloseQuestionDetails = createAction( + "metabase/qb/CLOSE_QUESTION_DETAILS", +); +export const onOpenQuestionHistory = createAction( + "metabase/qb/OPEN_QUESTION_HISTORY", +); +export const onCloseQuestionHistory = createAction( + "metabase/qb/CLOSE_QUESTION_HISTORY", +); + export const onCloseChartType = createAction("metabase/qb/CLOSE_CHART_TYPE"); export const onCloseSidebars = createAction("metabase/qb/CLOSE_SIDEBARS"); @@ -300,16 +319,12 @@ export const RESET_QB = "metabase/qb/RESET_QB"; export 
const resetQB = createAction(RESET_QB); export const INITIALIZE_QB = "metabase/qb/INITIALIZE_QB"; -export const initializeQB = (location, params) => { +export const initializeQB = (location, params, queryParams) => { return async (dispatch, getState) => { // do this immediately to ensure old state is cleared before the user sees it dispatch(resetQB()); dispatch(cancelQuery()); - // preload metadata that's used in DataSelector - dispatch(Databases.actions.fetchList({ include: "tables" })); - dispatch(Databases.actions.fetchList({ saved: true })); - const { currentUser } = getState(); const cardId = Urls.extractEntityId(params.slug); @@ -507,12 +522,20 @@ export const initializeQB = (location, params) => { } card = question && question.card(); + const metadata = getMetadata(getState()); + const parameters = getValueAndFieldIdPopulatedParametersFromCard(card); + const parameterValues = getParameterValuesByIdFromQueryParams( + parameters, + queryParams, + metadata, + ); // Update the question to Redux state together with the initial state of UI controls dispatch.action(INITIALIZE_QB, { card, originalCard, uiControls, + parameterValues, }); // if we have loaded up a card that we can run then lets kick that off as well @@ -721,10 +744,26 @@ export const setParameterValue = createAction( }, ); +// refetches the card without triggering a run of the card's query +export const SOFT_RELOAD_CARD = "metabase/qb/SOFT_RELOAD_CARD"; +export const softReloadCard = createThunkAction(SOFT_RELOAD_CARD, () => { + return async (dispatch, getState) => { + const outdatedCard = getCard(getState()); + const action = await dispatch( + Questions.actions.fetch({ id: outdatedCard.id }, { reload: true }), + ); + + return Questions.HACK_getObjectFromAction(action); + }; +}); + export const RELOAD_CARD = "metabase/qb/RELOAD_CARD"; export const reloadCard = createThunkAction(RELOAD_CARD, () => { return async (dispatch, getState) => { - const outdatedCard = getState().qb.card; + const outdatedCard = 
getCard(getState()); + + dispatch(resetQB()); + const action = await dispatch( Questions.actions.fetch({ id: outdatedCard.id }, { reload: true }), ); @@ -891,6 +930,15 @@ export const updateQuestion = ( } // + // Native query should never be in notebook mode (metabase#12651) + if (getQueryBuilderMode(getState()) !== "view" && newQuestion.isNative()) { + await dispatch( + setQueryBuilderMode("view", { + shouldUpdateUrl: false, + }), + ); + } + // Replace the current question with a new one await dispatch.action(UPDATE_QUESTION, { card: newQuestion.card() }); @@ -1010,7 +1058,6 @@ export const apiUpdateQuestion = question => { // so we want the databases list to be re-fetched next time we hit "New Question" so it shows up dispatch(setRequestUnloaded(["entities", "databases"])); - dispatch(updateUrl(updatedQuestion.card(), { dirty: false })); MetabaseAnalytics.trackEvent( "QueryBuilder", "Update Card", @@ -1104,20 +1151,31 @@ export const QUERY_COMPLETED = "metabase/qb/QUERY_COMPLETED"; export const queryCompleted = (question, queryResults) => { return async (dispatch, getState) => { const [{ data }] = queryResults; + const [{ data: prevData }] = getQueryResults(getState()) || [{}]; const originalQuestion = getOriginalQuestion(getState()); const dirty = !originalQuestion || (originalQuestion && question.isDirtyComparedTo(originalQuestion)); + if (dirty) { + if (question.isNative()) { + question = question.syncColumnsAndSettings( + originalQuestion, + queryResults[0], + ); + } // Only update the display if the question is new or has been changed. // Otherwise, trust that the question was saved with the correct display. question = question // if we are going to trigger autoselection logic, check if the locked display no longer is "sensible". 
- .syncColumnsAndSettings(originalQuestion, queryResults[0]) - .maybeUnlockDisplay(getSensibleDisplays(data)) + .maybeUnlockDisplay( + getSensibleDisplays(data), + prevData && getSensibleDisplays(prevData), + ) .setDefaultDisplay() .switchTableScalar(data); } + dispatch.action(QUERY_COMPLETED, { card: question.card(), queryResults }); }; }; @@ -1333,3 +1391,14 @@ export const showChartSettings = createAction(SHOW_CHART_SETTINGS); // these are just temporary mappings to appease the existing QB code and it's naming prefs export const onUpdateVisualizationSettings = updateCardVisualizationSettings; export const onReplaceAllVisualizationSettings = replaceAllCardVisualizationSettings; + +export const REVERT_TO_REVISION = "metabase/qb/REVERT_TO_REVISION"; +export const revertToRevision = createThunkAction( + REVERT_TO_REVISION, + revision => { + return async dispatch => { + await revision.revert(); + await dispatch(reloadCard()); + }; + }, +); diff --git a/frontend/src/metabase/query_builder/components/AggregationWidget.jsx b/frontend/src/metabase/query_builder/components/AggregationWidget.jsx index 61216094c968..78bfd52583f0 100644 --- a/frontend/src/metabase/query_builder/components/AggregationWidget.jsx +++ b/frontend/src/metabase/query_builder/components/AggregationWidget.jsx @@ -48,7 +48,6 @@ export default class AggregationWidget extends React.Component { children, className, } = this.props; - console.log("aggregation", aggregation); const popover = this.state.isOpen && ( diff --git a/frontend/src/metabase/query_builder/components/AlertListPopoverContent.jsx b/frontend/src/metabase/query_builder/components/AlertListPopoverContent.jsx index 0260e1fbb527..26073b73b3a6 100644 --- a/frontend/src/metabase/query_builder/components/AlertListPopoverContent.jsx +++ b/frontend/src/metabase/query_builder/components/AlertListPopoverContent.jsx @@ -286,7 +286,6 @@ export class AlertScheduleText extends Component { return `${verbose ? 
"daily at " : "Daily, "} ${hour} ${amPm}`; } else if (scheduleType === "weekly") { - console.log(schedule); const hourOfDay = schedule.schedule_hour; const day = _.find( DAY_OF_WEEK_OPTIONS, diff --git a/frontend/src/metabase/query_builder/components/AlertModals.jsx b/frontend/src/metabase/query_builder/components/AlertModals.jsx index f3d2e3463200..03d0db7b68af 100644 --- a/frontend/src/metabase/query_builder/components/AlertModals.jsx +++ b/frontend/src/metabase/query_builder/components/AlertModals.jsx @@ -15,12 +15,14 @@ import Icon from "metabase/components/Icon"; import ChannelSetupModal from "metabase/components/ChannelSetupModal"; import ButtonWithStatus from "metabase/components/ButtonWithStatus"; import PulseEditChannels from "metabase/pulse/components/PulseEditChannels"; +import { getErrorMessage } from "metabase/components/form/FormMessage"; +import { AlertModalFooter, AlertModalError } from "./AlertModals.styled"; import User from "metabase/entities/users"; // actions import { createAlert, deleteAlert, updateAlert } from "metabase/alert/alert"; -import { apiUpdateQuestion } from "metabase/query_builder/actions"; +import { apiUpdateQuestion, updateUrl } from "metabase/query_builder/actions"; import { fetchPulseFormInput } from "metabase/pulse/actions"; // selectors @@ -71,7 +73,7 @@ const textStyle = { hasConfiguredAnyChannel: hasConfiguredAnyChannelSelector(state), hasConfiguredEmailChannel: hasConfiguredEmailChannelSelector(state), }), - { createAlert, fetchPulseFormInput, apiUpdateQuestion }, + { createAlert, fetchPulseFormInput, apiUpdateQuestion, updateUrl }, ) export class CreateAlertModalContent extends Component { props: { @@ -87,6 +89,7 @@ export class CreateAlertModalContent extends Component { this.state = { hasSeenEducationalScreen: MetabaseCookies.getHasSeenAlertSplash(), alert: getDefaultAlert(question, user, visualizationSettings), + formError: null, }; } @@ -113,21 +116,28 @@ export class CreateAlertModalContent extends Component { 
onAlertChange = alert => this.setState({ alert }); onCreateAlert = async () => { - const { createAlert, apiUpdateQuestion, onAlertCreated } = this.props; + const { + question, + createAlert, + apiUpdateQuestion, + updateUrl, + onAlertCreated, + } = this.props; const { alert } = this.state; - // Resave the question here (for persisting the x/y axes; see #6749) - await apiUpdateQuestion(); + try { + this.setState({ formError: null }); - await createAlert(alert); + await apiUpdateQuestion(question); + await createAlert(alert); + await updateUrl(question.card(), { dirty: false }); - // should close be triggered manually like this - // but the creation notification would appear automatically ...? - // OR should the modal visibility be part of QB redux state - // (maybe check how other modals are implemented) - onAlertCreated(); - - MetabaseAnalytics.trackEvent("Alert", "Create", alert.alert_condition); + onAlertCreated(); + MetabaseAnalytics.trackEvent("Alert", "Create", alert.alert_condition); + } catch (e) { + this.setState({ formError: e }); + throw e; + } }; proceedFromEducationalScreen = () => { @@ -146,7 +156,7 @@ export class CreateAlertModalContent extends Component { user, hasLoadedChannelInfo, } = this.props; - const { alert, hasSeenEducationalScreen } = this.state; + const { alert, hasSeenEducationalScreen, formError } = this.state; const channelRequirementsMet = isAdmin ? hasConfiguredAnyChannel @@ -186,14 +196,16 @@ export class CreateAlertModalContent extends Component { alert={alert} onAlertChange={this.onAlertChange} /> -
-
+ + {formError && ( + {getErrorMessage(formError)} + )} -
+
); @@ -290,7 +302,7 @@ export class AlertEducationalScreen extends Component { question: getQuestion(state), visualizationSettings: getVisualizationSettings(state), }), - { apiUpdateQuestion, updateAlert, deleteAlert }, + { apiUpdateQuestion, updateAlert, deleteAlert, updateUrl }, ) export class UpdateAlertModalContent extends Component { props: { @@ -306,26 +318,40 @@ export class UpdateAlertModalContent extends Component { super(); this.state = { modifiedAlert: props.alert, + formError: null, }; } onAlertChange = modifiedAlert => this.setState({ modifiedAlert }); onUpdateAlert = async () => { - const { apiUpdateQuestion, updateAlert, onAlertUpdated } = this.props; + const { + question, + apiUpdateQuestion, + updateAlert, + updateUrl, + onAlertUpdated, + } = this.props; const { modifiedAlert } = this.state; - // Resave the question here (for persisting the x/y axes; see #6749) - await apiUpdateQuestion(); + try { + this.setState({ formError: null }); - await updateAlert(modifiedAlert); - onAlertUpdated(); + await apiUpdateQuestion(); + await updateAlert(modifiedAlert); + await updateUrl(question.card(), { dirty: false }); - MetabaseAnalytics.trackEvent( - "Alert", - "Update", - modifiedAlert.alert_condition, - ); + onAlertUpdated(); + + MetabaseAnalytics.trackEvent( + "Alert", + "Update", + modifiedAlert.alert_condition, + ); + } catch (e) { + this.setState({ formError: e }); + throw e; + } }; onDeleteAlert = async () => { @@ -343,7 +369,7 @@ export class UpdateAlertModalContent extends Component { user, isAdmin, } = this.props; - const { modifiedAlert } = this.state; + const { modifiedAlert, formError } = this.state; const isCurrentUser = alert.creator.id === user.id; const title = isCurrentUser ? t`Edit your alert` : t`Edit alert`; @@ -367,14 +393,16 @@ export class UpdateAlertModalContent extends Component { /> )} -
-
+ + {formError && ( + {getErrorMessage(formError)} + )} -
+
); diff --git a/frontend/src/metabase/query_builder/components/AlertModals.styled.jsx b/frontend/src/metabase/query_builder/components/AlertModals.styled.jsx new file mode 100644 index 000000000000..ed338861bfe6 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/AlertModals.styled.jsx @@ -0,0 +1,14 @@ +import styled from "styled-components"; +import { space } from "metabase/styled-components/theme"; +import { color } from "metabase/lib/colors"; + +export const AlertModalFooter = styled.div` + display: flex; + justify-content: right; + align-items: center; + margin-top: ${space(3)}; +`; + +export const AlertModalError = styled.div` + color: ${color("error")}; +`; diff --git a/frontend/src/metabase/query_builder/components/ClampedDescription.jsx b/frontend/src/metabase/query_builder/components/ClampedDescription.jsx new file mode 100644 index 000000000000..14f2b4ee440a --- /dev/null +++ b/frontend/src/metabase/query_builder/components/ClampedDescription.jsx @@ -0,0 +1,28 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; + +import ClampedText from "metabase/components/ClampedText"; +import { TextButton } from "metabase/components/Button.styled"; + +ClampedDescription.propTypes = { + className: PropTypes.string, + description: PropTypes.string, + onEdit: PropTypes.func, +}; + +export function ClampedDescription({ className, description, onEdit }) { + if (!description && !onEdit) { + return null; + } + + return ( +
+ {description ? ( + + ) : ( + {t`Add a description`} + )} +
+ ); +} diff --git a/frontend/src/metabase/query_builder/components/DataSelector.css b/frontend/src/metabase/query_builder/components/DataSelector.css new file mode 100644 index 000000000000..5daae5f85c36 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/DataSelector.css @@ -0,0 +1,4 @@ +/* HACK: DataPopover should be below the search box */ +.DataPopoverContainer { + z-index: 2; +} diff --git a/frontend/src/metabase/query_builder/components/DataSelector.jsx b/frontend/src/metabase/query_builder/components/DataSelector.jsx index aad7f9ebdaea..c244e3876890 100644 --- a/frontend/src/metabase/query_builder/components/DataSelector.jsx +++ b/frontend/src/metabase/query_builder/components/DataSelector.jsx @@ -6,6 +6,8 @@ import { t } from "ttag"; import cx from "classnames"; import _ from "underscore"; +import { SAVED_QUESTIONS_VIRTUAL_DB_ID } from "metabase/lib/constants"; + import ListSearchField from "metabase/components/ListSearchField"; import ExternalLink from "metabase/components/ExternalLink"; import Icon from "metabase/components/Icon"; @@ -28,6 +30,8 @@ import SavedQuestionPicker from "./saved-question-picker/SavedQuestionPicker"; import { getMetadata } from "metabase/selectors/metadata"; import { getSchemaName } from "metabase/schema"; +import "./DataSelector.css"; + const MIN_SEARCH_LENGTH = 2; // chooses a database @@ -186,6 +190,7 @@ export class UnconnectedDataSelector extends Component { tableFilter: PropTypes.func, hasTableSearch: PropTypes.bool, canChangeDatabase: PropTypes.bool, + containerClassName: PropTypes.string, }; static defaultProps = { @@ -356,7 +361,8 @@ export class UnconnectedDataSelector extends Component { selectedDatabase && selectedSchema && selectedSchema.database.id !== selectedDatabase.id && - !selectedSchema.database.is_saved_questions; + selectedSchema.database.id !== SAVED_QUESTIONS_VIRTUAL_DB_ID; + const invalidTable = selectedSchema && selectedTable && @@ -461,7 +467,7 @@ export class UnconnectedDataSelector 
extends Component { const nextStep = this.getNextStep(); if (!nextStep) { await this.setStateWithComputedState(stateChange); - this.popover.current.toggle(); + this.popover.current && this.popover.current.toggle(); } else { await this.switchToStep(nextStep, stateChange, skipSteps); } @@ -665,7 +671,7 @@ export class UnconnectedDataSelector extends Component { }); renderActiveStep() { - const { combineDatabaseSchemaSteps, hasTableSearch } = this.props; + const { combineDatabaseSchemaSteps } = this.props; const props = { ...this.state, @@ -678,7 +684,7 @@ export class UnconnectedDataSelector extends Component { isLoading: this.state.isLoading, hasNextStep: !!this.getNextStep(), onBack: this.getPreviousStep() ? this.previousStep : null, - hasFiltering: !hasTableSearch, + hasFiltering: true, }; switch (this.state.activeStep) { @@ -767,6 +773,7 @@ export class UnconnectedDataSelector extends Component { autoWidth ref={this.popover} isInitiallyOpen={this.props.isInitiallyOpen} + containerClassName={this.props.containerClassName} triggerElement={this.getTriggerElement()} triggerClasses={this.getTriggerClasses()} horizontalAttachments={["center", "left", "right"]} @@ -964,6 +971,7 @@ const TablePicker = ({ onBack, isLoading, hasFiltering, + minTablesToShowSearch = 10, }) => { // In case DataSelector props get reseted if (!selectedDatabase) { @@ -1006,7 +1014,10 @@ const TablePicker = ({ }, ]; return ( -
+
= minTablesToShowSearch} onChange={item => onChangeTable(item.table)} itemIsSelected={item => item.table && selectedTable diff --git a/frontend/src/metabase/query_builder/components/DimensionList.jsx b/frontend/src/metabase/query_builder/components/DimensionList.jsx index 4031e084b6e2..e1559097fbe1 100644 --- a/frontend/src/metabase/query_builder/components/DimensionList.jsx +++ b/frontend/src/metabase/query_builder/components/DimensionList.jsx @@ -108,7 +108,11 @@ export default class DimensionList extends Component { onRemoveDimension, } = this.props; const subDimensions = - enableSubDimensions && item.dimension && item.dimension.dimensions(); + enableSubDimensions && + item.dimension && + // Do not display sub dimension if this is an FK (metabase#16787) + !item.dimension.field().isFK() && + item.dimension.dimensions(); const multiSelect = !!(onAddDimension || onRemoveDimension); @@ -142,7 +146,9 @@ export default class DimensionList extends Component { dimension={sectionDimension} dimensions={subDimensions} onChangeDimension={dimension => { - this.props.onChangeDimension(dimension); + this.props.onChangeDimension(dimension, { + isSubDimension: true, + }); onClose(); }} /> diff --git a/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx b/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx index e0cd914dde91..cd76c8eb21a3 100644 --- a/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx +++ b/frontend/src/metabase/query_builder/components/NativeQueryEditor.jsx @@ -648,43 +648,45 @@ export default class NativeQueryEditor extends Component { closeModal={this.props.closeSnippetModal} /> )} -
- - - {showSnippetSidebarButton && ( - + - )} - cancelQuery()} - compact - className="mx2 mb2 mt-auto" - style={{ width: 40, height: 40 }} - getTooltip={() => - (this.props.nativeEditorSelectedText - ? t`Run selected text` - : t`Run query`) + - " " + - (isMac() ? t`(⌘ + enter)` : t`(Ctrl + enter)`) - } - /> -
+ + {showSnippetSidebarButton && ( + + )} + cancelQuery()} + compact + className="mx2 mb2 mt-auto" + style={{ width: 40, height: 40 }} + getTooltip={() => + (this.props.nativeEditorSelectedText + ? t`Run selected text` + : t`Run query`) + + " " + + (isMac() ? t`(⌘ + enter)` : t`(Ctrl + enter)`) + } + /> +
+ )}
); diff --git a/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx b/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx index 166e13007e70..1591241637ce 100644 --- a/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx +++ b/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx @@ -29,6 +29,7 @@ const QueryDownloadWidget = ({ dashcardId, icon, params, + visualizationSettings, }) => (

{t`Download full results`}

@@ -54,7 +55,7 @@ const QueryDownloadWidget = ({ )} {EXPORT_FORMATS.map(type => ( - + {dashcardId && token ? ( ) : null} @@ -95,10 +96,17 @@ const QueryDownloadWidget = ({
); -const UnsavedQueryButton = ({ type, result: { json_query = {} }, card }) => ( +const UnsavedQueryButton = ({ + type, + result: { json_query = {} }, + visualizationSettings, +}) => ( {type} diff --git a/frontend/src/metabase/query_builder/components/QueryModals.jsx b/frontend/src/metabase/query_builder/components/QueryModals.jsx index 3ac806057100..c78583863e05 100644 --- a/frontend/src/metabase/query_builder/components/QueryModals.jsx +++ b/frontend/src/metabase/query_builder/components/QueryModals.jsx @@ -4,6 +4,8 @@ import React from "react"; import { t } from "ttag"; import _ from "underscore"; +import { MODAL_TYPES } from "metabase/query_builder/constants"; + import Modal from "metabase/components/Modal"; import SaveQuestionModal from "metabase/containers/SaveQuestionModal"; @@ -34,13 +36,13 @@ export default class QueryModals extends React.Component { onCloseModal(); } else { // HACK: in a timeout because save modal closes itself - setTimeout(() => onOpenModal("create-alert")); + setTimeout(() => onOpenModal(MODAL_TYPES.CREATE_ALERT)); } }; render() { const { modal, question, onCloseModal, onOpenModal } = this.props; - return modal === "save" ? ( + return modal === MODAL_TYPES.SAVE ? ( { await this.props.onCreate(card); - onOpenModal("saved"); + onOpenModal(MODAL_TYPES.SAVED); }} onClose={onCloseModal} /> - ) : modal === "saved" ? ( + ) : modal === MODAL_TYPES.SAVED ? ( { - onOpenModal("add-to-dashboard"); + onOpenModal(MODAL_TYPES.ADD_TO_DASHBOARD); }} /> - ) : modal === "add-to-dashboard-save" ? ( + ) : modal === MODAL_TYPES.ADD_TO_DASHBOARD_SAVE ? ( { await this.props.onSave(card); - onOpenModal("add-to-dashboard"); + onOpenModal(MODAL_TYPES.ADD_TO_DASHBOARD); }} onCreate={async card => { await this.props.onCreate(card); - onOpenModal("add-to-dashboard"); + onOpenModal(MODAL_TYPES.ADD_TO_DASHBOARD); }} onClose={onCloseModal} multiStep /> - ) : modal === "add-to-dashboard" ? ( + ) : modal === MODAL_TYPES.ADD_TO_DASHBOARD ? 
( - ) : modal === "create-alert" ? ( + ) : modal === MODAL_TYPES.CREATE_ALERT ? ( - ) : modal === "save-question-before-alert" ? ( + ) : modal === MODAL_TYPES.SAVE_QUESTION_BEFORE_ALERT ? ( - ) : modal === "save-question-before-embed" ? ( + ) : modal === MODAL_TYPES.SAVE_QUESTION_BEFORE_EMBED ? ( { await this.props.onSave(card, false); - onOpenModal("embed"); + onOpenModal(MODAL_TYPES.EMBED); }} onCreate={async card => { await this.props.onCreate(card, false); - onOpenModal("embed"); + onOpenModal(MODAL_TYPES.EMBED); }} onClose={onCloseModal} multiStep initialCollectionId={this.props.initialCollectionId} /> - ) : modal === "history" ? ( + ) : modal === MODAL_TYPES.HISTORY ? ( - ) : modal === "move" ? ( + ) : modal === MODAL_TYPES.MOVE ? ( - ) : modal === "archive" ? ( + ) : modal === MODAL_TYPES.ARCHIVE ? ( - ) : modal === "edit" ? ( + ) : modal === MODAL_TYPES.EDIT ? ( this.props.onSave(card, false)} /> - ) : modal === "embed" ? ( + ) : modal === MODAL_TYPES.EMBED ? ( - ) : modal === "clone" ? ( + ) : modal === MODAL_TYPES.CLONE ? 
( onOpenModal("saved")} + onSaved={() => onOpenModal(MODAL_TYPES.SAVED)} /> ) : null; diff --git a/frontend/src/metabase/query_builder/components/QueryVisualization.jsx b/frontend/src/metabase/query_builder/components/QueryVisualization.jsx index 86d25c73b28e..bc1df7c8bec4 100644 --- a/frontend/src/metabase/query_builder/components/QueryVisualization.jsx +++ b/frontend/src/metabase/query_builder/components/QueryVisualization.jsx @@ -126,6 +126,7 @@ export default class QueryVisualization extends Component { diff --git a/frontend/src/metabase/query_builder/components/QuestionActionButtons.jsx b/frontend/src/metabase/query_builder/components/QuestionActionButtons.jsx new file mode 100644 index 000000000000..33d7651419b1 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/QuestionActionButtons.jsx @@ -0,0 +1,84 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { t } from "ttag"; + +import { MODAL_TYPES } from "metabase/query_builder/constants"; + +import Button from "metabase/components/Button"; +import Tooltip from "metabase/components/Tooltip"; +import { Container } from "./QuestionActionButtons.styled"; + +export const EDIT_TESTID = "edit-details-button"; +export const ADD_TO_DASH_TESTID = "add-to-dashboard-button"; +export const MOVE_TESTID = "move-button"; +export const CLONE_TESTID = "clone-button"; +export const ARCHIVE_TESTID = "archive-button"; + +const ICON_SIZE = 18; + +QuestionActionButtons.propTypes = { + canWrite: PropTypes.bool.isRequired, + onOpenModal: PropTypes.func.isRequired, +}; + +export default QuestionActionButtons; + +function QuestionActionButtons({ canWrite, onOpenModal }) { + return ( + + {canWrite && ( + + )} diff --git a/frontend/src/metabase/query_builder/components/notebook/NotebookCell.jsx b/frontend/src/metabase/query_builder/components/notebook/NotebookCell.jsx index c006ae0f534a..642bd6cc248b 100644 --- a/frontend/src/metabase/query_builder/components/notebook/NotebookCell.jsx +++ 
b/frontend/src/metabase/query_builder/components/notebook/NotebookCell.jsx @@ -2,7 +2,7 @@ import React from "react"; import { Flex } from "grid-styled"; -import styled from "styled-components"; +import styled, { css } from "styled-components"; import Icon from "metabase/components/Icon"; @@ -14,61 +14,114 @@ export const NotebookCell = styled(Flex).attrs({ })` border-radius: 8px; background-color: ${props => alpha(props.color, 0.1)}; + padding: ${props => props.padding || "14px"}; `; -NotebookCell.defaultProps = { - px: 2, - pt: 2, - pb: 1, -}; - NotebookCell.displayName = "NotebookCell"; -export const NotebookCellItem = styled(Flex).attrs({ - align: "center", - children: ({ icon, children }) => [ - icon && , - children, - ], -})` +const NotebookCellItemContainer = styled(Flex).attrs({ align: "center" })` font-weight: bold; - border: 2px solid transparent; - border-radius: 6px; color: ${props => (props.inactive ? props.color : "white")}; - background-color: ${props => (props.inactive ? "transparent" : props.color)}; + border-radius: 6px; + margin-right: 4px; + + border: 2px solid transparent; border-color: ${props => props.inactive ? alpha(props.color, 0.25) : "transparent"}; + &:hover { - background-color: ${props => !props.inactive && alpha(props.color, 0.8)}; border-color: ${props => props.inactive && alpha(props.color, 0.8)}; } - transition: background 300ms linear, border 300ms linear; - > .Icon { + + transition: border 300ms linear; + + .Icon-close { opacity: 0.6; } `; -NotebookCellItem.defaultProps = { - p: 1, - mr: 1, - mb: 1, -}; +const CONTAINER_PADDING = "10px"; + +const NotebookCellItemContentContainer = styled.div` + display: flex; + align-items: center; + padding: ${CONTAINER_PADDING}; + background-color: ${props => (props.inactive ? 
"transparent" : props.color)}; + + &:hover { + background-color: ${props => !props.inactive && alpha(props.color, 0.8)}; + } + + ${props => + !!props.border && + css` + border-${props.border}: 1px solid ${alpha("white", 0.25)}; + `} + + ${props => + props.roundedCorners.includes("left") && + css` + border-top-left-radius: 6px; + border-bottom-left-radius: 6px; + `} + + ${props => + props.roundedCorners.includes("right") && + css` + border-top-right-radius: 6px; + border-bottom-right-radius: 6px; + `} + + transition: background 300ms linear; +`; + +export function NotebookCellItem({ + inactive, + color, + containerStyle, + right, + rightContainerStyle, + children, + ...props +}) { + const hasRightSide = React.isValidElement(right); + const mainContentRoundedCorners = ["left"]; + if (!hasRightSide) { + mainContentRoundedCorners.push("right"); + } + return ( + + + {children} + + {hasRightSide && ( + + {right} + + )} + + ); +} NotebookCellItem.displayName = "NotebookCellItem"; +NotebookCell.CONTAINER_PADDING = CONTAINER_PADDING; export const NotebookCellAdd = styled(NotebookCellItem).attrs({ inactive: ({ initialAddText }) => initialAddText, // eslint-disable-next-line react/display-name children: ({ initialAddText }) => initialAddText || , -})` - > .Icon { - opacity: 1; - } -`; - -NotebookCellAdd.defaultProps = { - mb: 1, -}; +})``; NotebookCellAdd.displayName = "NotebookCellAdd"; diff --git a/frontend/src/metabase/query_builder/components/notebook/NotebookStep.jsx b/frontend/src/metabase/query_builder/components/notebook/NotebookStep.jsx index 321143315582..8b3fd9af1490 100644 --- a/frontend/src/metabase/query_builder/components/notebook/NotebookStep.jsx +++ b/frontend/src/metabase/query_builder/components/notebook/NotebookStep.jsx @@ -4,6 +4,8 @@ import React from "react"; import { t } from "ttag"; import _ from "underscore"; +import styled from "styled-components"; + import { color as c, lighten, darken } from "metabase/lib/colors"; import Tooltip from 
"metabase/components/Tooltip"; @@ -88,6 +90,11 @@ const STEP_UI = { }, }; +function getTestId(step) { + const { type, stageIndex, itemIndex } = step; + return `step-${type}-${stageIndex || 0}-${itemIndex || 0}`; +} + const CONTENT_WIDTH = [11 / 12, 8 / 12]; export default class NotebookStep extends React.Component { @@ -135,35 +142,29 @@ export default class NotebookStep extends React.Component { actions.sort((a, b) => (b.priority || 0) - (a.priority || 0)); const actionButtons = actions.map(action => action.button); - let onRemove = null; - if (step.revert) { - const reverted = step.revert(step.query).clean(); - if (reverted.isValid()) { - onRemove = () => reverted.update(updateQuery); - } - } - return ( - - {(title || onRemove) && ( - - {title} - {onRemove && ( - - )} - - )} + + + {title} + step.revert(step.query).update(updateQuery)} + data-testid="remove-step" + /> + {NotebookStepComponent && ( @@ -198,16 +199,20 @@ export default class NotebookStep extends React.Component { /> )} - {actionButtons.length > 0 && {actionButtons}} + {actionButtons.length > 0 && ( + + {actionButtons} + + )} ); } } -const ColorButton = Button.extend` +const ColorButton = styled(Button)` border: none; - color: ${({ color }) => (color ? color : c("text-medium"))} + color: ${({ color }) => (color ? color : c("text-medium"))}; background-color: ${({ color }) => (color ? lighten(color, 0.61) : null)}; &:hover { color: ${({ color }) => (color ? 
darken(color, 0.115) : color("brand"))}; diff --git a/frontend/src/metabase/query_builder/components/notebook/lib/steps.js b/frontend/src/metabase/query_builder/components/notebook/lib/steps.js index df1b19e7f4ab..86ff21204c35 100644 --- a/frontend/src/metabase/query_builder/components/notebook/lib/steps.js +++ b/frontend/src/metabase/query_builder/components/notebook/lib/steps.js @@ -110,7 +110,14 @@ const STEPS: StepDefinition[] = [ revert: (query, index) => query.removeJoin(index), clean: (query, index) => { const join = query.joins()[index]; - return !join || join.isValid() ? query : query.removeJoin(index); + if (!join || join.isValid() || join.hasGaps()) { + return query; + } + const cleanJoin = join.clean(); + if (cleanJoin.isValid()) { + return query.updateJoin(index, cleanJoin); + } + return query.removeJoin(index); }, }, { diff --git a/frontend/src/metabase/query_builder/components/notebook/steps/DataStep.jsx b/frontend/src/metabase/query_builder/components/notebook/steps/DataStep.jsx index 1138332df022..314c963a9d33 100644 --- a/frontend/src/metabase/query_builder/components/notebook/steps/DataStep.jsx +++ b/frontend/src/metabase/query_builder/components/notebook/steps/DataStep.jsx @@ -4,49 +4,57 @@ import { connect } from "react-redux"; import { t } from "ttag"; import { DatabaseSchemaAndTableDataSelector } from "metabase/query_builder/components/DataSelector"; -import { NotebookCell, NotebookCellItem } from "../NotebookCell"; - import { getDatabasesList } from "metabase/query_builder/selectors"; -function DataStep({ color, query, databases, updateQuery }) { +import { NotebookCell, NotebookCellItem } from "../NotebookCell"; +import { + FieldsPickerIcon, + FieldPickerContentContainer, + FIELDS_PICKER_STYLES, +} from "../FieldsPickerIcon"; +import FieldsPicker from "./FieldsPicker"; + +function DataStep({ color, query, updateQuery }) { const table = query.table(); + const canSelectTableColumns = table && query.isRaw(); return ( - - query - 
.setTableId(tableId) - .setDefaultQuery() - .update(updateQuery) - } - isInitiallyOpen={!query.tableId()} - triggerElement={ - !query.tableId() ? ( - - {t`Pick your starting data`} - - ) : ( - - {table && table.displayName()} - + ) } - /> - {table && query.isRaw() && ( - + + query + .setTableId(tableId) + .setDefaultQuery() + .update(updateQuery) + } + isInitiallyOpen={!query.tableId()} + triggerElement={ + + {table ? table.displayName() : t`Pick your starting data`} + + } /> - )} + ); } @@ -55,16 +63,14 @@ export default connect(state => ({ databases: getDatabasesList(state) }))( DataStep, ); -import FieldsPicker from "./FieldsPicker"; - -const DataFieldsPicker = ({ className, query, updateQuery }) => { +const DataFieldsPicker = ({ query, updateQuery, ...props }) => { const dimensions = query.tableDimensions(); const selectedDimensions = query.columnDimensions(); const selected = new Set(selectedDimensions.map(d => d.key())); const fields = query.fields(); return ( d.key())); return (
    {(onSelectAll || onSelectNone) && ( -
  • { - if (isAll) { - onSelectNone(); - } else { - onSelectAll(); - } - }} - > +
  • { + if (isAll) { + onSelectNone(); + } else { + onSelectAll(); + } + }} className="mr1" /> - {isAll && onSelectNone ? t`Select None` : t`Select All`}
  • )} {dimensions.map(dimension => ( -
  • { - onToggleDimension(dimension, !selected.has(dimension.key())); - }} - > - - {dimension.displayName()} +
  • + { + onToggleDimension(dimension, !selected.has(dimension.key())); + }} + className="mr1" + />
  • ))}
diff --git a/frontend/src/metabase/query_builder/components/notebook/steps/JoinStep.jsx b/frontend/src/metabase/query_builder/components/notebook/steps/JoinStep.jsx index f8175ed5b4bd..caaa3ddcaa17 100644 --- a/frontend/src/metabase/query_builder/components/notebook/steps/JoinStep.jsx +++ b/frontend/src/metabase/query_builder/components/notebook/steps/JoinStep.jsx @@ -1,23 +1,71 @@ -/* eslint-disable react/prop-types */ -import React from "react"; - -import { Flex } from "grid-styled"; -import cx from "classnames"; +import React, { useRef } from "react"; +import PropTypes from "prop-types"; import _ from "underscore"; import { t } from "ttag"; +import PopoverWithTrigger from "metabase/components/PopoverWithTrigger"; + +import { DatabaseSchemaAndTableDataSelector } from "metabase/query_builder/components/DataSelector"; +import FieldList from "metabase/query_builder/components/FieldList"; +import Join from "metabase-lib/lib/queries/structured/Join"; +import { isDateTimeField } from "metabase/lib/query/field_ref"; + import { NotebookCell, NotebookCellItem, NotebookCellAdd, } from "../NotebookCell"; +import { + FieldsPickerIcon, + FieldPickerContentContainer, + FIELDS_PICKER_STYLES, +} from "../FieldsPickerIcon"; +import FieldsPicker from "./FieldsPicker"; +import { + DimensionContainer, + DimensionSourceName, + JoinStepRoot, + JoinClausesContainer, + JoinClauseRoot, + JoinClauseContainer, + JoinStrategyIcon, + JoinTypeSelectRoot, + JoinTypeOptionRoot, + JoinTypeIcon, + JoinDimensionControlsContainer, + JoinWhereConditionLabelContainer, + JoinWhereConditionLabel, + JoinConditionLabel, + RemoveDimensionIcon, + RemoveJoinIcon, + Row, +} from "./JoinStep.styled"; -import Icon from "metabase/components/Icon"; -import PopoverWithTrigger from "metabase/components/PopoverWithTrigger"; +const stepShape = { + id: PropTypes.string.isRequired, + type: PropTypes.string.isRequired, + query: PropTypes.object.isRequired, + previewQuery: PropTypes.object, + valid: 
PropTypes.bool.isRequired, + visible: PropTypes.bool.isRequired, + stageIndex: PropTypes.number.isRequired, + itemIndex: PropTypes.number.isRequired, + update: PropTypes.func.isRequired, + revert: PropTypes.func.isRequired, + clean: PropTypes.func.isRequired, + actions: PropTypes.array.isRequired, -import { DatabaseSchemaAndTableDataSelector } from "metabase/query_builder/components/DataSelector"; -import FieldList from "metabase/query_builder/components/FieldList"; -import Join from "metabase-lib/lib/queries/structured/Join"; + previous: stepShape, + next: stepShape, +}; + +const joinStepPropTypes = { + query: PropTypes.object.isRequired, + step: PropTypes.shape(stepShape).isRequired, + color: PropTypes.string, + isLastOpened: PropTypes.bool, + updateQuery: PropTypes.func.isRequired, +}; export default function JoinStep({ color, @@ -25,7 +73,6 @@ export default function JoinStep({ step, updateQuery, isLastOpened, - ...props }) { const isSingleJoinStep = step.itemIndex != null; let joins = query.joins(); @@ -37,190 +84,423 @@ export default function JoinStep({ joins = [new Join({ fields: "all" }, query.joins().length, query)]; } const valid = _.all(joins, join => join.isValid()); + + function addNewJoinClause() { + query.join(new Join({ fields: "all" })).update(updateQuery); + } + return ( - - - {joins.map((join, index) => ( - 1} - updateQuery={updateQuery} - isLastOpened={isLastOpened && index === join.length - 1} - /> - ))} - + + + {joins.map((join, index) => { + const isLast = index === joins.length - 1; + return ( + + 1} + updateQuery={updateQuery} + isLastOpened={isLastOpened && isLast} + /> + + ); + })} + {!isSingleJoinStep && valid && ( { - query.join(new Join({ fields: "all" })).update(updateQuery); - }} + onClick={addNewJoinClause} /> )} - + ); } -class JoinClause extends React.Component { - render() { - const { color, join, updateQuery, showRemove, ...props } = this.props; - const query = join.query(); - if (!query) { - return null; +JoinStep.propTypes = 
joinStepPropTypes; + +const joinClausePropTypes = { + color: PropTypes.string, + join: PropTypes.object, + updateQuery: PropTypes.func, + showRemove: PropTypes.bool, +}; + +function JoinClause({ color, join, updateQuery, showRemove }) { + const joinDimensionPickersRef = useRef([]); + const parentDimensionPickersRef = useRef([]); + + const query = join.query(); + if (!query) { + return null; + } + + const parentDimensions = join.parentDimensions(); + const parentDimensionOptions = join.parentDimensionOptions(); + const joinDimensions = join.joinDimensions(); + const joinDimensionOptions = join.joinDimensionOptions(); + + const joinedTable = join.joinedTable(); + + const joinConditions = join.getConditions(); + const displayConditions = joinConditions.length > 0 ? joinConditions : [[]]; + + const hasAtLeastOneDimensionSelected = join.getDimensions().length > 0; + + let lhsTable; + if (join.index() === 0) { + // first join's lhs is always the parent table + lhsTable = join.parentTable(); + } else if (join.parentDimensions().length > 0) { + // subsequent can be one of the previously joined tables + // NOTE: `lhsDimension` would probably be a better name for `parentDimension` + lhsTable = join.parentDimensions()[0]?.field().table; + } + + function onSourceTableSet(newJoin) { + if (!newJoin.parentDimensions().length) { + setTimeout(() => { + parentDimensionPickersRef.current[0]?.open(); + }); } + } - let lhsTable; - if (join.index() === 0) { - // first join's lhs is always the parent table - lhsTable = join.parentTable(); - } else if (join.parentDimension()) { - // subsequent can be one of the previously joined tables - // NOTE: `lhsDimension` would probably be a better name for `parentDimension` - lhsTable = join.parentDimension().field().table; + function onParentDimensionChange(index, fieldRef, { overwrite } = {}) { + join + .setParentDimension({ + index, + dimension: fieldRef, + overwriteTemporalUnit: overwrite, + }) + .setDefaultAlias() + .parent() + 
.update(updateQuery); + if (!join.joinDimensions()[index]) { + joinDimensionPickersRef.current[index]?.open(); } + } - const joinedTable = join.joinedTable(); - const strategyOption = join.strategyOption(); - return ( - - - {(lhsTable && lhsTable.displayName()) || `Previous results`} + function onJoinDimensionChange(index, fieldRef, { overwrite } = {}) { + join + .setJoinDimension({ + index, + dimension: fieldRef, + overwriteTemporalUnit: overwrite, + }) + .parent() + .update(updateQuery); + } + + function addNewDimensionsPair(index) { + join + .addEmptyDimensionsPair() + .parent() + .update(updateQuery); + + // Need to wait, so a new dimensions pair renders + // and a corresponding ref is created, so we can reference it here + setTimeout(() => { + parentDimensionPickersRef.current[index]?.open(); + }); + } + + function removeJoin() { + join.remove().update(updateQuery); + } + + return ( + + + + {lhsTable?.displayName() || t`Previous results`} - - ) : ( - - {`Choose a join type`} - - ) - } - > - {({ onClose }) => ( - { + + + + + + {joinedTable && ( + + + + + + {displayConditions.map((condition, index) => { + const isFirst = index === 0; + const isLast = index === displayConditions.length - 1; + + function removeParentDimension() { join - .setStrategy(strategy) + .setParentDimension({ index, dimension: null }) .parent() .update(updateQuery); - onClose(); - }} - options={join.strategyOptions()} - /> - )} -
- - d)} - tableFilter={table => table.db_id === query.database().id} - selectedDatabaseId={query.databaseId()} - selectedTableId={join.joinSourceTableId()} - setSourceTableFn={tableId => { - const newJoin = join - .setJoinSourceTableId(tableId) - .setDefaultCondition() - .setDefaultAlias(); - newJoin.parent().update(updateQuery); - // _parentDimensionPicker won't be rendered until next update - if (!newJoin.parentDimension()) { - setTimeout(() => { - this._parentDimensionPicker.open(); - }); - } - }} - isInitiallyOpen={join.joinSourceTableId() == null} - triggerElement={ - - {joinedTable ? joinedTable.displayName() : t`Pick a table...`} - - } - /> + } - {joinedTable && ( - - where - - { + function removeJoinDimension() { join - .setParentDimension(fieldRef) - .setDefaultAlias() + .setJoinDimension({ index, dimension: null }) .parent() .update(updateQuery); - if (!join.joinDimension()) { - this._joinDimensionPicker.open(); - } - }} - ref={ref => (this._parentDimensionPicker = ref)} - /> - - = - - { + } + + function removeDimensionPair() { join - .setJoinDimension(fieldRef) + .removeCondition(index) .parent() .update(updateQuery); - }} - ref={ref => (this._joinDimensionPicker = ref)} - /> - - )} + } + + return ( + + + + onParentDimensionChange(index, fieldRef, opts) + } + onRemove={removeParentDimension} + ref={ref => + (parentDimensionPickersRef.current[index] = ref) + } + data-testid="parent-dimension" + /> + = + + + + onJoinDimensionChange(index, fieldRef, opts) + } + onRemove={removeJoinDimension} + ref={ref => + (joinDimensionPickersRef.current[index] = ref) + } + data-testid="join-dimension" + /> + {isLast ? 
( + + addNewDimensionsPair(index + 1) + } + onRemoveDimensionPair={removeDimensionPair} + /> + ) : ( + {t`and`} + )} + + + ); + })} + + + )} + + {showRemove && } + + ); +} + +JoinClause.propTypes = joinClausePropTypes; + +const joinDimensionsRightControlPropTypes = { + isValidJoin: PropTypes.bool.isRequired, + isFirst: PropTypes.bool.isRequired, + color: PropTypes.string, + onAddNewDimensionPair: PropTypes.func.isRequired, + onRemoveDimensionPair: PropTypes.func.isRequired, +}; + +function JoinDimensionsRightControl({ + isValidJoin, + isFirst, + color, + onAddNewDimensionPair, + onRemoveDimensionPair, +}) { + if (isValidJoin) { + return ( + + ); + } + if (!isFirst) { + return ( + + ); + } + return null; +} - {join.isValid() && ( +JoinDimensionsRightControl.propTypes = joinDimensionsRightControlPropTypes; + +const joinTablePickerPropTypes = { + join: PropTypes.object, + query: PropTypes.object, + joinedTable: PropTypes.object, + color: PropTypes.string, + updateQuery: PropTypes.func, + onSourceTableSet: PropTypes.func.isRequired, +}; + +function JoinTablePicker({ + join, + query, + joinedTable, + color, + updateQuery, + onSourceTableSet, +}) { + const databases = [ + query.database(), + query.database().savedQuestionsDatabase(), + ].filter(Boolean); + + function onChange(tableId) { + const newJoin = join + .setJoinSourceTableId(tableId) + .setDefaultCondition() + .setDefaultAlias(); + newJoin.parent().update(updateQuery); + onSourceTableSet(newJoin); + } + + return ( + - )} + ) + } + containerStyle={FIELDS_PICKER_STYLES.notebookItemContainer} + rightContainerStyle={FIELDS_PICKER_STYLES.notebookRightItemContainer} + > + table.db_id === query.database().id} + selectedDatabaseId={query.databaseId()} + selectedTableId={join.joinSourceTableId()} + setSourceTableFn={onChange} + isInitiallyOpen={join.joinSourceTableId() == null} + triggerElement={ + + {joinedTable ? 
joinedTable.displayName() : t`Pick a table...`} + + } + /> + + ); +} - {showRemove && ( - join.remove().update(updateQuery)} - /> - )} - - ); +JoinTablePicker.propTypes = joinTablePickerPropTypes; + +const joinTypePickerPropTypes = { + join: PropTypes.object, + color: PropTypes.string, + updateQuery: PropTypes.func, +}; + +function JoinTypePicker({ join, color, updateQuery }) { + const strategyOption = join.strategyOption(); + + function onChange(strategy) { + join + .setStrategy(strategy) + .parent() + .update(updateQuery); } + + return ( + + ) : ( + + {`Choose a join type`} + + ) + } + > + {({ onClose }) => ( + { + onChange(strategy); + onClose(); + }} + options={join.strategyOptions()} + /> + )} + + ); } +JoinTypePicker.propTypes = joinTypePickerPropTypes; + +const joinStrategyOptionShape = { + name: PropTypes.string.isRequired, + value: PropTypes.string.isRequired, + icon: PropTypes.string.isRequired, +}; + +const joinTypeSelectPropTypes = { + value: PropTypes.string, + onChange: PropTypes.func.isRequired, + options: PropTypes.arrayOf(PropTypes.shape(joinStrategyOptionShape)) + .isRequired, +}; + function JoinTypeSelect({ value, onChange, options }) { return ( -
+ {options.map(option => ( ))} -
+ ); } +JoinTypeSelect.propTypes = joinTypeSelectPropTypes; + +const joinTypeOptionPropTypes = { + ...joinStrategyOptionShape, + selected: PropTypes.bool.isRequired, + onChange: PropTypes.func.isRequired, +}; + function JoinTypeOption({ name, value, icon, selected, onChange }) { return ( - onChange(value)} - > - + onChange(value)}> + {name} - + ); } +JoinTypeOption.propTypes = joinTypeOptionPropTypes; + +const joinDimensionCellItemPropTypes = { + dimension: PropTypes.object, + onRemove: PropTypes.func.isRequired, + color: PropTypes.string, + testID: PropTypes.string, +}; + +function getDimensionSourceName(dimension) { + return dimension.field()?.table?.display_name || t`Previous results`; +} + +function getDimensionDisplayName(dimension) { + if (!dimension) { + return t`Pick a column...`; + } + if (dimension.temporalUnit()) { + return `${dimension.displayName()}: ${dimension.subDisplayName()}`; + } + return dimension.displayName(); +} + +function JoinDimensionCellItem({ dimension, color, testID, onRemove }) { + return ( + + +
+ {dimension && ( + + {getDimensionSourceName(dimension)} + + )} + {getDimensionDisplayName(dimension)} +
+ {dimension && } +
+
+ ); +} + +JoinDimensionCellItem.propTypes = joinDimensionCellItemPropTypes; + +const joinDimensionPickerPropTypes = { + dimension: PropTypes.object, + onChange: PropTypes.func.isRequired, + onRemove: PropTypes.func.isRequired, + options: PropTypes.shape({ + count: PropTypes.number.isRequired, + fks: PropTypes.array.isRequired, + dimensions: PropTypes.arrayOf(PropTypes.object).isRequired, + }).isRequired, + query: PropTypes.object.isRequired, + color: PropTypes.string, + "data-testid": PropTypes.string, +}; + class JoinDimensionPicker extends React.Component { open() { this._popover.open(); } + render() { - const { dimension, onChange, options, query, color } = this.props; + const { dimension, onChange, onRemove, options, query, color } = this.props; + const testID = this.props["data-testid"] || "join-dimension"; + + function onRemoveDimension(e) { + e.stopPropagation(); // don't trigger picker popover + onRemove(); + } + return ( (this._popover = ref)} triggerElement={ - - {dimension ? 
dimension.displayName() : `Pick a column...`} - + testID={testID} + onRemove={onRemoveDimension} + /> } > {({ onClose }) => ( @@ -281,10 +620,16 @@ class JoinDimensionPicker extends React.Component { fieldOptions={options} table={query.table()} query={query} - onFieldChange={field => { - onChange(field); + onFieldChange={(field, { isSubDimension = false } = {}) => { + if (isDateTimeField(field)) { + onChange(field, { overwrite: isSubDimension }); + } else { + onChange(field); + } onClose(); }} + enableSubDimensions + data-testid={`${testID}-picker`} /> )} @@ -292,47 +637,61 @@ class JoinDimensionPicker extends React.Component { } } -import FieldsPicker from "./FieldsPicker"; +JoinDimensionPicker.propTypes = joinDimensionPickerPropTypes; -const JoinFieldsPicker = ({ className, join, updateQuery }) => { +const joinFieldsPickerPropTypes = { + join: PropTypes.object.isRequired, + updateQuery: PropTypes.func.isRequired, +}; + +const JoinFieldsPicker = ({ join, updateQuery, ...props }) => { const dimensions = join.joinedDimensions(); const selectedDimensions = join.fieldsDimensions(); const selected = new Set(selectedDimensions.map(d => d.key())); + + function onSelectAll() { + join + .setFields("all") + .parent() + .update(updateQuery); + } + + function onSelectNone() { + join + .setFields("none") + .parent() + .update(updateQuery); + } + + function onToggleDimension(dimension) { + join + .setFields( + dimensions + .filter(d => { + if (d === dimension) { + return !selected.has(d.key()); + } else { + return selected.has(d.key()); + } + }) + .map(d => d.mbql()), + ) + .parent() + .update(updateQuery); + } + return ( - join - .setFields("all") - .parent() - .update(updateQuery) - } - onSelectNone={() => - join - .setFields("none") - .parent() - .update(updateQuery) - } - onToggleDimension={(dimension, enable) => { - join - .setFields( - dimensions - .filter(d => { - if (d === dimension) { - return !selected.has(d.key()); - } else { - return selected.has(d.key()); - } - }) 
- .map(d => d.mbql()), - ) - .parent() - .update(updateQuery); - }} + onSelectAll={onSelectAll} + onSelectNone={onSelectNone} + onToggleDimension={onToggleDimension} /> ); }; + +JoinFieldsPicker.propTypes = joinFieldsPickerPropTypes; diff --git a/frontend/src/metabase/query_builder/components/notebook/steps/JoinStep.styled.jsx b/frontend/src/metabase/query_builder/components/notebook/steps/JoinStep.styled.jsx new file mode 100644 index 000000000000..c1875a9ae639 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/notebook/steps/JoinStep.styled.jsx @@ -0,0 +1,128 @@ +import styled from "styled-components"; +import { color } from "metabase/lib/colors"; +import { space, breakpointMaxMedium } from "metabase/styled-components/theme"; +import Icon from "metabase/components/Icon"; + +export const Row = styled.div` + display: flex; + align-items: center; +`; + +export const JoinStepRoot = styled.div` + display: flex; + align-items: center; +`; + +export const JoinClausesContainer = styled.div` + display: flex; + flex-direction: column; + flex: 1; +`; + +export const JoinClauseContainer = styled.div` + margin-bottom: ${props => (props.isLast ? 0 : "2px")}; +`; + +export const JoinClauseRoot = styled.div` + display: flex; + margin-bottom: ${props => (props.isLast ? 
0 : "2px")}; +`; + +export const JoinStrategyIcon = styled(Icon).attrs({ size: 32 })` + color: ${color("brand")}; + margin-right: 6px; + margin-left: 2px; + margin-top: 6px; +`; + +export const JoinTypeSelectRoot = styled.div` + margin: ${space(1)} ${space(1)} 0 ${space(1)}; +`; + +export const JoinTypeOptionRoot = styled.div` + display: flex; + align-items: center; + padding: ${space(1)}; + margin-bottom: ${space(1)}; + cursor: pointer; + border-radius: ${space(1)}; + + color: ${props => props.isSelected && color("text-white")}; + background-color: ${props => props.isSelected && color("brand")}; + + :hover { + color: ${color("text-white")}; + background-color: ${color("brand")}; + + .Icon { + color: ${color("text-white")}; + } + } +`; + +export const JoinTypeIcon = styled(Icon).attrs({ size: 24 })` + margin-right: ${space(1)}; + color: ${props => (props.isSelected ? color("text-white") : color("brand"))}; +`; + +export const JoinDimensionControlsContainer = styled.div` + display: flex; + flex: 1; + align-items: center; + + margin-top: ${props => (props.isFirst ? 
0 : space(1))}; + + ${breakpointMaxMedium} { + flex-direction: column; + align-items: flex-start; + } +`; + +export const JoinWhereConditionLabelContainer = styled.div` + display: flex; + align-items: center; + justify-content: center; + height: 60px; +`; + +export const JoinWhereConditionLabel = styled.span.attrs({ children: "on" })` + color: ${color("brand")}; + font-weight: bold; + margin: 0 ${space(2)}; +`; + +export const JoinConditionLabel = styled.span` + font-size: 20; + font-weight: bold; + color: ${color("text-medium")}; + margin-left: 2px; + margin-right: 6px; +`; + +export const DimensionContainer = styled.div` + display: flex; + align-items: center; +`; + +export const DimensionSourceName = styled.div` + display: block; + font-size: 11px; + color: ${color("text-white")}; + opacity: 0.65; +`; + +export const RemoveDimensionIcon = styled(Icon).attrs({ name: "close" })` + cursor: pointer; + color: ${color("text-white")}; + opacity: 0.65; + margin-left: 12px; +`; + +export const RemoveJoinIcon = styled(Icon).attrs({ name: "close", size: 18 })` + cursor: pointer; + color: ${color("text-light")}; + + :hover { + color: ${color("text-medium")}; + } +`; diff --git a/frontend/src/metabase/query_builder/components/notebook/steps/JoinStep.unit.spec.js b/frontend/src/metabase/query_builder/components/notebook/steps/JoinStep.unit.spec.js new file mode 100644 index 000000000000..61558323a305 --- /dev/null +++ b/frontend/src/metabase/query_builder/components/notebook/steps/JoinStep.unit.spec.js @@ -0,0 +1,486 @@ +/* eslint-disable react/prop-types */ +import React, { useState } from "react"; +import { Provider } from "react-redux"; +import { + render, + screen, + fireEvent, + within, + waitForElementToBeRemoved, +} from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import xhrMock from "xhr-mock"; +import StructuredQuery from "metabase-lib/lib/queries/StructuredQuery"; +import { getStore } from "__support__/entities-store"; +import { 
+ state, + ORDERS, + PRODUCTS, + SAMPLE_DATASET, +} from "__support__/sample_dataset_fixture"; +import JoinStep from "./JoinStep"; + +// Workaround for timeouts on CI +jest.setTimeout(15000); + +describe("Notebook Editor > Join Step", () => { + const TEST_QUERY = { + type: "query", + database: SAMPLE_DATASET.id, + query: { + "source-table": ORDERS.id, + }, + }; + + function JoinStepWrapped({ initialQuery, onChange, ...props }) { + const [query, setQuery] = useState(initialQuery); + return ( + { + const newQuery = query.setDatasetQuery(datasetQuery); + setQuery(newQuery); + onChange(datasetQuery); + }} + /> + ); + } + + async function setup({ joinTable } = {}) { + const query = new StructuredQuery(ORDERS.question(), TEST_QUERY); + const onQueryChange = jest.fn(); + + const TEST_STEP = { + id: "0:join", + type: "join", + itemIndex: 0, + stageIndex: 0, + query, + valid: true, + visible: true, + active: true, + actions: [], + update: jest.fn(), + clean: jest.fn(), + revert: jest.fn(), + }; + + render( + + + , + ); + + if (joinTable) { + await selectTable(new RegExp(joinTable, "i")); + } + + return { onQueryChange }; + } + + function toFieldRef(field, joinedTable) { + if (!field) { + return null; + } + return [ + "field", + field.id, + joinedTable ? { "join-alias": joinedTable.display_name } : null, + ]; + } + + function expectedJoin({ + fields = "all", + joinedTable, + leftField, + rightField, + }) { + const joinFields = Array.isArray(fields) + ? fields.map(field => toFieldRef(field, joinedTable)) + : fields; + + return { + ...TEST_QUERY, + query: { + ...TEST_QUERY.query, + joins: [ + expect.objectContaining({ + fields: joinFields, + "source-table": joinedTable.id, + condition: [ + "=", + toFieldRef(leftField), + toFieldRef(rightField, joinedTable), + ], + }), + ], + }, + }; + } + + function expectedMultiFieldsJoin({ + fields = "all", + joinedTable, + dimensions, + }) { + const joinFields = Array.isArray(fields) + ? 
fields.map(field => toFieldRef(field, joinedTable)) + : fields; + + const condition = ["and"]; + dimensions.forEach(pair => { + const [leftField, rightField] = pair; + condition.push([ + "=", + toFieldRef(leftField), + toFieldRef(rightField, joinedTable), + ]); + }); + + return { + ...TEST_QUERY, + query: { + ...TEST_QUERY.query, + joins: [ + expect.objectContaining({ + fields: joinFields, + "source-table": joinedTable.id, + condition, + }), + ], + }, + }; + } + + async function selectTable(tableName) { + fireEvent.click(screen.queryByText(/Sample Dataset/i)); + const dataSelector = await screen.findByTestId("data-selector"); + fireEvent.click(within(dataSelector).queryByText(tableName)); + + await waitForElementToBeRemoved(() => + screen.queryByTestId("data-selector"), + ); + } + + function openDimensionPicker(type) { + const openPickerButton = screen.getByTestId(`${type}-dimension`); + fireEvent.click(openPickerButton); + return screen.findByRole("rowgroup"); + } + + beforeEach(() => { + xhrMock.setup(); + xhrMock.get("/api/database", { + body: JSON.stringify({ + total: 1, + data: [SAMPLE_DATASET.getPlainObject()], + }), + }); + xhrMock.get("/api/database/1/schemas", { + body: JSON.stringify(["PUBLIC"]), + }); + xhrMock.get("/api/database/1/schema/PUBLIC", { + body: JSON.stringify( + SAMPLE_DATASET.tables.filter(table => table.schema === "PUBLIC"), + ), + }); + }); + + afterEach(() => { + xhrMock.teardown(); + }); + + it("displays a source table and suggests to pick a join table", async () => { + await setup(); + expect(screen.queryByText("Orders")).toBeInTheDocument(); + expect(screen.queryByText("Pick a table...")).toBeInTheDocument(); + }); + + it("opens a schema browser by default", async () => { + await setup(); + + fireEvent.click(screen.queryByText(/Sample Dataset/i)); + const dataSelector = await screen.findByTestId("data-selector"); + + SAMPLE_DATASET.tables.forEach(table => { + const tableName = new RegExp(table.display_name, "i"); + 
expect(within(dataSelector).queryByText(tableName)).toBeInTheDocument(); + }); + }); + + it("automatically sets join fields if possible", async () => { + const { onQueryChange } = await setup(); + + await selectTable(/Products/i); + + expect(screen.getByTestId("parent-dimension")).toHaveTextContent( + /Product ID/i, + ); + expect(screen.getByTestId("join-dimension")).toHaveTextContent(/ID/i); + expect(onQueryChange).toHaveBeenLastCalledWith( + expectedJoin({ + joinedTable: PRODUCTS, + leftField: ORDERS.PRODUCT_ID, + rightField: PRODUCTS.ID, + }), + ); + }); + + it("shows a parent dimension's join field picker", async () => { + const ordersFields = Object.values(ORDERS.fieldsLookup()); + await setup({ joinTable: "Products" }); + + fireEvent.click(screen.getByTestId("parent-dimension")); + + const picker = await screen.findByRole("rowgroup"); + expect(picker).toBeInTheDocument(); + expect(picker).toBeVisible(); + expect(within(picker).queryByText("Order")).toBeInTheDocument(); + ordersFields.forEach(field => { + expect( + within(picker).queryByText(field.display_name), + ).toBeInTheDocument(); + }); + }); + + it("can change parent dimension's join field", async () => { + const { onQueryChange } = await setup({ joinTable: "Products" }); + const picker = await openDimensionPicker("parent"); + + fireEvent.click(within(picker).getByText("Tax")); + + expect(onQueryChange).toHaveBeenLastCalledWith( + expectedJoin({ + joinedTable: PRODUCTS, + leftField: ORDERS.TAX, + rightField: PRODUCTS.ID, + }), + ); + }); + + it("shows a join dimension's field picker", async () => { + const productsFields = Object.values(PRODUCTS.fieldsLookup()); + await setup({ joinTable: "Products" }); + + fireEvent.click(screen.getByTestId("join-dimension")); + + const picker = await screen.findByRole("rowgroup"); + expect(picker).toBeInTheDocument(); + expect(picker).toBeVisible(); + expect(within(picker).queryByText("Product")).toBeInTheDocument(); + productsFields.forEach(field => { + expect( + 
within(picker).queryByText(field.display_name), + ).toBeInTheDocument(); + }); + }); + + it("can change join dimension's field", async () => { + const { onQueryChange } = await setup({ joinTable: "Products" }); + const picker = await openDimensionPicker("join"); + + fireEvent.click(within(picker).getByText("Category")); + + expect(onQueryChange).toHaveBeenLastCalledWith( + expectedJoin({ + joinedTable: PRODUCTS, + leftField: ORDERS.PRODUCT_ID, + rightField: PRODUCTS.CATEGORY, + }), + ); + }); + + it("automatically opens dimensions picker if can't automatically set join fields", async () => { + await setup({ joinTable: "Reviews" }); + + const picker = await screen.findByRole("rowgroup"); + expect(picker).toBeInTheDocument(); + expect(picker).toBeVisible(); + expect(within(picker).queryByText("Order")).toBeInTheDocument(); + }); + + it("can select fields to select from a joined table", async () => { + const { onQueryChange } = await setup({ joinTable: "Products" }); + + fireEvent.click(screen.getByLabelText("table icon")); + fireEvent.click(screen.getByText("Select None")); + fireEvent.click(screen.getByText("Category")); + + expect(onQueryChange).toHaveBeenLastCalledWith( + expectedJoin({ + joinedTable: PRODUCTS, + leftField: ORDERS.PRODUCT_ID, + rightField: PRODUCTS.ID, + fields: [PRODUCTS.CATEGORY], + }), + ); + }); + + it("can clear selected parent dimension", async () => { + const { onQueryChange } = await setup({ joinTable: "Products" }); + const parentDimensionPicker = screen.getByTestId("parent-dimension"); + + fireEvent.click( + within(parentDimensionPicker).queryByLabelText("close icon"), + ); + + expect(screen.getByTestId("parent-dimension")).toHaveTextContent( + "Pick a column...", + ); + expect(screen.queryByRole("rowgroup")).toBe(null); + expect(onQueryChange).toHaveBeenLastCalledWith( + expectedJoin({ + joinedTable: PRODUCTS, + rightField: PRODUCTS.ID, + }), + ); + }); + + it("can clear selected join dimension", async () => { + const { onQueryChange } 
= await setup({ joinTable: "Products" }); + const joinDimensionPicker = screen.getByTestId("join-dimension"); + + fireEvent.click(within(joinDimensionPicker).queryByLabelText("close icon")); + + expect(screen.getByTestId("join-dimension")).toHaveTextContent( + "Pick a column...", + ); + expect(screen.queryByRole("rowgroup")).toBe(null); + expect(onQueryChange).toHaveBeenLastCalledWith( + expectedJoin({ + joinedTable: PRODUCTS, + leftField: ORDERS.PRODUCT_ID, + }), + ); + }); + + it("hides icons for removing dimensions if dimensions are not set yet", async () => { + await setup({ joinTable: "Reviews" }); + + expect(screen.queryAllByLabelText("close icon")).toHaveLength(0); + }); + + it("shows the fields picker tooltip on control hover", async () => { + await setup({ joinTable: "Products" }); + + userEvent.hover(screen.getByLabelText("table icon")); + + const tooltip = screen.queryByRole("tooltip"); + expect(tooltip).toBeInTheDocument(); + expect(tooltip).toHaveTextContent("Pick columns"); + }); + + it("hides the fields picker tooltip when the picker opens", async () => { + await setup({ joinTable: "Products" }); + + userEvent.click(screen.getByLabelText("table icon")); + userEvent.hover(screen.getByLabelText("table icon")); + + expect(screen.queryByRole("tooltip")).toBe(null); + }); + + it("shows temporal unit for date-time fields", async () => { + await setup({ joinTable: "Products" }); + + fireEvent.click(screen.getByTestId("parent-dimension")); + let picker = await screen.findByRole("rowgroup"); + fireEvent.click(within(picker).queryByText("Created At")); + fireEvent.click(screen.getByTestId("join-dimension")); + picker = await screen.findByRole("rowgroup"); + fireEvent.click(within(picker).queryByText("Created At")); + + expect(screen.getByTestId("parent-dimension")).toHaveTextContent( + "Created At: Day", + ); + expect(screen.getByTestId("join-dimension")).toHaveTextContent( + "Created At: Day", + ); + }); + + describe("joins on multiple fields", () => { + 
it("does not display a new dimensions pair control until first pair is valid", async () => { + await setup({ joinTable: "Reviews" }); + + expect(screen.queryAllByText("Pick a column...")).toHaveLength(2); + expect(screen.queryByLabelText("add icon")).toBe(null); + }); + + it("can add a new dimension pair", async () => { + await setup({ joinTable: "Products" }); + + fireEvent.click(screen.queryByLabelText("add icon")); + + expect(screen.queryAllByText("Pick a column...")).toHaveLength(2); + }); + + it("automatically opens a parent dimension picker for new fields pair", async () => { + await setup({ joinTable: "Products" }); + + fireEvent.click(screen.queryByLabelText("add icon")); + + const picker = await screen.findByRole("rowgroup"); + expect(picker).toBeInTheDocument(); + expect(picker).toBeVisible(); + expect(within(picker).queryByText("Order")).toBeInTheDocument(); + }); + + it("automatically opens a join dimension picker for new fields pair", async () => { + await setup({ joinTable: "Products" }); + + fireEvent.click(screen.queryByLabelText("add icon")); + let picker = await screen.findByRole("rowgroup"); + fireEvent.click(within(picker).queryByText("Created At")); + + picker = await screen.findByRole("rowgroup"); + expect(picker).toBeInTheDocument(); + expect(picker).toBeVisible(); + expect(within(picker).queryByText("Product")).toBeInTheDocument(); + }); + + it("correctly updates join when adding multiple conditions", async () => { + const { onQueryChange } = await setup({ joinTable: "Products" }); + fireEvent.click(screen.queryByLabelText("add icon")); + + let picker = await screen.findByRole("rowgroup"); + fireEvent.click(within(picker).queryByText("Tax")); + picker = await screen.findByRole("rowgroup"); + fireEvent.click(within(picker).queryByText("Price")); + + expect(onQueryChange).toHaveBeenLastCalledWith( + expectedMultiFieldsJoin({ + dimensions: [ + [ORDERS.PRODUCT_ID, PRODUCTS.ID], + [ORDERS.TAX, PRODUCTS.PRICE], + ], + joinedTable: PRODUCTS, + }), 
+ ); + }); + + it("does not display a new dimensions pair control for a new empty pair", async () => { + await setup({ joinTable: "Products" }); + + fireEvent.click(screen.queryByLabelText("add icon")); + + expect(screen.queryByLabelText("add icon")).toBe(null); + }); + + it("can remove an empty dimension pair", async () => { + await setup({ joinTable: "Products" }); + fireEvent.click(screen.queryByLabelText("add icon")); + + fireEvent.click(screen.queryByTestId("remove-dimension-pair")); + + expect(screen.queryAllByText("Pick a column...")).toEqual([]); + expect(screen.getByTestId("parent-dimension")).toHaveTextContent( + /Product ID/i, + ); + expect(screen.getByTestId("join-dimension")).toHaveTextContent(/ID/i); + }); + }); +}); diff --git a/frontend/src/metabase/query_builder/components/saved-question-picker/SavedQuestionList.jsx b/frontend/src/metabase/query_builder/components/saved-question-picker/SavedQuestionList.jsx index 6801fc22e2b4..2ed55cc3dc18 100644 --- a/frontend/src/metabase/query_builder/components/saved-question-picker/SavedQuestionList.jsx +++ b/frontend/src/metabase/query_builder/components/saved-question-picker/SavedQuestionList.jsx @@ -3,13 +3,16 @@ import { t } from "ttag"; import PropTypes from "prop-types"; import { Box } from "grid-styled"; +import { PLUGIN_MODERATION } from "metabase/plugins"; import Schemas from "metabase/entities/schemas"; import { SAVED_QUESTIONS_VIRTUAL_DB_ID } from "metabase/lib/constants"; -import { SelectList } from "metabase/components/select-list"; import EmptyState from "metabase/components/EmptyState"; import { generateSchemaId } from "metabase/schema"; -import { SavedQuestionListRoot } from "./SavedQuestionList.styled"; +import { + SavedQuestionListRoot, + SavedQuestionListItem, +} from "./SavedQuestionList.styled"; import { PERSONAL_COLLECTIONS } from "metabase/entities/collections"; const propTypes = { @@ -57,7 +60,7 @@ export default function SavedQuestionList({ return ( {tables.map(t => ( - onSelect(t)} + 
rightIcon={PLUGIN_MODERATION.getStatusIcon( + t.moderated_status, + )} /> ))} diff --git a/frontend/src/metabase/query_builder/components/saved-question-picker/SavedQuestionList.styled.jsx b/frontend/src/metabase/query_builder/components/saved-question-picker/SavedQuestionList.styled.jsx index a9ba50af0e62..f8ca1c8bc18a 100644 --- a/frontend/src/metabase/query_builder/components/saved-question-picker/SavedQuestionList.styled.jsx +++ b/frontend/src/metabase/query_builder/components/saved-question-picker/SavedQuestionList.styled.jsx @@ -11,3 +11,9 @@ export const SavedQuestionListRoot = styled(SelectList)` min-height: 220px; } `; + +export const SavedQuestionListItem = styled(SelectList.Item)` + .Icon:last-child { + justify-self: start; + } +`; diff --git a/frontend/src/metabase/query_builder/components/saved-question-picker/utils.js b/frontend/src/metabase/query_builder/components/saved-question-picker/utils.js index 69c51fd2b728..733ccd79d10f 100644 --- a/frontend/src/metabase/query_builder/components/saved-question-picker/utils.js +++ b/frontend/src/metabase/query_builder/components/saved-question-picker/utils.js @@ -1,19 +1,9 @@ -import { isPersonalCollection } from "metabase/collections/utils"; -import { PERSONAL_COLLECTIONS } from "metabase/entities/collections"; - -const getCollectionIcon = collection => { - if (collection.id === PERSONAL_COLLECTIONS.id) { - return "group"; - } - - return isPersonalCollection(collection) ? 
"person" : "folder"; -}; +import { getCollectionIcon } from "metabase/entities/collections"; export function buildCollectionTree(collections) { if (collections == null) { return []; } - return collections.map(collection => ({ id: collection.id, name: collection.name, diff --git a/frontend/src/metabase/query_builder/components/template_tags/TagEditorParam.jsx b/frontend/src/metabase/query_builder/components/template_tags/TagEditorParam.jsx index 0f41996657f3..ed534dd14837 100644 --- a/frontend/src/metabase/query_builder/components/template_tags/TagEditorParam.jsx +++ b/frontend/src/metabase/query_builder/components/template_tags/TagEditorParam.jsx @@ -181,14 +181,19 @@ export default class TagEditorParam extends Component {

{t`Filter widget type`}