diff --git a/.travis.yml b/.travis.yml index ccd6d2b03c..8d8403e65e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -58,7 +58,7 @@ before_install: install: - pip install nose pep8 - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then pip install virtualenv; fi - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then psql -c 'create database travis_ci_test;' -U postgres; fi + - psql -c 'create database codechecker_config;' -U postgres - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then cat postgres.log; fi addons: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5ebd4dc2b6..05822a6fb0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -84,12 +84,12 @@ from libcodechecker import host_check from libcodechecker import util from libcodechecker.analyze import analyzer from libcodechecker.analyze.analyzers import analyzer_types -from libcodechecker.database_handler import SQLServer from libcodechecker.log import build_action from libcodechecker.logger import LoggerFactory # -- 9. imports from the local package -import client_db_access_handler +from . import client_db_access_handler +from product_db_access_handler import ThriftProductHandler # ... your code here ~~~~ diff --git a/Makefile b/Makefile index 984fe1ae39..4ea0cff726 100644 --- a/Makefile +++ b/Makefile @@ -32,6 +32,9 @@ thrift: build_dir thrift -r -o $(BUILD_DIR) -I api/ \ --gen py --gen js:jquery api/authentication.thrift + thrift -r -o $(BUILD_DIR) -I api/ \ + --gen py --gen js:jquery api/products.thrift + package: build_dir gen-docs thrift if [ ! -d "$(BUILD_DIR)/CodeChecker" ]; then \ ./scripts/build_package.py -r $(ROOT) -o $(BUILD_DIR) -b $(BUILD_DIR); \ diff --git a/README.md b/README.md index 0b615d5e3a..9f20ebe0ca 100644 --- a/README.md +++ b/README.md @@ -239,8 +239,9 @@ Additional documentation * [Architecture overview](docs/architecture.md) * [Package layout](docs/package_layout.md) * [Checker documentation](docs/checker_docs.md) - * [Thrift interface](thrift_api/thrift_api.md) + * [Thrift interface](api/README.md) * [Package and integration tests](tests/readme.md) * [Database schema migration](docs/db_schema_guide.md) * [Usage of PostgreSQL database](docs/postgresql_setup.md) - * [Requiring credentials to view analysis results](docs/authentication.md) + * [Requiring credentials to view analysis results (Authentication)](docs/authentication.md) + * [Connecting multiple separate defect databases on the same server (Products)](docs/products.md) diff --git a/alembic.ini b/alembic.ini index a4a8f0a7d9..6c37d9ed81 100644 --- a/alembic.ini +++ b/alembic.ini @@ -1,6 +1,35 @@ -# A generic, single database configuration. +[product_db] +# path to migration scripts +script_location = product_db_migrate + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# max length of characters to apply to the +# "slug" field +#truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to database/versions. 
When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat database/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = postgres://codechecker@localhost:5432/config -[alembic] +[run_db] # path to migration scripts script_location = db_migrate @@ -29,7 +58,7 @@ script_location = db_migrate # are written from script.py.mako # output_encoding = utf-8 -sqlalchemy.url = postgres://codechecker@localhost:5432/codechecker +sqlalchemy.url = postgres://codechecker@localhost:5432/default # Logging configuration [loggers] diff --git a/api/README.md b/api/README.md index 72d5c72933..94ef5e9f69 100644 --- a/api/README.md +++ b/api/README.md @@ -9,3 +9,8 @@ See [report_server.thrift](https://raw.githubusercontent.com/Ericsson/codechecke ## Authentication system API The authentication layer is used for supporting privileged-access only access. See [authentication.thrift](https://raw.githubusercontent.com/Ericsson/codechecker/master/thrift_api/authentication.thrift) + +## Product management system API +The product management layer is responsible for handling requests about the +different products and their configuration. See +[products.thrift](https://raw.githubusercontent.com/Ericsson/codechecker/master/thrift_api/products.thrift) \ No newline at end of file diff --git a/api/products.thrift b/api/products.thrift new file mode 100644 index 0000000000..c171b0af5f --- /dev/null +++ b/api/products.thrift @@ -0,0 +1,78 @@ +// ------------------------------------------------------------------------- +// The CodeChecker Infrastructure +// This file is distributed under the University of Illinois Open Source +// License. See LICENSE.TXT for details. +// ------------------------------------------------------------------------- + +include "shared.thrift" + +namespace py ProductManagement +namespace js codeCheckerProductManagement + + +/* +struct PrivilegeRecord { + 1: string name, + 2: bool isGroup +} +typedef list PrivilegeRecords +*/ + +struct DatabaseConnection { + 1: string engine, // The database engine, such as "sqlite" or "postgresql". + 2: string host, + 3: i32 port, + 4: string username_b64, + 5: optional string password_b64, // Database password is NOT sent server->client! + 6: string database // SQLite: Database file path; PostgreSQL: Database name +} + +/* ProductConfiguration carries administrative data regarding product settings. */ +struct ProductConfiguration { + 1: i64 id, + 2: string endpoint, + 3: string displayedName_b64, + 4: string description_b64, + 5: optional DatabaseConnection connection +} +typedef list ProductConfigurations + +/* Product carries data to the end user's product list and tasks. */ +struct Product { + 1: i64 id, + 2: string endpoint, + 3: string displayedName_b64, + 4: string description_b64, + 5: bool connected, // Indicates that the server could set up the database connection properly. + 6: bool accessible // Indicates whether the current user can access this product. +} +typedef list Products + +service codeCheckerProductService { + + // Return the product management API version. + string getAPIVersion(), + + // Returns the CodeChecker version that is running on the server. + string getPackageVersion(), + + // *** Handling of product lists and metadata querying *** // + + // Get the list of product that matches the display name and endpoint + // filters specified. 
+ Products getProducts(1: string productEndpointFilter, + 2: string productNameFilter) + throws (1: shared.RequestFailed requestError), + + Product getCurrentProduct() + throws (1: shared.RequestFailed requestError), + + // *** Handling the add-modify-remove of products registered *** // + + bool addProduct(1: ProductConfiguration product) + throws (1: shared.RequestFailed requestError), + + bool removeProduct(1: i64 productId) + throws (1: shared.RequestFailed requestError) + +} diff --git a/api/report_server.thrift b/api/report_server.thrift index 40f9ec8f62..19007ea4df 100644 --- a/api/report_server.thrift +++ b/api/report_server.thrift @@ -17,7 +17,6 @@ namespace js codeCheckerDBAccess namespace cpp cc.service.codechecker //================================================= -const string API_VERSION = '6.0' const i64 MAX_QUERY_SIZE = 500 //================================================= diff --git a/api/shared.thrift b/api/shared.thrift index f1450d8395..08490a13c7 100644 --- a/api/shared.thrift +++ b/api/shared.thrift @@ -4,6 +4,8 @@ // License. See LICENSE.TXT for details. // ------------------------------------------------------------------------- +const string API_VERSION = '6.0' + //----------------------------------------------------------------------------- struct BugPathEvent { 1: i64 startLine, diff --git a/bin/CodeChecker.py b/bin/CodeChecker.py index 3884d703c6..9c790b0a44 100755 --- a/bin/CodeChecker.py +++ b/bin/CodeChecker.py @@ -63,7 +63,7 @@ def signal_handler(sig, frame): The results can be viewed: * In a web browser: http://localhost:8001 * In the command line: - CodeChecker cmd results -p 8001 -n myproject + CodeChecker cmd results -n myproject Example scenario: Analyzing, and printing results to Terminal (no storage) -------------------------------------------------------------------------- diff --git a/config/package_layout.json b/config/package_layout.json index ae3a398c88..4489c2cca6 100644 --- a/config/package_layout.json +++ b/config/package_layout.json @@ -19,7 +19,8 @@ "ld_logger": "ld_logger", "libcodechecker": "lib/python2.7/libcodechecker", "gencodechecker": "lib/python2.7/gencodechecker", - "codechecker_db_migrate": "lib/python2.7/db_migrate" + "run_db_migrate": "lib/python2.7/run_migrate", + "config_db_migrate": "lib/python2.7/config_migrate" }, "runtime": { "analyzers": { diff --git a/config/version.json b/config/version.json index aa8da2340e..d927cd77c9 100644 --- a/config/version.json +++ b/config/version.json @@ -1,10 +1,14 @@ { - "version":{ + "version": { "major" : "6", "minor" : "0", "revision" : "0" }, - "db_version":{ + "product_db_version": { + "major" : "6", + "minor" : "0" + }, + "run_db_version": { "major" : "6", "minor" : "0" } diff --git a/db_migrate/env.py b/db_migrate/env.py index c7c53952ed..2f7f0bb7df 100644 --- a/db_migrate/env.py +++ b/db_migrate/env.py @@ -9,7 +9,7 @@ # Add your model's MetaData object here # for 'autogenerate' support. 
 try:
-    from libcodechecker.orm_model import Base
+    from libcodechecker.server.run_db_model import Base
 except ImportError:
     # Assume we are in the source directory
     import sys
@@ -17,7 +17,7 @@
     sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                   "..")))
 
-    from libcodechecker.orm_model import Base
+    from libcodechecker.server.run_db_model import Base
 
 target_metadata = Base.metadata
diff --git a/docs/images/products.png b/docs/images/products.png
new file mode 100644
index 0000000000..ee8e02aede
Binary files /dev/null and b/docs/images/products.png differ
diff --git a/docs/postgresql_setup.md b/docs/postgresql_setup.md
index ab3dc7d603..02205c41a8 100644
--- a/docs/postgresql_setup.md
+++ b/docs/postgresql_setup.md
@@ -55,14 +55,14 @@ This step can be skipped if you always give the path of CodeChecker command.
 export PATH=~/codechecker_package/CodeChecker/bin:$PATH
 ~~~~~~
 
-Check a test project.
+Start web server to view the results.
 ~~~~~~{.sh}
-CodeChecker check --dbusername test_user --postgresql -n test_project_check -b "cd my_test_project && make clean && make"
+CodeChecker server --dbusername test_user --postgresql
 ~~~~~~
 
-Start web server to view the results.
+Check a test project.
 ~~~~~~{.sh}
-CodeChecker server --dbusername test_user --postgresql
+CodeChecker check --dbusername test_user -n test_project_check -b "cd my_test_project && make clean && make"
 ~~~~~~
 
 View the results with firefox.
diff --git a/docs/products.md b/docs/products.md
new file mode 100644
index 0000000000..e88d23b3b4
--- /dev/null
+++ b/docs/products.md
@@ -0,0 +1,178 @@
+Product management
+==================
+
+![Web interface showing product list](docs/images/products.png)
+
+The product system allows a single CodeChecker server to serve multiple
+separate result databases, named "products", under the same IP address and
+authentication domain.
+
+`CodeChecker server` takes database arguments to initialize a connection to a
+**configuration** database. (See the [User guide](docs/user_guide.md) for
+details.) This configuration database stores which products are managed by
+the server, and contains various other metadata.
+
+The analysis results are stored in **product** databases. Each product
+database is an isolated storage of results, completely separate, and can be
+added and removed from a server on the fly.
+
+Command-line tools that are used to access analysis data take an `--url
+PRODUCT_URL` parameter, which specifies the server host, port, and the
+product's unique endpoint in the following format: `localhost:8001/Default`,
+where `Default` is the product's endpoint. See the User guide for further
+details.
+
+The Web application separates products based on their unique endpoint. The
+home page of the server is the product list, shown above.
+
+## First start
+
+When a CodeChecker server is started with a SQLite configuration database, and
+this database is not yet created (such as when the server is started fresh on
+your computer), CodeChecker will automatically set up a `Default` product,
+with a SQLite database file next to the configuration database, in
+`/home//.codechecker`.
+
+This does NOT hold true for PostgreSQL configuration backends. As PostgreSQL
+is considered advanced usage, such servers must be configured manually.
+
+# Managing products through the command-line tool, `CodeChecker cmd`
+
+Please see the [User guide](docs/user_guide.md) for an overview of the `cmd`
+command.
+
+The `products` subcommand of `CodeChecker cmd` groups the actions related to
+product configuration. All these commands take a server `--host` and `--port`
+as arguments, as they control the server itself, not an individual product
+endpoint.
+
+~~~~~~~~~~~~~~~~~~~~~
+usage: CodeChecker cmd products [-h] [--verbose {info,debug,debug_analyzer}]
+                                {list,add,del} ...
+
+CodeChecker organises its databases into products. Each product has an
+individually configured database which stores the analysis results. These
+subcommands are used to manage the products configured by the server. Please
+see the individual subcommands for details.
+
+optional arguments:
+  -h, --help            show this help message and exit
+
+available actions:
+  {list,add,del}
+    list                List products available on the server.
+    add                 Register a new product to the server.
+    del                 Delete a product from the server's products.
+
+common arguments:
+  --host HOST           The address of the CodeChecker server to connect to.
+                        (default: localhost)
+  -p PORT, --port PORT  The port the server is running on. (default: 8001)
+  --verbose {info,debug,debug_analyzer}
+                        Set verbosity level. (default: info)
+
+Most of these commands require authentication and appropriate access rights.
+Please see 'CodeChecker cmd login' to authenticate.
+~~~~~~~~~~~~~~~~~~~~~
+
+## Listing products (`list`)
+
+~~~~~~~~~~~~~~~~~~~~~
+usage: CodeChecker cmd products list [-h] [--host HOST] [-p PORT]
+                                     [-o {plaintext,rows,table,csv,json}]
+                                     [--verbose {info,debug,debug_analyzer}]
+
+List the name and basic information about products added to the server. The
+'S' status column of the product is 'E' if the product database is erroneous,
+and 'L' if you don't have access to the product.
+
+optional arguments:
+  -h, --help            show this help message and exit
+
+common arguments:
+  -o {plaintext,rows,table,csv,json}, --output {plaintext,rows,table,csv,json}
+                        The output format to use in showing the data.
+                        (default: plaintext)
+~~~~~~~~~~~~~~~~~~~~~
+
+## Adding a new product (`add`)
+
+`add` associates the unique `PRODUCT_NAME` endpoint with a database
+connection, making a new product available on the server.
+
+~~~~~~~~~~~~~~~~~~~~~
+usage: CodeChecker cmd products add [-h] [-n DISPLAY_NAME]
+                                    [--description DESCRIPTION]
+                                    [--sqlite SQLITE_FILE | --postgresql]
+                                    [--dbaddress DBADDRESS] [--dbport DBPORT]
+                                    [--dbusername DBUSERNAME]
+                                    [--dbpassword DBPASSWORD]
+                                    [--dbname DBNAME] [--host HOST] [-p PORT]
+                                    [--verbose {info,debug,debug_analyzer}]
+                                    PRODUCT_NAME
+
+Create a new product to be managed by the server by providing the product's
+details and database connection.
+
+positional arguments:
+  PRODUCT_NAME          The URL endpoint where clients can access the analysis
+                        results for this product.
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -n DISPLAY_NAME, --name DISPLAY_NAME
+                        A custom display name for the product, which will be
+                        shown in the viewer. This is purely for decoration and
+                        user experience, program calls use the PRODUCT_NAME.
+  --description DESCRIPTION
+                        A custom textual description to be shown alongside the
+                        product.
+
+database arguments:
+  NOTE: These database arguments are relative to the server machine, as it
+  is the server which will make the database connection.
+
+  --sqlite SQLITE_FILE  Path of the SQLite database file to use. Non-absolute
+                        paths will be relative to the server's
+                        . (default: .sqlite)
+  --postgresql          Specifies that a PostgreSQL database is to be used
+                        instead of SQLite. See the "PostgreSQL arguments"
+                        section on how to configure the database connection.
+ +PostgreSQL arguments: + Values of these arguments are ignored, unless '--postgresql' is specified! + The database specified here must exist, and be connectible by the server. + + --dbaddress DBADDRESS, --db-host DBADDRESS + Database server address. (default: localhost) + --dbport DBPORT, --db-port DBPORT + Database server port. (default: 5432) + --dbusername DBUSERNAME, --db-username DBUSERNAME + Username to use for connection. (default: + ) + --dbpassword DBPASSWORD, --db-password DBPASSWORD + Password to use for authenticating the connection. + (default: ) + --dbname DBNAME, --db-name DBNAME + Name of the database to use. (default: ) +~~~~~~~~~~~~~~~~~~~~~ + +## Delete a product (`del`) + +~~~~~~~~~~~~~~~~~~~~~ +usage: CodeChecker cmd products del [-h] [--host HOST] [-p PORT] + [--verbose {info,debug,debug_analyzer}] + PRODUCT_NAME + +Removes the specified product from the list of products managed by the server. +NOTE: This only removes the association and disconnects the server from the +database -- NO actual ANALYSIS DATA is REMOVED. Configuration, such as access +control, however, WILL BE LOST! + +positional arguments: + PRODUCT_NAME The URL endpoint where clients can access the analysis + results for the product. + +optional arguments: + -h, --help show this help message and exit +~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/user_guide.md b/docs/user_guide.md index 3b397cbccb..ad22c0b802 100644 --- a/docs/user_guide.md +++ b/docs/user_guide.md @@ -8,14 +8,16 @@ Running CodeChecker is via its main invocation script, `CodeChecker`: ~~~~~~~~~~~~~~~~~~~~~ usage: CodeChecker [-h] - {checkers,analyze,analyzers,check,cmd,log,parse,plist,quickcheck,server,store,version} + {checkers,analyze,analyzers,check,cmd,log,parse,plist, + quickcheck,server,store,version} ... Run the CodeChecker sourcecode analyzer framework. Please specify a subcommand to access individual features. positional arguments: - {checkers,analyze,analyzers,check,cmd,log,parse,plist,quickcheck,server,store,version} + {checkers,analyze,analyzers,check,cmd,log,parse,plist,quickcheck,server, + store,version} commands checkers List the available checkers for the supported analyzers and show their default status (+ for being @@ -55,7 +57,7 @@ Analyze a project with default settings: The results can be viewed: * In a web browser: http://localhost:8001 * In the command line: - CodeChecker cmd results -p 8001 -n myproject + CodeChecker cmd results -n myproject Example scenario: Analyzing, and printing results to Terminal (no storage) -------------------------------------------------------------------------- @@ -69,8 +71,13 @@ output. ## Default configuration Used ports: + * `5432` - PostgreSQL -* `8001` - CodeChecker result viewer +* `8001` - CodeChecker server + +The server listens only on the local machine. + +The initial product is called `Default`. # Easy analysis wrappers @@ -130,7 +137,7 @@ usage: CodeChecker check [-h] [--keep-tmp] [-c] [--update] -n NAME [--saargs CLANGSA_ARGS_CFG_FILE] [--tidyargs TIDY_ARGS_CFG_FILE] [-e checker/checker-group] [-d checker/checker-group] - [--host HOST] [-p PORT] + [--url PRODUCT_URL] [--verbose {info,debug,debug_analyzer}] Run analysis for a project with storing results in the database. 
Check only @@ -173,8 +180,7 @@ checker configuration: server arguments: - --host HOST - -p PORT, --port PORT + --url PRODUCT_URL ~~~~~~~~~~~~~~~~~~~~~ ## Quickcheck @@ -713,6 +719,8 @@ void test() { Write suppress data from the suppression annotations found in the source files that were analyzed earlier that created the results. +~~~~~~~~~~~~~~~~~~~~~ + ~~~~ CodeChecker parse ./my_plists --suppress generated.suppress --export-source-suppress ~~~~ @@ -727,8 +735,8 @@ a database. to the database. ~~~~~~~~~~~~~~~~~~~~~ -usage: CodeChecker store [-h] [-t {plist}] [-j JOBS] [-n NAME] - [-f] [--host HOST] [-p PORT] +usage: CodeChecker store [-h] [-t {plist}] [-n NAME] [-f] + [--url PRODUCT_URL] [--verbose {info,debug,debug_analyzer}] [file/folder [file/folder ...]] @@ -745,9 +753,6 @@ optional arguments: -t {plist}, --type {plist}, --input-format {plist} Specify the format the analysis results were created as. (default: plist) - -j JOBS, --jobs JOBS Number of threads to use in storing results. More - threads mean faster operation at the cost of using - more memory. (default: 1) -n NAME, --name NAME The name of the analysis run to use in storing the reports to the database. If not specified, the '-- name' parameter given to 'codechecker-analyze' will be @@ -764,13 +769,12 @@ optional arguments: server arguments: Specifies a 'CodeChecker server' instance which will be used to store the - results. This server must be running and listening prior to the 'store' - command being ran. + results. This server must be running and listening, and the given product + must exist prior to the 'store' command being ran. - --host HOST The IP address or hostname of the CodeChecker server. - (default: localhost) - -p PORT, --port PORT The port of the server to use for storing. (default: - 8001) + --url PRODUCT_URL The URL of the product to store the results for, in + the format of host:port/ProductName. (default: + localhost:8001/Default) The results can be viewed by connecting to such a server in a Web browser or via 'CodeChecker cmd'. @@ -823,13 +827,13 @@ CodeChecker server -w ~/codechecker_wp --dbname myProjectdb --dbport 5432 --dbad The checking process can be started separately on the same machine ~~~~~~~~~~~~~~~~~~~~~ -CodeChecker check -w ~/codechecker_wp -n myProject -b "make -j 4" --host localhost --port 8001 +CodeChecker check -w ~/codechecker_wp -n myProject -b "make -j 4" --url localhost:8001/Default ~~~~~~~~~~~~~~~~~~~~~ or on a different machine ~~~~~~~~~~~~~~~~~~~~~ -CodeChecker check -w ~/codechecker_wp -n myProject -b "make -j 4" --host 192.168.1.1 --dbport 8001 +CodeChecker check -w ~/codechecker_wp -n myProject -b "make -j 4" --url 192.168.1.1:8001/Default ~~~~~~~~~~~~~~~~~~~~~ @@ -849,7 +853,7 @@ Start CodeChecker server locally which connects to a remote database (which is s Start the checking as explained previously. ~~~~~~~~~~~~~~~~~~~~~ -CodeChecker check -w ~/codechecker_wp -n myProject -b "make -j 4" --host 192.168.1.2 --port 8001 +CodeChecker check -w ~/codechecker_wp -n myProject -b "make -j 4" --url 192.168.1.2:8001/Default ~~~~~~~~~~~~~~~~~~~~~ ## 5. `checkers` mode @@ -1004,9 +1008,9 @@ optional arguments: --verbose {info,debug,debug_analyzer} Set verbosity level. (default: info) -database arguments: +configuration database arguments: --sqlite SQLITE_FILE Path of the SQLite database file to use. (default: - /home//.codechecker/codechecker.sqlite) + /config.sqlite) --postgresql Specifies that a PostgreSQL database is to be used instead of SQLite. 
See the "PostgreSQL arguments" section on how to configure the database connection. @@ -1021,7 +1025,7 @@ PostgreSQL arguments: --dbusername DBUSERNAME, --db-username DBUSERNAME Username to use for connection. (default: codechecker) --dbname DBNAME, --db-name DBNAME - Name of the database to use. (default: codechecker) + Name of the database to use. (default: config) ~~~~~~~~~~~~~~~~~~~~~ To start a server with default configuration, simply execute @@ -1053,8 +1057,9 @@ The `--sqlite` (or `--postgresql` and the various `--db-` arguments) can be used to specify where the database, containing the analysis reports is. `--config-directory` specifies where the server configuration files, such as -[authentication config](docs/authentication.md) is. E.g., one can start two -servers with two different databases, but with the same configuration: +[authentication config](docs/authentication.md) is. For example, one can start +two servers with two different product layout, but with the same authorisation +configuration: ~~~~~~~~~~~~~~~~~~~~~ CodeChecker server --sqlite ~/major_bugs.sqlite -f ~/.codechecker -p 8001 @@ -1063,7 +1068,12 @@ CodeChecker server --sqlite ~/minor_bugs.sqlite -f ~/.codechecker -p 8002 The `--workspace` argument can be used to _shortcut_ this specification: by default, the configuration directory is the _workspace_ itself, and therein -resides the `codechecker.sqlite` file, containing the analysis reports. +resides the `config.sqlite` file, containing the product configuration. + +If the server is started in `--sqlite` mode and fresh, that is, no product +configuration file is found, a product named `Default`, using `Default.sqlite` +in the configuration directory is automatically created. Please see +[Product management](docs/products.md) for details on how to configure products. ### Managing running servers @@ -1104,7 +1114,8 @@ Most of the features available in a Web browser opening the analysis result viewer server on its port is available in the `cmd` tool. ~~~~~~~~~~~~~~~~~~~~~ -usage: CodeChecker cmd [-h] {runs,results,diff,sum,del,suppress,login} ... +usage: CodeChecker cmd [-h] + {runs,results,diff,sum,del,suppress,products,login} ... The command-line client is used to connect to a running 'CodeChecker server' (either remote or local) and quickly inspect analysis results, such as runs, @@ -1115,7 +1126,7 @@ optional arguments: -h, --help show this help message and exit available actions: - {runs,results,diff,sum,del,login} + {runs,results,diff,sum,del,suppress,products,login} runs List the available analysis runs. results List analysis result (finding) summary for a given run. @@ -1124,13 +1135,22 @@ available actions: del Delete analysis runs. suppress Manage and export/import suppressions of a CodeChecker server. + products Access subcommands related to configuring the products + managed by a CodeChecker server. login Authenticate into CodeChecker servers that require privileges. ~~~~~~~~~~~~~~~~~~~~~ The operations available in `cmd` **always** require a running CodeChecker viewer server (i.e. a server started by `CodeChecker server`), and the -connection details (`--host` and `--port`) to access the server. +connection details to access the server. These details either take an URL form +(`--url hostname:port/Productname`) if the command accesses analysis results +in a given product, or a `--host` and `--port` pair, if the command manages +the server. 
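For example (an illustrative sketch assuming a server running on the default `localhost:8001` with the automatically created `Default` product; substitute your own endpoint), the two addressing forms look like this:

~~~~~~~~~~~~~~~~~~~~~
# Product-level commands take a product URL:
CodeChecker cmd runs --url localhost:8001/Default
CodeChecker cmd results -n myproject --url localhost:8001/Default

# Server-level commands take only the host and port:
CodeChecker cmd products list --host localhost -p 8001
~~~~~~~~~~~~~~~~~~~~~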
+ +A server started by default settings (`CodeChecker server`, see above) +automatically configure the product `Default` under `localhost:8001/Default`, +thus the `--url` parameter can be omitted. Most result-giving commands also take an `--output` format parameter. If this is set to `json`, a more detailed output is given, in JSON format. @@ -1140,6 +1160,9 @@ common arguments: --host HOST The address of the CodeChecker viewer server to connect to. (default: localhost) -p PORT, --port PORT The port the server is running on. (default: 8001) + --url PRODUCT_URL The URL of the product to store the results for, in + the format of host:port/ProductName. (default: + localhost:8001/Default) -o {plaintext,rows,table,csv,json}, --output {plaintext,rows,table,csv,json} The output format to use in showing the data. (default: plaintext) @@ -1150,7 +1173,7 @@ common arguments: ### List runs (`runs`) ~~~~~~~~~~~~~~~~~~~~~ -usage: CodeChecker cmd runs [-h] [--host HOST] [-p PORT] +usage: CodeChecker cmd runs [-h] [--url PRODUCT_URL] [-o {plaintext,rows,table,csv,json}] [--verbose {info,debug,debug_analyzer}] @@ -1164,7 +1187,7 @@ name, summary. ~~~~~~~~~~~~~~~~~~~~~ usage: CodeChecker cmd results [-h] -n RUN_NAME [-s] [--filter FILTER] - [--host HOST] [-p PORT] + [--url PRODUCT_URL] [-o {plaintext,rows,table,csv,json}] [--verbose {info,debug,debug_analyzer}] @@ -1190,8 +1213,9 @@ from the comparison of two runs. ~~~~~~~~~~~~~~~~~~~~~ usage: CodeChecker cmd diff [-h] -b BASE_RUN -n NEW_RUN [-s] [--filter FILTER] - (--new | --resolved | --unresolved) [--host HOST] - [-p PORT] [-o {plaintext,rows,table,csv,json}] + (--new | --resolved | --unresolved) + [--url PRODUCT_URL] + [-o {plaintext,rows,table,csv,json}] [--verbose {info,debug,debug_analyzer}] Compare two analysis runs to show the results that differ between the two. @@ -1244,7 +1268,7 @@ CodeChecker cmd diff -p 8001 --basename my_project --newname my_new_checkin --ne ~~~~~~~~~~~~~~~~~~~~~ usage: CodeChecker cmd sum [-h] (-n RUN_NAME [RUN_NAME ...] | -a) [-s] - [--filter FILTER] [--host HOST] [-p PORT] + [--filter FILTER] [--url PRODUCT_URL] [-o {plaintext,rows,table,csv,json}] [--verbose {info,debug,debug_analyzer}] @@ -1272,7 +1296,7 @@ usage: CodeChecker cmd del [-h] --all-after-run RUN_NAME | --all-after-time TIMESTAMP | --all-before-time TIMESTAMP) - [--host HOST] [-p PORT] + [--url PRODUCT_URL] [--verbose {info,debug,debug_analyzer}] Remove analysis runs from the server based on some criteria. NOTE! When a run @@ -1345,6 +1369,10 @@ server arguments: `--import` **appends** the suppressions found in the given suppress file to the database on the server. +### Manage product configuration of a server (`products`) + +Please see [Product management](docs/products.md) for details. + ### Authenticate to the server (`login`) ~~~~~~~~~~~~~~~~~~~~~ diff --git a/libcodechecker/analyze/store_handler.py b/libcodechecker/analyze/store_handler.py index 4270cd3ca3..715522c0cc 100644 --- a/libcodechecker/analyze/store_handler.py +++ b/libcodechecker/analyze/store_handler.py @@ -18,7 +18,8 @@ from libcodechecker.analyze import skiplist_handler from libcodechecker.logger import LoggerFactory -from libcodechecker.orm_model import * +# TODO: This is a cross-subpackage import. 
+from libcodechecker.server.run_db_model import * LOG = LoggerFactory.get_new_logger('STORE HANDLER') diff --git a/libcodechecker/cmd/cmd_line_client.py b/libcodechecker/cmd/cmd_line_client.py index e3a5c05f03..3eb5704fe0 100644 --- a/libcodechecker/cmd/cmd_line_client.py +++ b/libcodechecker/cmd/cmd_line_client.py @@ -32,17 +32,6 @@ def default(self, obj): return d -def __check_authentication(client): - """Communicate with the authentication server - to handle authentication requests.""" - result = client.getAuthParameters() - - if result.sessionStillActive: - return True - else: - return False - - def get_run_ids(client): """ Returns a map for run names and run_ids. @@ -111,7 +100,7 @@ def add_filter_conditions(report_filter, filter_str): # --------------------------------------------------------------------------- def handle_list_runs(args): - client = setup_client(args.host, args.port, '/') + client = setup_client(args.product_url) runs = client.getRunData(None) if args.output_format == 'json': @@ -130,7 +119,7 @@ def handle_list_runs(args): def handle_list_results(args): - client = setup_client(args.host, args.port, '/') + client = setup_client(args.product_url) run_info = check_run_names(client, [args.name]) @@ -331,7 +320,7 @@ def printReports(client, reports, output_format): else: print(twodim_to_str(output_format, header, rows)) - client = setup_client(args.host, args.port, '/') + client = setup_client(args.product_url) report_dir_mode = False if os.path.isdir(args.newname): @@ -367,7 +356,7 @@ def printReports(client, reports, output_format): def handle_list_result_types(args): - client = setup_client(args.host, args.port, '/') + client = setup_client(args.product_url) filters = [] report_filter = codeCheckerDBAccess.ttypes.ReportFilter() @@ -408,7 +397,7 @@ def handle_list_result_types(args): def handle_remove_run_results(args): - client = setup_client(args.host, args.port, '/') + client = setup_client(args.product_url) def is_later(d1, d2): dateformat = '%Y-%m-%d %H:%M:%S.%f' @@ -465,7 +454,7 @@ def bug_hash_filter(bug_id, filepath): limit = codeCheckerDBAccess.constants.MAX_QUERY_SIZE - client = setup_client(args.host, args.port, '/') + client = setup_client(args.product_url) run_info = check_run_names(client, [args.name]) run_id, run_date = run_info.get(args.name) diff --git a/libcodechecker/cmd/product_client.py b/libcodechecker/cmd/product_client.py new file mode 100644 index 0000000000..a6ae0dba63 --- /dev/null +++ b/libcodechecker/cmd/product_client.py @@ -0,0 +1,115 @@ +# ------------------------------------------------------------------------- +# The CodeChecker Infrastructure +# This file is distributed under the University of Illinois Open Source +# License. See LICENSE.TXT for details. +# ------------------------------------------------------------------------- +""" +Argument handlers for the 'CodeChecker cmd product' subcommands. 
+""" + +import base64 +import sys + +from ProductManagement.ttypes import * + +from libcodechecker.libclient.client import setup_product_client +from libcodechecker.logger import LoggerFactory +from libcodechecker.output_formatters import twodim_to_str + +from cmd_line_client import CmdLineOutputEncoder + +LOG = LoggerFactory.get_new_logger('CMD') + + +def handle_list_products(args): + client = setup_product_client(args.host, args.port) + products = client.getProducts(None, None) + + if args.output_format == 'json': + results = [] + for product in products: + results.append({product.endpoint: product}) + print(CmdLineOutputEncoder().encode(results)) + else: # plaintext, csv + header = ['S', 'Endpoint', 'Name', 'Description'] + rows = [] + for product in products: + name = base64.b64decode(product.displayedName_b64) \ + if product.displayedName_b64 else '' + description = base64.b64decode(product.description_b64) \ + if product.description_b64 else '' + rows.append(('E' if not product.connected + else 'L' if not product.accessible + else '', + product.endpoint, name, description)) + + print(twodim_to_str(args.output_format, header, rows)) + + +def handle_add_product(args): + client = setup_product_client(args.host, args.port) + + # Put together the database connection's descriptor. + if 'postgresql' in args: + db_engine = 'postgresql' + db_host = args.dbaddress + db_port = args.dbport + db_user = args.dbusername + db_pass = args.dbpassword + db_name = args.dbname + else: + db_engine = 'sqlite' + db_host = "" + db_port = 0 + db_user = "" + db_pass = "" + db_name = args.sqlite + + dbc = DatabaseConnection( + engine=db_engine, + host=db_host, + port=db_port, + username_b64=base64.b64encode(db_user), + password_b64=base64.b64encode(db_pass), + database=db_name) + + # Put together the product configuration. + name = base64.b64encode(args.display_name) \ + if 'display_name' in args else None + desc = base64.b64encode(args.description) \ + if 'description' in args else None + + prod = ProductConfiguration( + endpoint=args.product_name, + displayedName_b64=name, + description_b64=desc, + connection=dbc) + + LOG.debug("Sending request to add product...") + success = client.addProduct(prod) + if success: + LOG.info("Product added successfully.") + else: + LOG.error("Adding the product has failed.") + sys.exit(1) + + +def handle_del_product(args): + client = setup_product_client(args.host, args.port) + + # Endpoints substring-match. + products = client.getProducts(args.product_name, None) + products = [product for product in products + if product.endpoint == args.product_name] + + if len(products) == 0: + LOG.error("The product '{0}' does not exist!" 
+ .format(args.product_name)) + return + + success = client.removeProduct(products[0].id) + if success: + LOG.info("Product removed.") + else: + LOG.error("An error occurred in product removal.") + sys.exit(1) diff --git a/libcodechecker/context_base.py b/libcodechecker/context_base.py index 776277c2d4..0e5c00f5b5 100644 --- a/libcodechecker/context_base.py +++ b/libcodechecker/context_base.py @@ -128,9 +128,14 @@ def www_root(self): self.pckg_layout['www']) @property - def migration_root(self): + def run_migration_root(self): return os.path.join(self._package_root, - self.pckg_layout['codechecker_db_migrate']) + self.pckg_layout['run_db_migrate']) + + @property + def config_migration_root(self): + return os.path.join(self._package_root, + self.pckg_layout['config_db_migrate']) @property def db_username(self): diff --git a/libcodechecker/database_handler.py b/libcodechecker/database_handler.py deleted file mode 100644 index 5c6964f562..0000000000 --- a/libcodechecker/database_handler.py +++ /dev/null @@ -1,512 +0,0 @@ -# ------------------------------------------------------------------------- -# The CodeChecker Infrastructure -# This file is distributed under the University of Illinois Open Source -# License. See LICENSE.TXT for details. -# ------------------------------------------------------------------------- -""" -Database server handling. -""" - -from abc import ABCMeta, abstractmethod -import atexit -import os -import subprocess -import sys -import time - -from alembic import command, config -from alembic.util import CommandError -import sqlalchemy -from sqlalchemy import event -from sqlalchemy.engine import Engine -from sqlalchemy.engine.url import URL, make_url -from sqlalchemy.sql.elements import quoted_name - -from libcodechecker import host_check -from libcodechecker import logger -from libcodechecker import pgpass -from libcodechecker import util -from libcodechecker.logger import LoggerFactory -from libcodechecker.orm_model import CC_META -from libcodechecker.orm_model import CreateSession -from libcodechecker.orm_model import DBVersion - -LOG = LoggerFactory.get_new_logger('DB_HANDLER') - - -class SQLServer(object): - """ - Abstract base class for database server handling. An SQLServer instance is - responsible for the initialization, starting, and stopping the database - server, and also for connection string management. - - SQLServer implementations are created via SQLServer.from_cmdline_args(). - - How to add a new database server implementation: - 1, Derive from SQLServer and implement the abstract methods - 2, Add/modify some command line options in CodeChecker.py - 3, Modify SQLServer.from_cmdline_args() in order to create an - instance of the new server type if needed - """ - - __metaclass__ = ABCMeta - - def __init__(self, migration_root): - """ - Sets self.migration_root. migration_root should be the path to the - alembic migration scripts. - """ - - self.migration_root = migration_root - - def _create_or_update_schema(self, use_migration=True): - """ - Creates or updates the database schema. The database server should be - started before this method is called. - - If use_migration is True, this method runs an alembic upgrade to HEAD. - - In the False case, there is no migration support and only SQLAlchemy - meta data is used for schema creation. - - On error sys.exit(1) is called. 
- """ - - try: - db_uri = self.get_connection_string() - engine = SQLServer.create_engine(db_uri) - - LOG.debug('Update/create database schema') - if use_migration: - LOG.debug('Creating new database session') - session = CreateSession(engine) - connection = session.connection() - - cfg = config.Config() - cfg.set_main_option("script_location", self.migration_root) - cfg.attributes["connection"] = connection - command.upgrade(cfg, "head") - - session.commit() - else: - CC_META.create_all(engine) - - engine.dispose() - LOG.debug('Update/create database schema done') - return True - - except sqlalchemy.exc.SQLAlchemyError as alch_err: - LOG.error(str(alch_err)) - sys.exit(1) - except CommandError as cerr: - LOG.error("Database schema and CodeChecker is incompatible." - "Please update CodeChecker.") - LOG.debug(str(cerr)) - sys.exit(1) - - @abstractmethod - def start(self, db_version_info, wait_for_start=True, init=False): - """ - Starts the database server and initializes the database server. - - On wait_for_start == True, this method returns when the server is up - and ready for connections. Otherwise it only starts the server and - returns immediately. - - On init == True, this it also initializes the database data and schema - if needed. - - On error sys.exit(1) should be called. - """ - pass - - @abstractmethod - def stop(self): - """ - Terminates the database server. - - On error sys.exit(1) should be called. - """ - pass - - @abstractmethod - def get_connection_string(self): - """ - Returns the connection string for SQLAlchemy. - - DO NOT LOG THE CONNECTION STRING BECAUSE IT MAY CONTAIN THE PASSWORD - FOR THE DATABASE! - """ - pass - - @staticmethod - def create_engine(connection_string): - """ - Creates a new SQLAlchemy engine. - """ - - if make_url(connection_string).drivername == 'sqlite+pysqlite': - # FIXME: workaround for locking errors - return sqlalchemy.create_engine( - connection_string, - encoding='utf8', - connect_args={'timeout': 600}) - else: - return sqlalchemy.create_engine(connection_string, - encoding='utf8') - - @staticmethod - def from_cmdline_args(args, migration_root, env=None): - """ - Normally only this method is called form outside of this module in - order to instance the proper server implementation. - - Parameters: - args: the command line arguments from CodeChecker.py - migration_root: path to the database migration scripts - env: a run environment dictionary. - """ - - if not host_check.check_sql_driver(args.postgresql): - LOG.error("The selected SQL driver is not available.") - sys.exit(1) - - if args.postgresql: - LOG.debug("Using PostgreSQLServer") - # TODO: This will be refactored eventually so that - # CodeChecker is no longer bringing up a postgres database... - # It is an external dependency, it is an external - # responsibility. Until then, use the default folder now - # for the new commands who no longer define workspace. - if 'dbdatadir' in args: - data_url = args.dbdatadir - else: - data_url = os.path.join(util.get_default_workspace(), - 'pgsql_data') - return PostgreSQLServer(data_url, - migration_root, - args.dbaddress, - args.dbport, - args.dbusername, - args.dbname, - run_env=env) - else: - LOG.debug("Using SQLiteDatabase") - data_file = os.path.abspath(args.sqlite) - return SQLiteDatabase(data_file, migration_root, run_env=env) - - def check_db_version(self, db_version_info, session=None): - """ - Checks the database version and prints an error message on database - version mismatch. 
- - - On mismatching or on missing version a sys.exit(1) is called. - - On missing DBVersion table, it returns False - - On compatible DB version, it returns True - - Parameters: - db_version_info (db_version.DBVersionInfo): required database - version. - session: an open database session or None. If session is None, a - new session is created. - """ - - try: - dispose_engine = False - if session is None: - engine = SQLServer.create_engine(self.get_connection_string()) - dispose_engine = True - session = CreateSession(engine) - else: - engine = session.get_bind() - - if not engine.has_table(quoted_name(DBVersion.__tablename__, - True)): - LOG.debug("Missing DBVersion table!") - return False - - version = session.query(DBVersion).first() - if version is None: - # Version is not populated yet - LOG.error('No version information found in the database.') - sys.exit(1) - elif not db_version_info.is_compatible(version.major, - version.minor): - LOG.error('Version mismatch. Expected database version: ' + - str(db_version_info)) - version_from_db = 'v' + str(version.major) + '.' + str( - version.minor) - LOG.error('Version from the database is: ' + version_from_db) - LOG.error('Please update your database.') - sys.exit(1) - - LOG.debug("Database version is compatible.") - return True - finally: - session.commit() - if dispose_engine: - engine.dispose() - - def _add_version(self, db_version_info, session=None): - """ - Fills the DBVersion table. - """ - - engine = None - if session is None: - engine = SQLServer.create_engine(self.get_connection_string()) - session = CreateSession(engine) - - expected = db_version_info.get_expected_version() - LOG.debug('Adding DB version: ' + str(expected)) - - session.add(DBVersion(expected[0], expected[1])) - session.commit() - - if engine: - engine.dispose() - - LOG.debug('Adding DB version done!') - - -class PostgreSQLServer(SQLServer): - """ - Handler for PostgreSQL. - """ - - def __init__(self, data_url, migration_root, host, port, user, database, - password=None, run_env=None): - super(PostgreSQLServer, self).__init__(migration_root) - - self.path = data_url - self.host = host - self.port = port - self.user = user - self.database = database - self.password = password - self.run_env = run_env - self.workspace = os.path.abspath(os.path.join(data_url, "..")) - - self.proc = None - - def _is_database_data_exist(self): - """Check the PostgreSQL instance existence in a given path.""" - - LOG.debug('Checking for database at ' + self.path) - - return os.path.exists(self.path) and \ - os.path.exists(os.path.join(self.path, 'PG_VERSION')) and \ - os.path.exists(os.path.join(self.path, 'base')) - - def _initialize_database_data(self): - """Initialize a PostgreSQL instance with initdb. """ - - LOG.debug('Initializing database at ' + self.path) - - init_db = ['initdb', - '-U', self.user, - '-D', self.path, - '-E', 'SQL_ASCII'] - - err, code = util.call_command(init_db, self.run_env) - - if code != 0: - LOG.error("Couldn't initialize database. Call to 'initdb' " - "returned {0}.".format(code)) - LOG.error(err) - - return code == 0 - - def _get_connection_string(self, database): - """ - Helper method for getting the connection string for the given database. - - database -- The user can force the database name in the returning - connection string. However the password, if any, provided e.g. in a - .pgpass file will be queried based on the database name which is given - as a command line argument, even if it has a default value. 
The reason - is that sometimes a connection with a common database name is needed, - (e.g. 'postgres'), which requires less user permission. - """ - - port = str(self.port) - driver = host_check.get_postgresql_driver_name() - password = self.password - if driver == 'pg8000' and not password: - pfilepath = os.environ.get('PGPASSFILE') - if pfilepath: - password = pgpass.get_password_from_file(pfilepath, - self.host, - port, - self.database, - self.user) - - extra_args = {'client_encoding': 'utf8'} - return str(URL('postgresql+' + driver, - username=self.user, - password=password, - host=self.host, - port=port, - database=database, - query=extra_args)) - - def _wait_or_die(self): - """ - Wait for database if the database process was stared - with a different client. No polling is possible. - """ - - LOG.debug('Waiting for PostgreSQL') - tries_count = 0 - max_try = 20 - timeout = 5 - while not self._is_running() and tries_count < max_try: - tries_count += 1 - time.sleep(timeout) - - if tries_count >= max_try: - LOG.error('Failed to start database.') - sys.exit(1) - - def _create_database(self): - try: - LOG.debug('Creating new database if not exists.') - - db_uri = self._get_connection_string('postgres') - engine = SQLServer.create_engine(db_uri) - text = \ - "SELECT 1 FROM pg_database WHERE datname='%s'" % self.database - if not bool(engine.execute(text).scalar()): - conn = engine.connect() - # From sqlalchemy documentation: - # The psycopg2 and pg8000 dialects also offer the special level - # AUTOCOMMIT. - conn = conn.execution_options(isolation_level="AUTOCOMMIT") - conn.execute('CREATE DATABASE "%s"' % self.database) - conn.close() - - LOG.debug('Database created: ' + self.database) - - LOG.debug('Database already exists: ' + self.database) - - except sqlalchemy.exc.SQLAlchemyError as alch_err: - LOG.error('Failed to create database!') - LOG.error(str(alch_err)) - sys.exit(1) - - def _is_running(self): - """Is there PostgreSQL instance running on a given host and port.""" - - LOG.debug('Checking if database is running at ' + - self.host + ':' + str(self.port)) - - check_db = ['psql', '-U', self.user, '-c', 'SELECT version();', - '-p', str(self.port), '-h', self.host, '-d', 'postgres'] - err, code = util.call_command(check_db, self.run_env) - return code == 0 - - def start(self, db_version_info, wait_for_start=True, init=False): - """ - Start a PostgreSQL instance with given path, host and port. - Return with process instance. - """ - - LOG.debug('Starting/connecting to database.') - if not self._is_running(): - if not util.is_localhost(self.host): - LOG.info('Database is not running yet.') - sys.exit(1) - - if not self._is_database_data_exist(): - if not init: - # The database does not exists. - LOG.error('Database data is missing!') - LOG.error('Please check your configuration!') - sys.exit(1) - elif not self._initialize_database_data(): - # The database does not exist and cannot create. 
- LOG.error('Database data is missing and ' - 'the initialization of a new failed!') - LOG.error('Please check your configuration!') - sys.exit(1) - - LOG.info('Starting database') - LOG.debug('Starting database at ' + self.host + ':' + str( - self.port) + ' ' + self.path) - - db_logfile = os.path.join(self.workspace, 'postgresql.log') \ - if LoggerFactory.get_log_level() == logger.DEBUG \ - else os.devnull - self._db_log = open(db_logfile, 'wb') - - start_db = ['postgres', '-i', - '-D', self.path, - '-p', str(self.port), - '-h', self.host] - self.proc = subprocess.Popen(start_db, - bufsize=-1, - env=self.run_env, - stdout=self._db_log, - stderr=subprocess.STDOUT) - - add_version = False - if init: - self._wait_or_die() - self._create_database() - add_version = not self.check_db_version(db_version_info) - self._create_or_update_schema(use_migration=False) - elif wait_for_start: - self._wait_or_die() - add_version = not self.check_db_version(db_version_info) - - if add_version: - self._add_version(db_version_info) - - atexit.register(self.stop) - LOG.debug('Done') - - def stop(self): - if self.proc: - LOG.debug('Terminating database') - self.proc.terminate() - self._db_log.close() - - def get_connection_string(self): - return self._get_connection_string(self.database) - - -class SQLiteDatabase(SQLServer): - """ - Handler for SQLite. - """ - - def __init__(self, data_file, migration_root, run_env=None): - super(SQLiteDatabase, self).__init__(migration_root) - - self.dbpath = data_file - self.run_env = run_env - - def _set_sqlite_pragma(dbapi_connection, connection_record): - cursor = dbapi_connection.cursor() - cursor.execute("PRAGMA foreign_keys=ON") - cursor.close() - - event.listen(Engine, 'connect', _set_sqlite_pragma) - - def start(self, db_version_info, wait_for_start=True, init=False): - if init: - add_version = not self.check_db_version(db_version_info) - self._create_or_update_schema(use_migration=False) - if add_version: - self._add_version(db_version_info) - - if not os.path.exists(self.dbpath): - # The database does not exists - LOG.error('Database (%s) is missing!' % self.dbpath) - sys.exit(1) - - def stop(self): - pass - - def get_connection_string(self): - return str(URL('sqlite+pysqlite', None, None, None, None, self.dbpath)) diff --git a/libcodechecker/generic_package_context.py b/libcodechecker/generic_package_context.py index 27bc28464f..fe3a8c597d 100644 --- a/libcodechecker/generic_package_context.py +++ b/libcodechecker/generic_package_context.py @@ -46,7 +46,8 @@ def __init__(self, package_root, pckg_layout, cfg_dict): self.__package_root = package_root self.__package_version = None - self.__db_version_info = None + self.__product_db_version_info = None + self.__run_db_version_info = None self.__package_build_date = None self.__package_git_hash = None self.__analyzers = {} @@ -82,14 +83,18 @@ def __set_version(self): package_git_hash = vfile_data['git_hash'] package_git_tag = vfile_data['git_describe']['tag'] package_git_dirtytag = vfile_data['git_describe']['dirty'] - database_version = vfile_data['db_version'] + product_database_version = vfile_data['product_db_version'] + run_database_version = vfile_data['run_db_version'] self.__package_version = package_version['major'] + '.' + \ package_version['minor'] + '.' 
+ \ package_version['revision'] - self.__db_version_info = db_version.DBVersionInfo( - database_version['major'], - database_version['minor']) + self.__product_db_version_info = db_version.DBVersionInfo( + product_database_version['major'], + product_database_version['minor']) + self.__run_db_version_info = db_version.DBVersionInfo( + run_database_version['major'], + run_database_version['minor']) self.__package_build_date = package_build_date self.__package_git_hash = package_git_hash @@ -154,8 +159,12 @@ def package_git_tag(self): return self.__package_git_tag @property - def db_version_info(self): - return self.__db_version_info + def product_db_version_info(self): + return self.__product_db_version_info + + @property + def run_db_version_info(self): + return self.__run_db_version_info @property def version_file(self): diff --git a/libcodechecker/libclient/authentication_helper.py b/libcodechecker/libclient/authentication_helper.py index bc3f826c2d..dd73b03e0f 100644 --- a/libcodechecker/libclient/authentication_helper.py +++ b/libcodechecker/libclient/authentication_helper.py @@ -50,11 +50,13 @@ def wrapper(self, *args, **kwargs): if reqfailure.error_code == shared.ttypes.ErrorCode.DATABASE: print('Database error on server') print(str(reqfailure.message)) - if reqfailure.error_code ==\ + elif reqfailure.error_code ==\ shared.ttypes.ErrorCode.AUTH_DENIED: + print('Authentication denied.') raise reqfailure - if reqfailure.error_code ==\ + elif reqfailure.error_code ==\ shared.ttypes.ErrorCode.UNAUTHORIZED: + print('Unauthorised.') raise reqfailure else: print('Other error') diff --git a/libcodechecker/libclient/client.py b/libcodechecker/libclient/client.py index 017b988386..1b627c93b7 100644 --- a/libcodechecker/libclient/client.py +++ b/libcodechecker/libclient/client.py @@ -7,20 +7,21 @@ import getpass import sys -from Authentication import ttypes as AuthTypes - from thrift.Thrift import TApplicationException -from . import thrift_helper -from . import authentication_helper +import shared +from Authentication import ttypes as AuthTypes from libcodechecker import session_manager from libcodechecker.logger import LoggerFactory +from libcodechecker.util import split_product_url -import shared +from . import authentication_helper +from . import thrift_helper +from . import product_helper LOG = LoggerFactory.get_new_logger('CLIENT') -SUPPORTED_VERSION = '6.0' +SUPPORTED_API_VERSION = '6.0' def check_api_version(client): @@ -29,7 +30,7 @@ def check_api_version(client): """ version = client.getAPIVersion() - supp_major_version = SUPPORTED_VERSION.split('.')[0] + supp_major_version = SUPPORTED_API_VERSION.split('.')[0] api_major_version = version.split('.')[0] # There is NO compatibility between major versions. @@ -37,11 +38,8 @@ def check_api_version(client): def handle_auth(host, port, username, login=False): - session = session_manager.SessionManager_Client() - auth_token = session.getToken(host, port) - auth_client = authentication_helper.ThriftAuthHelper(host, port, '/Authentication', @@ -64,9 +62,6 @@ def handle_auth(host, port, username, login=False): if auth_token and handshake.sessionStillActive: LOG.info("You are already logged in.") return - else: - LOG.info("Server requires authentication to access. 
Please use " - "'CodeChecker cmd login' to authenticate.") except TApplicationException: LOG.info("This server does not support privileged access.") @@ -107,19 +102,12 @@ def handle_auth(host, port, username, login=False): sys.exit(1) -def setup_client(host, port, uri): - """ - Stup the thrift client and check API version and authentication needs. - """ - manager = session_manager.SessionManager_Client() - session_token = manager.getToken(host, port) - +def perform_auth_for_handler(manager, host, port, session_token): # Before actually communicating with the server, # we need to check authentication first. auth_client = authentication_helper.ThriftAuthHelper(host, port, - uri + - 'Authentication', + '/Authentication', session_token) try: auth_response = auth_client.getAuthParameters() @@ -143,6 +131,7 @@ def setup_client(host, port, uri): manager.saveToken(host, port, session_token) LOG.info("Authenticated using pre-configured " "credentials.") + return session_token except shared.ttypes.RequestFailed: print_err = True else: @@ -156,7 +145,92 @@ def setup_client(host, port, uri): "login'.") sys.exit(1) - client = thrift_helper.ThriftClientHelper(host, port, uri, session_token) + +def setup_product_client(host, port, product_name=None): + """ + Setup the Thrift client for the product management endpoint. + """ + + # Check if the user has authenticated. + manager = session_manager.SessionManager_Client() + session_token = manager.getToken(host, port) + session_token_new = perform_auth_for_handler(manager, host, + port, session_token) + if session_token_new: + session_token = session_token_new + + if not product_name: + # Attach to the server-wide product service. + product_client = product_helper.ThriftProductHelper( + host, port, "/Products", session_token) + else: + # Attach to the product service and provide a product name + # as "viewpoint" from which the product service is called. + product_client = product_helper.ThriftProductHelper( + host, port, "/" + product_name + "/Products", session_token) + + # However, in this case, the specified product might not be existing, + # which makes subsequent calls to this API crash (server sends + # HTTP 500 Internal Server Error error page). + if not product_client.getAPIVersion(): + LOG.error("The product '{0}' cannot be communicated with. It " + "either doesn't exist, or the server's configuration " + "is bogus.".format(product_name)) + sys.exit(1) + + if not check_api_version(product_client): + LOG.critical("The server uses a newer version of the API which is " + "incompatible with this client. Please update client.") + sys.exit(1) + + return product_client + + +def setup_client(product_url): + """ + Setup the Thrift client and check API version and authentication needs. + """ + + try: + host, port, product_name = split_product_url(product_url) + except: + LOG.error("Malformed product URL was provided.") + sys.exit(2) # 2 for argument error. + + # Check if the user has authenticated. + manager = session_manager.SessionManager_Client() + session_token = manager.getToken(host, port) + session_token_new = perform_auth_for_handler(manager, host, + port, session_token) + if session_token_new: + session_token = session_token_new + + # Check if the product exists. + product_client = setup_product_client(host, port, + product_name=None) + product = product_client.getProducts(product_name, None) + product_error_str = None + if not (product and len(product) == 1): + product_error_str = "It does not exist." 
+ else: + if product[0].endpoint != product_name: + # Only a "substring" match was found. We explicitly reject it + # on the command-line! + product_error_str = "It does not exist." + elif not product[0].connected: + product_error_str = "The database has issues, or the connection " \ + "is badly configured." + elif not product[0].accessible: + product_error_str = "You do not have access." + + if product_error_str: + LOG.error("The given product '{0}' can not be used! {1}" + .format(product_name, product_error_str)) + sys.exit(1) + + client = thrift_helper.ThriftClientHelper( + host, port, "/" + product_name + "/CodeCheckerService", session_token) + # Test if client can work with the server's API. if not check_api_version(client): LOG.critical("The server uses a newer version of the API which is " diff --git a/libcodechecker/libclient/product_helper.py b/libcodechecker/libclient/product_helper.py new file mode 100644 index 0000000000..0308276766 --- /dev/null +++ b/libcodechecker/libclient/product_helper.py @@ -0,0 +1,105 @@ +# ------------------------------------------------------------------------- +# The CodeChecker Infrastructure +# This file is distributed under the University of Illinois Open Source +# License. See LICENSE.TXT for details. +# ------------------------------------------------------------------------- + +import os +# import datetime +import socket + +from thrift.transport import THttpClient +from thrift.protocol import TJSONProtocol +from thrift.protocol.TProtocol import TProtocolException + +import shared +from ProductManagement import codeCheckerProductService + +from libcodechecker import session_manager + + +class ThriftProductHelper(object): + def __init__(self, host, port, uri, session_token=None): + self.__host = host + self.__port = port + self.transport = THttpClient.THttpClient(self.__host, self.__port, uri) + self.protocol = TJSONProtocol.TJSONProtocol(self.transport) + self.client = codeCheckerProductService.Client(self.protocol) + + if session_token: + headers = {'Cookie': session_manager.SESSION_COOKIE_NAME + + "=" + session_token} + self.transport.setCustomHeaders(headers) + + # ------------------------------------------------------------ + + def ThriftClientCall(function): + # print type(function) + funcName = function.__name__ + + def wrapper(self, *args, **kwargs): + # print('['+host+':'+str(port)+'] >>>>> ['+funcName+']') + # before = datetime.datetime.now() + self.transport.open() + func = getattr(self.client, funcName) + try: + res = func(*args, **kwargs) + return res + except shared.ttypes.RequestFailed as reqfailure: + if reqfailure.error_code == shared.ttypes.ErrorCode.DATABASE: + print('Database error on server') + print(str(reqfailure.message)) + elif reqfailure.error_code ==\ + shared.ttypes.ErrorCode.AUTH_DENIED: + print('Authentication denied.') + raise reqfailure + elif reqfailure.error_code ==\ + shared.ttypes.ErrorCode.UNAUTHORIZED: + print('Unauthorised.') + raise reqfailure + else: + print('Other error') + print(str(reqfailure)) + except TProtocolException as ex: + print("Connection failed to {0}:{1}" + .format(self.__host, self.__port)) + except socket.error as serr: + errCause = os.strerror(serr.errno) + print(errCause) + print(str(serr)) + finally: + # after = datetime.datetime.now() + # timediff = after - before + # diff = timediff.microseconds/1000 + # print('['+str(diff)+'ms] <<<<< ['+host+':'+str(port)+']') + # print res + self.transport.close() + + return wrapper + + # 
----------------------------------------------------------------------- + @ThriftClientCall + def getAPIVersion(self): + pass + + @ThriftClientCall + def getPackageVersion(self): + pass + + # ----------------------------------------------------------------------- + @ThriftClientCall + def getProducts(self, product_endpoint_filter, product_name_filter): + pass + + @ThriftClientCall + def getCurrentProduct(self): + pass + + # ----------------------------------------------------------------------- + @ThriftClientCall + def addProduct(self, product): + pass + + @ThriftClientCall + def removeProduct(self, product_id): + pass diff --git a/libcodechecker/libclient/thrift_helper.py b/libcodechecker/libclient/thrift_helper.py index bc9f995a3b..c33da14396 100644 --- a/libcodechecker/libclient/thrift_helper.py +++ b/libcodechecker/libclient/thrift_helper.py @@ -44,16 +44,15 @@ def wrapper(self, *args, **kwargs): try: res = func(*args, **kwargs) return res - except shared.ttypes.RequestFailed as reqfailure: if reqfailure.error_code == shared.ttypes.ErrorCode.DATABASE: print('Database error on server') print(str(reqfailure.message)) - if reqfailure.error_code ==\ + elif reqfailure.error_code ==\ shared.ttypes.ErrorCode.AUTH_DENIED: print('Authentication denied') print(str(reqfailure.message)) - if reqfailure.error_code ==\ + elif reqfailure.error_code ==\ shared.ttypes.ErrorCode.UNAUTHORIZED: print('Unauthorized to access') print(str(reqfailure.message)) @@ -72,8 +71,8 @@ def wrapper(self, *args, **kwargs): print(errCause) print(str(serr)) print("Check if your CodeChecker server is running.") - - self.transport.close() + finally: + self.transport.close() return wrapper diff --git a/libcodechecker/libhandlers/check.py b/libcodechecker/libhandlers/check.py index e1c407227e..bf901662b4 100644 --- a/libcodechecker/libhandlers/check.py +++ b/libcodechecker/libhandlers/check.py @@ -5,8 +5,8 @@ # ------------------------------------------------------------------------- """ Check implements a wrapper over 'log' + 'analyze' + 'store', essentially -giving an easy way to perform analysis from a log command and print results to -stdout. +giving an easy way to perform analysis from a log command and push them to a +remove CodeChecker server. """ import argparse @@ -18,6 +18,7 @@ from libcodechecker.analyze.analyzers import analyzer_types from libcodechecker.logger import add_verbose_arguments from libcodechecker.logger import LoggerFactory +from libcodechecker.util import split_product_url LOG = LoggerFactory.get_new_logger('CHECK') @@ -309,23 +310,18 @@ def add_arguments_to_parser(parser): server_args = parser.add_argument_group( "server arguments", "Specifies a 'CodeChecker server' instance which will be used to " - "store the results. This server must be running and listening prior " - "to the 'store' command being ran.") + "store the results. 
This server must be running and listening, and " + "the given product must exist prior to the 'check' command being ran.") - server_args.add_argument('--host', + server_args.add_argument('--url', type=str, - dest="host", + metavar='PRODUCT_URL', + dest="product_url", + default="localhost:8001/Default", required=False, - default="localhost", - help="The IP address or hostname of the " - "CodeChecker server.") - - server_args.add_argument('-p', '--port', - type=int, - dest="port", - required=False, - default=8001, - help="The port of the server to use for storing.") + help="The URL of the product to store the " + "results for, in the format of " + "host:port/ProductName.") # TODO: These arguments have been retroactively removed from 'store' # and are deprecated here. They should be completely removed. @@ -489,8 +485,7 @@ def __update_if_key_exists(source, target, key): input=[report_dir], input_format='plist', force=args.force, - host=args.host, - port=args.port + product_url=args.product_url ) # Some arguments don't have default values. # We can't set these keys to None because it would result in an error @@ -507,9 +502,11 @@ def __update_if_key_exists(source, target, key): store_module.main(store_args) # Show a hint for server start. + host, port, product_url = split_product_url(args.product_url) LOG.info("To view results, open the CodeChecker server " - "'http://{0}:{1}' in your browser".format(args.host, - args.port)) + "'http://{0}:{1}/{2}' in your browser".format(host, + port, + product_url)) except ImportError: LOG.error("Check failed: couldn't import a library.") except Exception as ex: diff --git a/libcodechecker/libhandlers/cmd.py b/libcodechecker/libhandlers/cmd.py index 896c1e6363..d7363bb804 100644 --- a/libcodechecker/libhandlers/cmd.py +++ b/libcodechecker/libhandlers/cmd.py @@ -11,9 +11,11 @@ import argparse import getpass import datetime +import sys from libcodechecker import output_formatters from libcodechecker.cmd import cmd_line_client +from libcodechecker.cmd import product_client from libcodechecker.logger import add_verbose_arguments from libcodechecker.logger import LoggerFactory @@ -60,27 +62,46 @@ def get_argparser_ctor_args(): } -def __add_common_arguments(parser, has_matrix_output=False): +def __add_common_arguments(parser, + needs_product_url=True, + has_matrix_output=False): """ Add some common arguments, like server address and verbosity, to parser. """ common_group = parser.add_argument_group('common arguments') - common_group.add_argument('--host', - type=str, - dest="host", - default="localhost", - required=False, - help="The address of the CodeChecker viewer " - "server to connect to.") - - common_group.add_argument('-p', '--port', - type=int, - dest="port", - default=8001, - required=False, - help="The port the server is running on.") + if needs_product_url is None: + # Explicitly not add anything, the command does not connect to a + # server. + pass + elif needs_product_url: + # Command connects to a product on a server. + common_group.add_argument('--url', + type=str, + metavar='PRODUCT_URL', + dest="product_url", + default="localhost:8001/Default", + required=False, + help="The URL of the product to store the " + "results for, in the format of " + "host:port/ProductName.") + else: + # Command connects to a server directly. 
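# ---------------------------------------------------------------------------
# Illustrative sketch (not part of this patch): the '--url PRODUCT_URL'
# option above and the 'check'/'store' handlers rely on
# libcodechecker.util's split_product_url(), which this diff does not show.
# Assuming the documented "host:port/ProductName" format, a minimal version
# could look roughly like the following; the real helper may accept more
# URL shapes (e.g. a missing port or product falling back to the defaults).

def split_product_url(product_url):
    """Split 'host:port/ProductName' into (host, port, product_name)."""
    host_port, _, product_name = product_url.partition('/')
    host, _, port = host_port.partition(':')
    if not (host and port and product_name):
        raise ValueError("Expected host:port/ProductName, got '{0}'"
                         .format(product_url))
    return host, int(port), product_name

# split_product_url("localhost:8001/Default")
#   -> ("localhost", 8001, "Default")
# ---------------------------------------------------------------------------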
+ common_group.add_argument('--host', + type=str, + dest="host", + default="localhost", + required=False, + help="The address of the CodeChecker " + "server to connect to.") + + common_group.add_argument('-p', '--port', + type=int, + dest="port", + default=8001, + required=False, + help="The port the server is running on.") if has_matrix_output: common_group.add_argument('-o', '--output', @@ -309,7 +330,227 @@ def __register_suppress(parser): "the database.") -def __register_auth(parser): +def __register_products(parser): + """ + Add argparse subcommand parser for the "product management" action. + """ + + def __register_add(parser): + """ + Add argparse subcommand parser for the "add new product" action. + """ + parser.add_argument("product_name", + type=str, + metavar='PRODUCT_NAME', + default=argparse.SUPPRESS, + help="The URL endpoint where clients can access " + "the analysis results for this product.") + + parser.add_argument('-n', '--name', + type=str, + dest="display_name", + default=argparse.SUPPRESS, + required=False, + help="A custom display name for the product, " + "which will be shown in the viewer. This " + "is purely for decoration and user " + "experience, program calls use the " + "PRODUCT_NAME.") + + parser.add_argument('--description', + type=str, + dest="description", + default=argparse.SUPPRESS, + required=False, + help="A custom textual description to be shown " + "alongside the product.") + + dbmodes = parser.add_argument_group( + "database arguments", + "NOTE: These database arguments are relative to the server " + "machine, as it is the server which will make the database " + "connection.") + + dbmodes = dbmodes.add_mutually_exclusive_group(required=False) + + SQLITE_PRODUCT_NAME_DEFAULT_VAR = '.sqlite' + dbmodes.add_argument('--sqlite', + type=str, + dest="sqlite", + metavar='SQLITE_FILE', + default=SQLITE_PRODUCT_NAME_DEFAULT_VAR, + required=False, + help="Path of the SQLite database file to use. " + "Not absolute paths will be relative to " + "the server's .") + + dbmodes.add_argument('--postgresql', + dest="postgresql", + action='store_true', + required=False, + default=argparse.SUPPRESS, + help="Specifies that a PostgreSQL database is " + "to be used instead of SQLite. See the " + "\"PostgreSQL arguments\" section on how " + "to configure the database connection.") + + PGSQL_PRODUCT_NAME_DEFAULT_VAR = '' + pgsql = parser.add_argument_group( + "PostgreSQL arguments", + "Values of these arguments are ignored, unless '--postgresql' is " + "specified! The database specified here must exist, and be " + "connectible by the server.") + + # TODO: --dbSOMETHING arguments are kept to not break interface from + # old command. Database using commands such as "CodeChecker store" no + # longer supports these --- it would be ideal to break and remove args + # with this style and only keep --db-SOMETHING. 
+ pgsql.add_argument('--dbaddress', '--db-host', + type=str, + dest="dbaddress", + default="localhost", + required=False, + help="Database server address.") + + pgsql.add_argument('--dbport', '--db-port', + type=int, + dest="dbport", + default=5432, + required=False, + help="Database server port.") + + pgsql.add_argument('--dbusername', '--db-username', + type=str, + dest="dbusername", + default=PGSQL_PRODUCT_NAME_DEFAULT_VAR, + required=False, + help="Username to use for connection.") + + pgsql.add_argument('--dbpassword', '--db-password', + type=str, + dest="dbpassword", + default="", + required=False, + help="Password to use for authenticating the " + "connection.") + + pgsql.add_argument('--dbname', '--db-name', + type=str, + dest="dbname", + default=PGSQL_PRODUCT_NAME_DEFAULT_VAR, + required=False, + help="Name of the database to use.") + + def __handle(args): + """Custom handler for 'add' so custom error messages can be + printed without having to capture 'parser' in main.""" + + def arg_match(options): + """Checks and selects the option string specified in 'options' + that are present in the invocation argv.""" + matched_args = [] + for option in options: + if any([arg if option.startswith(arg) else None + for arg in sys.argv[1:]]): + matched_args.append(option) + continue + + return matched_args + + # See if there is a "PostgreSQL argument" specified in the + # invocation without '--postgresql' being there. There is no way + # to distinguish a default argument and a deliberately specified + # argument without inspecting sys.argv. + options = ['--dbaddress', '--dbport', '--dbusername', '--dbname', + '--db-host', '--db-port', '--db-username', '--db-name'] + psql_args_matching = arg_match(options) + if any(psql_args_matching) and \ + 'postgresql' not in args: + first_matching_arg = next(iter([match for match + in psql_args_matching])) + parser.error("argument {0}: not allowed without argument " + "--postgresql".format(first_matching_arg)) + # parser.error() terminates with return code 2. + + # Some arguments get a dynamic default value that depends on the + # value of another argument. + if args.sqlite == SQLITE_PRODUCT_NAME_DEFAULT_VAR: + args.sqlite = args.product_name + '.sqlite' + + if args.dbusername == PGSQL_PRODUCT_NAME_DEFAULT_VAR: + args.dbusername = args.product_name + + if args.dbname == PGSQL_PRODUCT_NAME_DEFAULT_VAR: + args.dbname = args.product_name + + if 'postgresql' not in args: + # The --db-SOMETHING arguments are irrelevant if --postgresql + # is not used. + delattr(args, 'dbaddress') + delattr(args, 'dbport') + delattr(args, 'dbusername') + delattr(args, 'dbpassword') + delattr(args, 'dbname') + else: + # If --postgresql is given, --sqlite is useless. + delattr(args, 'sqlite') + + # If everything is fine, do call the handler for the subcommand. + product_client.handle_add_product(args) + + parser.set_defaults(func=__handle) + + def __register_del(parser): + """ + Add argparse subcommand parser for the "delete product" action. + """ + parser.add_argument("product_name", + type=str, + metavar='PRODUCT_NAME', + default=argparse.SUPPRESS, + help="The URL endpoint where clients can access " + "the analysis results for the product.") + + subcommands = parser.add_subparsers(title='available actions') + + # Create handlers for individual subcommands. + list_p = subcommands.add_parser( + 'list', + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + description="List the name and basic information about products " + "added to the server. 
The 'S' status column of the " + "product is 'E' if the product database is errorneous, " + "and 'L' if you don't have access to the product.", + help="List products available on the server.") + list_p.set_defaults(func=product_client.handle_list_products) + __add_common_arguments(list_p, + needs_product_url=False, has_matrix_output=True) + + add = subcommands.add_parser( + 'add', + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + description="Create a new product to be managed by the server by " + "providing the product's details and database connection.", + help="Register a new product to the server.") + __register_add(add) + __add_common_arguments(add, needs_product_url=False) + + del_p = subcommands.add_parser( + 'del', + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + description="Removes the specified product from the list of products " + "managed by the server. NOTE: This only removes the " + "association and disconnects the server from the " + "database -- NO actual ANALYSIS DATA is REMOVED. " + "Configuration, such as access control, however, WILL BE " + "LOST!", + help="Delete a product from the server's products.") + __register_del(del_p) + del_p.set_defaults(func=product_client.handle_del_product) + __add_common_arguments(del_p, needs_product_url=False) + + +def __register_login(parser): """ Add argparse subcommand parser for the "handle authentication" action. """ @@ -396,6 +637,23 @@ def add_arguments_to_parser(parser): suppress.set_defaults(func=cmd_line_client.handle_suppress) __add_common_arguments(suppress) + products = subcommands.add_parser( + 'products', + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + description="CodeChecker organises its databases into products. " + "Each product has an individually configured database " + "which stores the analysis results. These subcommands " + "are used to manage the products configured by the " + "server. Please see the individual subcommands for " + "details.", + epilog="Most of these commands require authentication and " + "appropriate access rights. Please see 'CodeChecker cmd " + "login' to authenticate.", + help="Access subcommands related to configuring the products managed " + "by a CodeChecker server.") + __register_products(products) + __add_common_arguments(products, needs_product_url=None) + login = subcommands.add_parser( 'login', formatter_class=argparse.ArgumentDefaultsHelpFormatter, @@ -405,9 +663,9 @@ def add_arguments_to_parser(parser): "action is used to perform an authentication in the " "command-line.", help="Authenticate into CodeChecker servers that require privileges.") - __register_auth(login) + __register_login(login) login.set_defaults(func=cmd_line_client.handle_login) - __add_common_arguments(login) + __add_common_arguments(login, needs_product_url=False) # 'cmd' does not have a main() method in itself, as individual subcommands are # handled later on separately. diff --git a/libcodechecker/libhandlers/plist.py b/libcodechecker/libhandlers/plist.py index 5d1cf67163..9a7e577607 100644 --- a/libcodechecker/libhandlers/plist.py +++ b/libcodechecker/libhandlers/plist.py @@ -8,6 +8,8 @@ functionality of 'store' and 'parse'. """ +# TODO: This command should be removed as a whole. 
+ import argparse import os import sys diff --git a/libcodechecker/libhandlers/server.py b/libcodechecker/libhandlers/server.py index e024635dd5..a6523f24a9 100644 --- a/libcodechecker/libhandlers/server.py +++ b/libcodechecker/libhandlers/server.py @@ -22,11 +22,16 @@ from libcodechecker import session_manager from libcodechecker import util from libcodechecker.analyze import analyzer_env -from libcodechecker.database_handler import SQLServer -from libcodechecker.logger import add_verbose_arguments from libcodechecker.logger import LoggerFactory +from libcodechecker.logger import add_verbose_arguments from libcodechecker.server import client_db_access_server +from libcodechecker.server import database_handler from libcodechecker.server import instance_manager +from libcodechecker.server.config_db_model \ + import IDENTIFIER as CONFIG_META +from libcodechecker.server.run_db_model \ + import IDENTIFIER as RUN_META + LOG = LoggerFactory.get_new_logger('SERVER') @@ -111,35 +116,7 @@ def add_arguments_to_parser(parser): "can access the server over the Internet. " "(Equivalent to specifying '--host \"\"'.)") - # TODO: Refactor the tests so that these arguments can be eliminated. - # These values have absolutely no use outside what the automated tests - # specify. - checksrv = parser.add_argument_group( - "analysis result storage server", - "(These options have no actual use apart from an internal usage in " - "the automated testing of CodeChecker features.)") - - checksrv.add_argument('--check-address', - type=str, - dest="check_address", - default="localhost", - required=False, - help="Set on which IP address or hostname the " - "analysis result server should listen.") - - checksrv.add_argument('--check-port', - type=int, - dest="check_port", - default=argparse.SUPPRESS, - required=None, - help="Also start an analysis result storage server " - "alongside the \"viewer\". This server could " - "be used by remote clients to store new " - "analysis results to the database the viewer " - "server is using. Set on which port the " - "analysis result server should listen.") - - dbmodes = parser.add_argument_group("database arguments") + dbmodes = parser.add_argument_group("configuration database arguments") dbmodes = dbmodes.add_mutually_exclusive_group(required=False) @@ -148,8 +125,8 @@ def add_arguments_to_parser(parser): dest="sqlite", metavar='SQLITE_FILE', default=os.path.join( - util.get_default_workspace(), - "codechecker.sqlite"), + '', + "config.sqlite"), required=False, help="Path of the SQLite database file to use.") @@ -167,8 +144,6 @@ def add_arguments_to_parser(parser): "Values of these arguments are ignored, " "unless '--postgresql' is specified!") - # WARNING: '--dbaddress' default value influences workspace creation - # in SQLite. # TODO: --dbSOMETHING arguments are kept to not break interface from # old command. Database using commands such as "CodeChecker store" no # longer supports these --- it would be ideal to break and remove args @@ -197,7 +172,7 @@ def add_arguments_to_parser(parser): pgsql.add_argument('--dbname', '--db-name', type=str, dest="dbname", - default="codechecker", + default="config", required=False, help="Name of the database to use.") @@ -299,18 +274,23 @@ def arg_match(options): parser.error("argument --config-directory: not allowed with " "argument --workspace") - # If workspace is specified, sqlite is workspace/codechecker.sqlite + # If workspace is specified, sqlite is workspace/config.sqlite # and config_directory is the workspace directory. 
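+        # (E.g. '--workspace /home/user/ws' results in
+        #  config_directory == '/home/user/ws' and
+        #  sqlite == '/home/user/ws/config.sqlite'.)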
if len(arg_match(['--workspace', '-w'])) > 0: args.config_directory = args.workspace args.sqlite = os.path.join(args.workspace, - 'codechecker.sqlite') + 'config.sqlite') setattr(args, 'dbdatadir', os.path.join(args.workspace, 'pgsql_data')) # Workspace should not exist as a Namespace key. delattr(args, 'workspace') + if '' in args.sqlite: + # Replace the placeholder variable with the actual value. + args.sqlite = args.sqlite.replace('', + args.config_directory) + if 'postgresql' not in args: # Later called database modules need the argument to be actually # present, even though the default is suppressed in the optstring. @@ -421,17 +401,43 @@ def main(args): check_env = analyzer_env.get_check_env(context.path_env_extra, context.ld_lib_path_extra) - sql_server = SQLServer.from_cmdline_args(args, - context.migration_root, - check_env) + # Create the main database link from the arguments passed over the + # command line. + default_product_path = os.path.join(args.config_directory, + 'Default.sqlite') + create_default_product = 'sqlite' in args and \ + not os.path.exists(args.sqlite) and \ + not os.path.exists(default_product_path) - LOG.debug("Starting database server.") - sql_server.start(context.db_version_info, wait_for_start=True, - init=True) + sql_server = database_handler.SQLServer.from_cmdline_args( + vars(args), CONFIG_META, context.config_migration_root, + interactive=True, env=check_env) - # Start database viewer. - db_connection_string = sql_server.get_connection_string() + LOG.debug("Connecting to product configuration database.") + sql_server.connect(context.product_db_version_info, init=True) + + if create_default_product: + # Create a default product and add it to the configuration database. + LOG.debug("Create default product...") + LOG.debug("Configuring schema and migration...") + + prod_server = database_handler.SQLiteDatabase( + default_product_path, RUN_META, + context.run_migration_root, check_env) + prod_server.connect(context.run_db_version_info, init=True) + + LOG.debug("Connecting database engine for default product") + product_conn_string = prod_server.get_connection_string() + LOG.debug("Default database created and connected.") + + client_db_access_server.add_initial_run_database( + sql_server, product_conn_string) + + LOG.info("Product 'Default' at '{0}' created and set up." + .format(default_product_path)) + + # Start database viewer. 
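+    # (The single web server started below serves the product list at '/'
+    #  and routes '/<endpoint>/...' requests to the corresponding product's
+    #  own run database; see client_db_access_server for the routing.)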
checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs') checker_md_docs_map = os.path.join(checker_md_docs, 'checker_doc_map.json') @@ -445,12 +451,14 @@ def main(args): 'version': context.package_git_tag} try: - client_db_access_server.start_server(package_data, + client_db_access_server.start_server(args.config_directory, + package_data, args.view_port, - db_connection_string, + sql_server, suppress_handler, args.listen_address, - context) + context, + check_env) except socket.error as err: if err.errno == errno.EADDRINUSE: LOG.error("Server can't be started, maybe the given port number " diff --git a/libcodechecker/libhandlers/store.py b/libcodechecker/libhandlers/store.py index 17a71ca242..71f5d1cd71 100644 --- a/libcodechecker/libhandlers/store.py +++ b/libcodechecker/libhandlers/store.py @@ -29,6 +29,7 @@ from libcodechecker.libclient.client import setup_client from libcodechecker.logger import add_verbose_arguments from libcodechecker.logger import LoggerFactory +from libcodechecker.util import split_product_url LOG = LoggerFactory.get_new_logger('STORE') @@ -134,23 +135,18 @@ def add_arguments_to_parser(parser): server_args = parser.add_argument_group( "server arguments", "Specifies a 'CodeChecker server' instance which will be used to " - "store the results. This server must be running and listening prior " - "to the 'store' command being ran.") + "store the results. This server must be running and listening, and " + "the given product must exist prior to the 'store' command being ran.") - server_args.add_argument('--host', + server_args.add_argument('--url', type=str, - dest="host", + metavar='PRODUCT_URL', + dest="product_url", + default="localhost:8001/Default", required=False, - default="localhost", - help="The IP address or hostname of the " - "CodeChecker server.") - - server_args.add_argument('-p', '--port', - type=int, - dest="port", - required=False, - default=8001, - help="The port of the server to use for storing.") + help="The URL of the product to store the " + "results for, in the format of " + "host:port/ProductName.") add_verbose_arguments(parser) parser.set_defaults(func=main) @@ -316,11 +312,12 @@ def main(args): LOG.info("argument --force was specified: the run with name '" + args.name + "' will be deleted.") - # setup connection to the remote server - client = setup_client(args.host, args.port, '/') + # Setup connection to the remote server. + client = setup_client(args.product_url) - LOG.debug("Initializing client connecting to " + - str(args.host) + ":" + str(args.port) + " done.") + host, port, product_name = split_product_url(args.product_url) + LOG.debug("Initializing client connecting to {0}:{1}/{2} done." 
+ .format(host, port, product_name)) _, zip_file = tempfile.mkstemp('.zip') LOG.debug("Will write mass store ZIP to '{0}'...".format(zip_file)) diff --git a/libcodechecker/libhandlers/version.py b/libcodechecker/libhandlers/version.py index 206911704e..aa4b9af6d9 100644 --- a/libcodechecker/libhandlers/version.py +++ b/libcodechecker/libhandlers/version.py @@ -10,7 +10,7 @@ import argparse import json -from codeCheckerDBAccess import constants +from shared import constants from libcodechecker import generic_package_context from libcodechecker import output_formatters @@ -66,7 +66,8 @@ def main(args): ("Package build date", context.package_build_date), ("Git commit ID (hash)", context.package_git_hash), ("Git tag information", context.package_git_tag), - ("Database schema version", str(context.db_version_info)), + ("Configuration schema version", str(context.product_db_version_info)), + ("Database schema version", str(context.run_db_version_info)), ("Client API version (Thrift)", constants.API_VERSION) ] diff --git a/libcodechecker/server/client_db_access_handler.py b/libcodechecker/server/client_db_access_handler.py index 8524c03a34..c8bfdfe892 100644 --- a/libcodechecker/server/client_db_access_handler.py +++ b/libcodechecker/server/client_db_access_handler.py @@ -35,10 +35,11 @@ from libcodechecker.analyze import plist_parser from libcodechecker.analyze import store_handler from libcodechecker.logger import LoggerFactory -from libcodechecker.orm_model import * from libcodechecker.profiler import timeit -LOG = LoggerFactory.get_new_logger('ACCESS HANDLER') +from run_db_model import * + +LOG = LoggerFactory.get_new_logger('RUN ACCESS HANDLER') def conv(text): @@ -1847,7 +1848,7 @@ def __get_hashes_for_runs(self, session, base_run_ids, new_run_ids): @timeit def getAPIVersion(self): # Returns the thrift api version. - return constants.API_VERSION + return shared.constants.API_VERSION # ----------------------------------------------------------------------- @timeit diff --git a/libcodechecker/server/client_db_access_server.py b/libcodechecker/server/client_db_access_server.py index ba9367b395..6e4679400d 100644 --- a/libcodechecker/server/client_db_access_server.py +++ b/libcodechecker/server/client_db_access_server.py @@ -9,12 +9,14 @@ """ import atexit import base64 +import datetime import errno from multiprocessing.pool import ThreadPool import os import posixpath import socket import urllib +import urlparse try: from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler @@ -24,24 +26,43 @@ SimpleHTTPRequestHandler from sqlalchemy.orm import sessionmaker -from thrift.transport import TTransport from thrift.protocol import TJSONProtocol +from thrift.transport import TTransport from Authentication import codeCheckerAuthentication -from Authentication.ttypes import * from codeCheckerDBAccess import codeCheckerDBAccess -from codeCheckerDBAccess.ttypes import * +from ProductManagement import codeCheckerProductService -from libcodechecker import database_handler from libcodechecker import session_manager from libcodechecker.logger import LoggerFactory +from . import database_handler from . 
import instance_manager from client_auth_handler import ThriftAuthHandler from client_db_access_handler import ThriftRequestHandler +from config_db_model import Product as ORMProduct +from product_db_access_handler import ThriftProductHandler +from run_db_model import IDENTIFIER as RUN_META LOG = LoggerFactory.get_new_logger('DB ACCESS') +# A list of top-level path elements under the webserver root +# which should not be considered as a product route. +NON_PRODUCT_NAMES = ['products.html', + 'index.html', + 'fonts', + 'images', + 'scripts', + 'style' + ] + +# A list of top-level path elements in requests (such as Thrift endpoints) +# which should not be considered as a product route. +NON_PRODUCT_NAMES += ['Authentication', + 'Products', + 'CodeCheckerService' + ] + class RequestHandler(SimpleHTTPRequestHandler): """ @@ -50,21 +71,16 @@ class RequestHandler(SimpleHTTPRequestHandler): """ def __init__(self, request, client_address, server): - self.Session = server.Session - - self.db_version_info = server.db_version_info - self.manager = server.manager - BaseHTTPRequestHandler.__init__(self, request, client_address, server) def log_message(self, msg_format, *args): - """ Silenting http server. """ + """ Silencing http server. """ return - def check_auth_in_request(self): + def __check_auth_in_request(self): """ Wrapper to handle authentication needs from both GET and POST requests. Returns a session object if correct cookie is presented or creates a @@ -72,7 +88,7 @@ def check_auth_in_request(self): present. """ - if not self.manager.isEnabled(): + if not self.server.manager.isEnabled(): return None success = None @@ -90,9 +106,10 @@ def check_auth_in_request(self): values = cookie.split("=") if len(values) == 2 and \ values[0] == session_manager.SESSION_COOKIE_NAME: - if self.manager.is_valid(values[1], True): + if self.server.manager.is_valid(values[1], True): # The session cookie contains valid data. - success = self.manager.get_session(values[1], True) + success = self.server.manager.get_session(values[1], + True) if success is None: # Session cookie was invalid (or not found...) @@ -106,7 +123,8 @@ def check_auth_in_request(self): self.headers.getheader("Authorization"). replace("Basic ", "")) - session = self.manager.create_or_get_session(authString) + session = self.server.manager.create_or_get_session( + authString) if session: LOG.info("Client from " + client_host + ":" + str(client_port) + @@ -123,31 +141,122 @@ def check_auth_in_request(self): return success + def __get_product_name(self): + """ + Get product name from the request's URI. + """ + + # A standard request from a browser looks like: + # http://localhost:8001/[product-name]/#{request-parts} + # where the parts are, e.g.: run=[run_id]&report=[report_id] + # + # Rewrite the "product-name" so that the web-server deploys the + # viewer client from the www/ folder. + + # The split array looks like ['', 'product-name', ...]. 
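+        # (E.g. for '/Default/index.html' the split yields
+        #  ['', 'Default', 'index.html'], so 'Default' is returned; for a
+        #  resource request such as '/style/...' the first part, 'style',
+        #  is listed in NON_PRODUCT_NAMES, so None is returned instead.)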
+ first_part = urlparse.urlparse(self.path).path.split('/', 2)[1] + return first_part if first_part not in NON_PRODUCT_NAMES else None + def do_GET(self): - auth_session = self.check_auth_in_request() - if not self.manager.isEnabled() or auth_session: - self.send_response(200) - if auth_session: - self.send_header("Set-Cookie", - session_manager.SESSION_COOKIE_NAME + "=" + - auth_session.token + "; Path=/") - SimpleHTTPRequestHandler.do_GET(self) - else: - self.send_response(401) - self.send_header("WWW-Authenticate", 'Basic realm="' + - self.manager.getRealm()["realm"] + '"') + """ + Handles the webbrowser access (GET requests). + """ + + LOG.info("{0}:{1} -- GET {2}".format(self.client_address[0], + str(self.client_address[1]), + self.path)) + + auth_session = self.__check_auth_in_request() + if self.server.manager.isEnabled() and not auth_session: + realm = self.server.manager.getRealm()["realm"] + error_body = self.server.manager.getRealm()["error"] + + self.send_response(401) # 401 Unauthorised + self.send_header("WWW-Authenticate", + 'Basic realm="{0}"'.format(realm)) self.send_header("Content-type", "text/plain") - self.send_header("Content-length", str(len( - self.manager.getRealm()["error"]))) + self.send_header("Content-length", str(len(error_body))) self.send_header('Connection', 'close') self.end_headers() - self.wfile.write(self.manager.getRealm()["error"]) + self.wfile.write(error_body) + return + else: + product_name = self.__get_product_name() + + if product_name is not None and product_name != '': + if not self.server.get_product(product_name): + LOG.info("Product named '{0}' does not exist." + .format(product_name)) + self.send_error( + 404, + "The product {0} does not exist.".format(product_name)) + return + + if product_name + '/' not in self.path: + # /prod must be routed to /prod/index.html first, so later + # queries for web resources are '/prod/style...' as + # opposed to '/style...', which would result in "style" + # being considered product name. + LOG.debug("Redirecting user from /{0} to /{0}/index.html" + .format(product_name)) + + # WARN: Browsers cache '308 Permanent Redirect' responses, + # in the event of debugging this, use Private Browsing! + self.send_response(308) + self.send_header("Location", + self.path.replace(product_name, + product_name + '/', 1)) + self.end_headers() + return + else: + # Serves the main page and the resources: + # /prod/(index.html) -> /(index.html) + # /prod/styles/(...) -> /styles/(...) + LOG.debug("Product routing before " + self.path) + self.path = self.path.replace( + "{0}/".format(product_name), "", 1) + LOG.debug("Product routing after: " + self.path) + else: + if self.path in ['/', '/index.html']: + only_product = self.server.get_only_product() + if only_product and only_product.connected: + LOG.debug("Redirecting '/' to ONLY product '/{0}'" + .format(only_product.endpoint)) + + self.send_response(307) # 307 Temporary Redirect + self.send_header("Location", + '/{0}'.format(only_product.endpoint)) + self.end_headers() + return + + # Route homepage queries to serving the product list. + LOG.debug("Serving product list as homepage.") + self.path = '/products.html' + else: + # The path requested does not specify a product: it is most + # likely a resource file. + LOG.debug("Serving resource '{0}'".format(self.path)) + + self.send_response(200) # 200 OK + if auth_session is not None: + # Browsers get a standard cookie for session. 
+ self.send_header( + "Set-Cookie", + "{0}={1}; Path=/".format( + session_manager.SESSION_COOKIE_NAME, + auth_session.token)) + + SimpleHTTPRequestHandler.do_GET(self) def do_POST(self): - """ Handling thrift messages. """ + """ + Handles POST queries, which are usually Thrift messages. + """ + client_host, client_port = self.client_address - LOG.debug("Processing request from {0}:{1}".format(client_host, - str(client_port))) + LOG.info("{0}:{1} -- POST {2}".format(client_host, + str(client_port), + self.path)) # Create new thrift handler. checker_md_docs = self.server.checker_md_docs @@ -160,7 +269,6 @@ def do_POST(self): output_protocol_factory = protocol_factory itrans = TTransport.TFileObjectTransport(self.rfile) - otrans = TTransport.TFileObjectTransport(self.wfile) itrans = TTransport.TBufferedTransport(itrans, int(self.headers[ 'Content-Length'])) @@ -169,8 +277,8 @@ def do_POST(self): iprot = input_protocol_factory.getProtocol(itrans) oprot = output_protocol_factory.getProtocol(otrans) - auth_session = self.check_auth_in_request() - if self.manager.isEnabled() and self.path != '/Authentication' \ + auth_session = self.__check_auth_in_request() + if self.server.manager.isEnabled() and self.path != '/Authentication' \ and not auth_session: # Bail out if the user is not authenticated... # This response has the possibility of melting down Thrift clients, @@ -179,37 +287,72 @@ def do_POST(self): LOG.debug(client_host + ":" + str(client_port) + " Invalid access, credentials not found " + "- session refused.") - self.send_response(401) - self.send_header("Content-type", "text/plain") - self.send_header("Content-length", str(0)) - self.end_headers() - + self.send_error(401) return # Authentication is handled, we may now respond to the user. try: - if self.path == '/Authentication': - # Authentication requests must be routed to a different - # handler. - - auth_handler = ThriftAuthHandler(self.manager, + product_name = self.__get_product_name() + request_endpoint = self.path.replace("/{0}".format(product_name), + "", 1) + + product = None + session_makers = {} + if product_name: + # The current request came through a product route, and not + # to the main endpoint. + product = self.server.get_product(product_name) + if not product.connected: + # If the product is not connected, try reconnecting... + LOG.debug("Request's product '{0}' is not connected! " + "Attempting reconnect...".format(product_name)) + product.connect() + + if not product.connected: + # If the reconnection fails, send an error to the user. + self.send_error( # 500 Internal Server Error + 500, "Product '{0}' database connection failed!" + .format(product_name)) + return + + session_makers['run_db'] = product.session_factory + + if request_endpoint == '/Authentication': + auth_handler = ThriftAuthHandler(self.server.manager, auth_session) processor = codeCheckerAuthentication.Processor(auth_handler) - else: - if auth_session: - LOG.debug("Accessing as user " + auth_session.user) - else: - LOG.debug("Unauthenticated access.") - - acc_handler = ThriftRequestHandler(self.Session, - auth_session, - checker_md_docs, - checker_md_docs_map, - suppress_handler, - self.db_version_info, - version) - + elif request_endpoint == '/Products': + # The product server needs its own endpoint to the + # configuration database. 
+ session_makers['product_db'] = self.server.product_session + + prod_handler = ThriftProductHandler( + self.server, + session_makers['product_db'], + product, + version) + processor = codeCheckerProductService.Processor(prod_handler) + elif request_endpoint == '/CodeCheckerService': + # This endpoint is a product's report_server. + if not product: + self.send_error( # 404 Not Found + 404, "The specified product '{0}' does not exist!" + .format(product_name)) + return + + acc_handler = ThriftRequestHandler( + session_makers['run_db'], + auth_session, + checker_md_docs, + checker_md_docs_map, + suppress_handler, + self.server.context.run_db_version_info, + version) processor = codeCheckerDBAccess.Processor(acc_handler) + else: + self.send_error(404, # 404 Not Fount + "No endpoint named '{0}'.".format(self.path)) + return processor.process(iprot, oprot) result = otrans.getvalue() @@ -254,6 +397,129 @@ def translate_path(self, path): return path +class Product(object): + """ + Represents a product, which is a distinct storage of analysis reports in + a separate database (and database connection) with its own access control. + """ + + # The amount of SECONDS that need to pass after the last unsuccessful + # connect() call so the next could be made. + CONNECT_RETRY_TIMEOUT = 300 + + def __init__(self, orm_object, context, check_env): + """ + Set up a new managed product object for the configuration given. + """ + self.__orm_object = orm_object + self.__context = context + self.__check_env = check_env + self.__engine = None + self.__session = None + self.__connected = False + + self.__last_connect_attempt = None + + @property + def id(self): + return self.__orm_object.id + + @property + def endpoint(self): + """ + Returns the accessible URL endpoint of the product. + """ + return self.__orm_object.endpoint + + @property + def name(self): + """ + Returns the display name of the product. + """ + return self.__orm_object.display_name + + @property + def session_factory(self): + """ + Returns the session maker on this product's database engine which + can be used to initiate transactional connections. + """ + return self.__session + + @property + def connected(self): + """ + Returns whether the product has a valid connection to the managed + database. + """ + return self.__connected + + @property + def last_connection_failure(self): + """ + Returns the reason behind the last executed connection attempt's + failure. + """ + return self.__last_connect_attempt[1] if self.__last_connect_attempt \ + else None + + def connect(self): + """ + Initiates the actual connection to the database configured for the + product. + """ + if self.__connected: + return + + if self.__last_connect_attempt and \ + (datetime.datetime.now() - self.__last_connect_attempt[0]). \ + total_seconds() <= Product.CONNECT_RETRY_TIMEOUT: + return + + LOG.debug("Connecting database for product '{0}'". + format(self.__orm_object.endpoint)) + + # We need to connect to the database and perform setting up the + # schema. + LOG.debug("Configuring schema and migration...") + sql_server = database_handler.SQLServer.from_connection_string( + self.__orm_object.connection, + RUN_META, + self.__context.config_migration_root, + interactive=False, + env=self.__check_env) + + try: + sql_server.connect(self.__context.run_db_version_info, init=True) + + # Create the SQLAlchemy engine. 
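+            # (The session factory created just below is what do_POST()
+            #  passes to the report handler as session_makers['run_db']:
+            #  each product gets its own sessionmaker bound to its own
+            #  engine.)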
+ LOG.debug("Connecting database engine") + self.__engine = sql_server.create_engine() + self.__session = sessionmaker(bind=self.__engine) + + self.__connected = True + self.__last_connect_attempt = None + LOG.debug("Database connected.") + except Exception as ex: + LOG.error("The database for product '{0}' cannot be connected to." + .format(self.__orm_object.endpoint)) + LOG.error(ex.message) + self.__connected = False + self.__last_connect_attempt = (datetime.datetime.now(), ex.message) + + def teardown(self): + """ + Disposes the database connection to the product's backend. + """ + if not self.__connected: + return + + self.__engine.dispose() + + self.__session = None + self.__engine = None + + class CCSimpleHttpServer(HTTPServer): """ Simple http server to handle requests from the clients. @@ -264,29 +530,41 @@ class CCSimpleHttpServer(HTTPServer): def __init__(self, server_address, RequestHandlerClass, - db_conn_string, + config_directory, + product_db_sql_server, pckg_data, suppress_handler, - db_version_info, + context, + check_env, manager): LOG.debug("Initializing HTTP server...") + self.config_directory = config_directory self.www_root = pckg_data['www_root'] self.doc_root = pckg_data['doc_root'] self.checker_md_docs = pckg_data['checker_md_docs'] self.checker_md_docs_map = pckg_data['checker_md_docs_map'] self.version = pckg_data['version'] self.suppress_handler = suppress_handler - self.db_version_info = db_version_info - self.__engine = database_handler.SQLServer.create_engine( - db_conn_string) - - self.Session = sessionmaker(bind=self.__engine) + self.context = context + self.check_env = check_env self.manager = manager + self.__products = {} - self.__request_handlers = ThreadPool(processes=10) + # Create a database engine for the configuration database. + LOG.debug("Creating database engine for CONFIG DATABASE...") + self.__engine = product_db_sql_server.create_engine() + self.product_session = sessionmaker(bind=self.__engine) + + # Load the initial list of products and create the connections. + sess = self.product_session() + products = sess.query(ORMProduct).all() + for product in products: + self.add_product(product) + sess.close() + self.__request_handlers = ThreadPool(processes=10) try: HTTPServer.__init__(self, server_address, RequestHandlerClass, @@ -297,7 +575,7 @@ def __init__(self, def process_request_thread(self, request, client_address): try: - # Finish_request instatiates request handler class. + # Finish_request instantiates request handler class. self.finish_request(request, client_address) self.shutdown_request(request) except socket.error as serr: @@ -315,9 +593,48 @@ def process_request(self, request, client_address): self.__request_handlers.apply_async(self.process_request_thread, (request, client_address)) + def add_product(self, orm_product): + """ + Adds a product to the list of product databases connected to + by the server. + """ + if orm_product.endpoint in self.__products: + raise Exception("This product is already configured!") + + LOG.info("Setting up product '{0}'".format(orm_product.endpoint)) + conn = Product(orm_product, self.context, self.check_env) + self.__products[conn.endpoint] = conn + + conn.connect() + + def get_product(self, endpoint): + """ + Get the product connection object for the given endpoint, or None. + """ + return self.__products.get(endpoint, None) + + def get_only_product(self): + """ + Returns the Product object for the only product connected to by the + server, or None, if there are 0 or >= 2 products managed. 
+ """ + return self.__products.items()[0][1] if len(self.__products) == 1 \ + else None + + def remove_product(self, endpoint): + product = self.get_product(endpoint) + if not product: + raise ValueError("The product with the given endpoint '{0}' does " + "not exist!".format(endpoint)) + + LOG.info("Disconnecting product '{0}'".format(endpoint)) + product.teardown() + + del self.__products[endpoint] -def start_server(package_data, port, db_conn_string, suppress_handler, - listen_address, context): + +def start_server(config_directory, package_data, port, db_conn_string, + suppress_handler, listen_address, context, check_env): """ Start http server to handle web client and thrift requests. """ @@ -330,10 +647,12 @@ def start_server(package_data, port, db_conn_string, suppress_handler, http_server = CCSimpleHttpServer(server_addr, RequestHandler, + config_directory, db_conn_string, package_data, suppress_handler, - context.db_version_info, + context, + check_env, session_manager.SessionManager()) try: @@ -361,3 +680,32 @@ def unregister_handler(pid): atexit.register(unregister_handler, os.getpid()) http_server.serve_forever() LOG.info("Webserver quit.") + + +def add_initial_run_database(config_sql_server, product_connection): + """ + Create a default run database as SQLite in the config directory, + and add it to the list of products in the config database specified by + db_conn_string. + """ + + # Connect to the configuration database + LOG.debug("Creating database engine for CONFIG DATABASE...") + __engine = config_sql_server.create_engine() + product_session = sessionmaker(bind=__engine) + + # Load the initial list of products and create the connections. + sess = product_session() + products = sess.query(ORMProduct).all() + if len(products) != 0: + raise ValueError("Called create_initial_run_database on non-empty " + "config database -- you shouldn't have done this!") + + LOG.debug("Adding default product to the config db...") + product = ORMProduct('Default', product_connection, 'Default', + "Default product created at server start.") + sess.add(product) + sess.commit() + sess.close() + + LOG.debug("Default product set up.") diff --git a/libcodechecker/server/config_db_model.py b/libcodechecker/server/config_db_model.py new file mode 100644 index 0000000000..3365496691 --- /dev/null +++ b/libcodechecker/server/config_db_model.py @@ -0,0 +1,93 @@ +# ------------------------------------------------------------------------- +# The CodeChecker Infrastructure +# This file is distributed under the University of Illinois Open Source +# License. See LICENSE.TXT for details. +# ------------------------------------------------------------------------- +""" +SQLAlchemy ORM model for the product configuration database. +""" + +from __future__ import print_function +from __future__ import unicode_literals + +from sqlalchemy import * +from sqlalchemy.ext.declarative import declarative_base + +CC_META = MetaData(naming_convention={ + "ix": 'ix_%(column_0_label)s', + "uq": "uq_%(table_name)s_%(column_0_name)s", + "ck": "ck_%(table_name)s_%(column_0_name)s", + "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", + "pk": "pk_%(table_name)s" +}) + +# Create base class for ORM classes. +Base = declarative_base(metadata=CC_META) + + +class DBVersion(Base): + __tablename__ = 'db_version' + # TODO: constraint, only one line in this table! 
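+    # The single (major, minor) row stored here is compared against the
+    # package's DBVersionInfo by SQLServer.check_db_version() in
+    # libcodechecker/server/database_handler.py.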
+ major = Column(Integer, primary_key=True) + minor = Column(Integer, primary_key=True) + + def __init__(self, major, minor): + self.major = major + self.minor = minor + + +# class Superuser(Base): +# __tablename__ = 'superusers' +# +# id = Column(Integer, autoincrement=True, primary_key=True) +# name = Column(String, nullable=False) +# is_group = Column(Boolean, nullable=False) +# +# def __init__(self, name, is_group): +# self.name = name +# self.is_group = is_group + + +class Product(Base): + __tablename__ = 'products' + + __table_args__ = ( + UniqueConstraint('endpoint'), + ) + + id = Column(Integer, autoincrement=True, primary_key=True) + endpoint = Column(String, nullable=False) + connection = Column(String, nullable=False) + display_name = Column(String, nullable=False) + description = Column(Text) + + def __init__(self, endpoint, conn_str, name=None, description=None): + self.endpoint = endpoint + self.connection = conn_str + self.display_name = name if name else endpoint + self.description = description + + +# class ProductAdmin(Base): +# __tablename__ = 'product_admins' +# +# id = Column(Integer, autoincrement=True, primary_key=True) +# product_id = Column(Integer, +# ForeignKey('products.id', +# deferrable=False, +# initially='IMMEDIATE', +# ondelete='CASCADE'), +# nullable=False) +# name = Column(String, nullable=False) +# is_group = Column(Boolean, nullable=False) +# +# def __init__(self, product_id, name, is_group): +# self.product_id = product_id +# self.name = name +# self.is_group = is_group + +IDENTIFIER = { + 'identifier': "ConfigDatabase", + 'orm_meta': CC_META, + 'version_class': DBVersion +} diff --git a/libcodechecker/server/database_handler.py b/libcodechecker/server/database_handler.py new file mode 100644 index 0000000000..e467412536 --- /dev/null +++ b/libcodechecker/server/database_handler.py @@ -0,0 +1,439 @@ +# ------------------------------------------------------------------------- +# The CodeChecker Infrastructure +# This file is distributed under the University of Illinois Open Source +# License. See LICENSE.TXT for details. +# ------------------------------------------------------------------------- +""" +Database connection handling to a database backend. +""" + +from abc import ABCMeta, abstractmethod +import os +import threading + +from alembic import command, config +from alembic.util import CommandError +import sqlalchemy +from sqlalchemy import event +from sqlalchemy.engine.url import URL, make_url +from sqlalchemy.orm import sessionmaker +from sqlalchemy.sql.elements import quoted_name + +from libcodechecker import host_check +from libcodechecker import pgpass +from libcodechecker import util +from libcodechecker.logger import LoggerFactory + +LOG = LoggerFactory.get_new_logger('DATABASE HANDLER') + + +class SQLServer(object): + """ + Abstract base class for database server handling. An SQLServer instance is + responsible for the connection management towards the database. + + SQLServer implementations are created via SQLServer.from_cmdline_args(). + + How to add a new database server implementation: + 1, Derive from SQLServer and implement the abstract methods + 2, Add/modify some command line options in CodeChecker.py + 3, Modify SQLServer.from_cmdline_args() in order to create an + instance of the new server type if needed + """ + + __metaclass__ = ABCMeta + + def __init__(self, model_meta, migration_root): + """ + Sets self.migration_root. migration_root should be the path to the + alembic migration scripts. 
+ + Also sets the created class' model identifier to the given meta dict. + """ + + self.__model_meta = model_meta + self.migration_root = migration_root + + def _create_or_update_schema(self, use_migration=True): + """ + Creates or updates the database schema. The database server has to be + started before this method is called. + + If use_migration is True, this method runs an alembic upgrade to HEAD. + + In the False case, there is no migration support and only SQLAlchemy + meta data is used for schema creation. + + On error sys.exit(1) is called. + """ + + try: + engine = self.create_engine() + + LOG.debug("Update/create database schema for {0}" + .format(self.__model_meta['identifier'])) + if use_migration: + LOG.debug("Creating new database session") + session = sessionmaker(bind=engine)() + connection = session.connection() + + cfg = config.Config() + cfg.set_main_option("script_location", self.migration_root) + cfg.attributes["connection"] = connection + command.upgrade(cfg, "head") + + session.commit() + session.close() + else: + LOG.debug("Creating full schema.") + self.__model_meta['orm_meta'].create_all(engine) + + engine.dispose() + LOG.debug("Update/create database schema: Done") + return True + + except sqlalchemy.exc.SQLAlchemyError as alch_err: + LOG.error(str(alch_err)) + raise + except CommandError as cerr: + LOG.error("Database schema and CodeChecker is incompatible." + "Please update CodeChecker.") + LOG.debug(str(cerr)) + raise + + @abstractmethod + def connect(self, db_version_info, init=False): + """ + Starts the database server and initializes the database server. + + On init == True, this it also initializes the database data and schema + if needed. + + On error sys.exit(1) should be called. + """ + pass + + @abstractmethod + def get_connection_string(self): + """ + Returns the connection string for SQLAlchemy. + + DO NOT LOG THE CONNECTION STRING BECAUSE IT MAY CONTAIN THE PASSWORD + FOR THE DATABASE! + """ + pass + + def get_model_identifier(self): + return self.__model_meta['identifier'] + + def _register_engine_hooks(self, engine): + """ + This method registers hooks, if needed, related to the engine created + by create_engine. + """ + pass + + def create_engine(self): + """ + Creates a new SQLAlchemy engine. + """ + + if make_url(self.get_connection_string()).drivername == \ + 'sqlite+pysqlite': + # FIXME: workaround for locking errors + engine = sqlalchemy.create_engine(self.get_connection_string(), + encoding='utf8', + connect_args={'timeout': 600}) + else: + engine = sqlalchemy.create_engine(self.get_connection_string(), + encoding='utf8') + + self._register_engine_hooks(engine) + return engine + + @staticmethod + def from_connection_string(connection_string, model_meta, migration_root, + interactive=False, env=None): + """ + Normally only this method is called form outside of this module in + order to instance the proper server implementation. + + Parameters: + args: the command line arguments from CodeChecker.py + model_meta: the meta identifier of the database model to use + migration_root: path to the database migration scripts + env: a run environment dictionary. + """ + + url = make_url(connection_string) + + # Create an args for from_cmdline_args. 
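+        # (E.g. 'postgresql://codechecker@localhost:5432/config' becomes
+        #  {'postgresql': True, 'dbaddress': 'localhost', 'dbport': 5432,
+        #   'dbusername': 'codechecker', 'dbpassword': None,
+        #   'dbname': 'config'}, while 'sqlite:///Default.sqlite' becomes
+        #  {'postgresql': False, 'sqlite': 'Default.sqlite'}.)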
+ args = {} + if 'postgresql' in url.drivername: + args['postgresql'] = True + args['dbaddress'] = url.host + args['dbport'] = url.port + args['dbusername'] = url.username + args['dbpassword'] = url.password + args['dbname'] = url.database + elif 'sqlite' in url.drivername: + args['postgresql'] = False + args['sqlite'] = url.database + + return SQLServer.from_cmdline_args(args, model_meta, migration_root, + interactive, env) + + @staticmethod + def from_cmdline_args(args, model_meta, migration_root, + interactive=False, env=None): + """ + Normally only this method is called form outside of this module in + order to instance the proper server implementation. + + Parameters: + args: the command line arguments from CodeChecker.py, but as a + dictionary (if argparse.Namespace, use vars(args)). + model_meta: the meta identifier of the database model to use + migration_root: path to the database migration scripts + interactive: whether or not the database connection can be + interactive on the server's shell. + env: a run environment dictionary. + """ + + if not host_check.check_sql_driver(args['postgresql']): + LOG.error("The selected SQL driver is not available!") + raise IOError("The SQL driver to be used is not available!") + + if args['postgresql']: + LOG.debug("Using PostgreSQL:") + return PostgreSQLServer(model_meta, + migration_root, + args['dbaddress'], + args['dbport'], + args['dbusername'], + args['dbname'], + password=args['dbpassword'] + if 'dbpassword' in args else None, + interactive=interactive, + run_env=env) + else: + LOG.debug("Using SQLite:") + data_file = os.path.abspath(args['sqlite']) + LOG.debug("Database at " + data_file) + return SQLiteDatabase(data_file, model_meta, + migration_root, run_env=env) + + def check_db_version(self, db_version_info, session=None): + """ + Checks the database version and prints an error message on database + version mismatch. + + - On mismatching or on missing version a sys.exit(1) is called. + - On missing DBVersion table, it returns False + - On compatible DB version, it returns True + + Parameters: + db_version_info (db_version.DBVersionInfo): required database + version. + session: an open database session or None. If session is None, a + new session is created. + """ + + DBVersion = self.__model_meta['version_class'] + + try: + dispose_engine = False + if session is None: + engine = self.create_engine() + dispose_engine = True + session = sessionmaker(bind=engine)() + else: + engine = session.get_bind() + + if not engine.has_table(quoted_name(DBVersion.__tablename__, + True)): + LOG.debug("Missing DBVersion table!") + return False + + version = session.query(DBVersion).first() + if version is None: + # Version is not populated yet + raise ValueError("No version information found in " + "the database.") + elif not db_version_info.is_compatible(version.major, + version.minor): + LOG.error("Version mismatch. Expected database version: " + + str(db_version_info)) + version_from_db = 'v' + str(version.major) + '.' + str( + version.minor) + LOG.error("Version from the database is: " + version_from_db) + LOG.error("Please update your database.") + raise ValueError("Version mismatch in database!") + + LOG.debug("Database version is compatible.") + return True + finally: + session.commit() + session.close() + if dispose_engine: + engine.dispose() + + def _add_version(self, db_version_info, session=None): + """ + Fills the DBVersion table. 
+ """ + + engine = None + if session is None: + engine = self.create_engine() + session = sessionmaker(bind=engine)() + + expected = db_version_info.get_expected_version() + LOG.debug("Adding DB version: " + str(expected)) + + DBVersion = self.__model_meta['version_class'] + session.add(DBVersion(expected[0], expected[1])) + session.commit() + session.close() + + if engine: + engine.dispose() + + LOG.debug("Adding DB version done!") + + +class PostgreSQLServer(SQLServer): + """ + Handler for PostgreSQL. + """ + + def __init__(self, model_meta, migration_root, host, port, user, database, + password=None, interactive=False, run_env=None): + super(PostgreSQLServer, self).__init__(model_meta, migration_root) + + self.host = host + self.port = port + self.user = user + self.database = database + self.password = password + self.interactive = interactive + self.run_env = run_env + + def _get_connection_string(self, database): + """ + Helper method for getting the connection string for the given database. + + database -- The user can force the database name in the returning + connection string. However the password, if any, provided e.g. in a + .pgpass file will be queried based on the database name which is given + as a command line argument, even if it has a default value. The reason + is that sometimes a connection with a common database name is needed, + (e.g. 'postgres'), which requires less user permission. + """ + + driver = host_check.get_postgresql_driver_name() + password = self.password + if driver == 'pg8000' and not password: + pfilepath = os.environ.get('PGPASSFILE') + if pfilepath: + password = pgpass.get_password_from_file(pfilepath, + self.host, + str(self.port), + self.database, + self.user) + + extra_args = {'client_encoding': 'utf8'} + return str(URL('postgresql+' + driver, + username=self.user, + password=password, + host=self.host, + port=str(self.port), + database=database, + query=extra_args)) + + def connect(self, db_version_info, init=False): + """ + Connect to a PostgreSQL instance with given path, host and port. + """ + + LOG.debug("Connecting to database...") + + LOG.debug("Checking if database is running at [{0}:{1}]" + .format(self.host, str(self.port))) + + check_db = ['psql', + '-h', self.host, + '-p', str(self.port), + '-U', self.user, + '-d', self.database, + '-c', 'SELECT version();'] + + if not self.interactive: + # Do not prompt for password in non-interactive mode. + check_db.append('--no-password') + + # If the user has a password pre-specified, use that for the + # 'psql' call! + env = self.run_env if self.run_env else os.environ() + env = env.copy() + if self.password: + env['PGPASSWORD'] = self.password + + err, code = util.call_command(check_db, env) + + if code: + LOG.error("Database is not running, or cannot be connected to.") + LOG.error(err) + raise IOError( + "Database is not running, or cannot be connected to.") + + add_version = False + if init: + add_version = not self.check_db_version(db_version_info) + self._create_or_update_schema(use_migration=False) + + if add_version: + self._add_version(db_version_info) + + LOG.debug("Done. Connected to database.") + + def get_connection_string(self): + return self._get_connection_string(self.database) + + +class SQLiteDatabase(SQLServer): + """ + Handler for SQLite. 
+ """ + + def __init__(self, data_file, model_meta, migration_root, run_env=None): + super(SQLiteDatabase, self).__init__(model_meta, migration_root) + + self.dbpath = data_file + self.run_env = run_env + + def _register_engine_hooks(self, engine): + """ + SQLite databases need FOREIGN KEYs to be enabled, which is handled + through this connection hook. + """ + def _set_sqlite_pragma(dbapi_connection, connection_record): + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() + + event.listen(engine, 'connect', _set_sqlite_pragma) + + def connect(self, db_version_info, init=False): + if init: + add_version = not self.check_db_version(db_version_info) + self._create_or_update_schema(use_migration=False) + if add_version: + self._add_version(db_version_info) + + if not os.path.exists(self.dbpath): + LOG.error("Database file (%s) is missing!" % self.dbpath) + raise IOError("Database file (%s) is missing!" % self.dbpath) + + def get_connection_string(self): + return str(URL('sqlite+pysqlite', None, None, None, None, self.dbpath)) diff --git a/libcodechecker/server/instance_manager.py b/libcodechecker/server/instance_manager.py index ddf1cbbbe8..7678b3a819 100644 --- a/libcodechecker/server/instance_manager.py +++ b/libcodechecker/server/instance_manager.py @@ -17,12 +17,15 @@ import stat -def __getInstanceDescriptorPath(): - return os.path.join(os.path.expanduser("~"), ".codechecker.instances.json") +def __getInstanceDescriptorPath(folder=None): + if not folder: + folder = os.path.expanduser("~") + return os.path.join(folder, ".codechecker.instances.json") -def __makeInstanceDescriptorFile(): - descriptor = __getInstanceDescriptorPath() + +def __makeInstanceDescriptorFile(folder=None): + descriptor = __getInstanceDescriptorPath(folder) if not os.path.exists(descriptor): with open(descriptor, 'w') as f: json.dump([], f) @@ -49,12 +52,12 @@ def __checkInstance(hostname, pid): return False -def __rewriteInstanceFile(append, remove): +def __rewriteInstanceFile(append, remove, folder=None): """This helper method reads the user's instance descriptor and manages it eliminating dead records, appending new ones and reserialising the file.""" - __makeInstanceDescriptorFile() - with open(__getInstanceDescriptorPath(), 'r+') as f: + __makeInstanceDescriptorFile(folder) + with open(__getInstanceDescriptorPath(folder), 'r+') as f: portalocker.lock(f, portalocker.LOCK_EX) # After reading, check every instance if they are still valid and @@ -77,7 +80,7 @@ def __rewriteInstanceFile(append, remove): portalocker.unlock(f) -def register(pid, workspace, port): +def register(pid, workspace, port, folder=None): """ Adds the specified CodeChecker server instance to the user's instance descriptor. @@ -87,24 +90,25 @@ def register(pid, workspace, port): "hostname": socket.gethostname(), "workspace": workspace, "port": port}], - []) + [], + folder) -def unregister(pid): +def unregister(pid, folder=None): """ Removes the specified CodeChecker server instance from the user's instance descriptor. """ - __rewriteInstanceFile([], [socket.gethostname() + ":" + str(pid)]) + __rewriteInstanceFile([], [socket.gethostname() + ":" + str(pid)], folder) -def list(): +def list(folder=None): """Returns the list of running servers for the current user.""" # This method does NOT write the descriptor file. 
- descriptor = __getInstanceDescriptorPath() + descriptor = __getInstanceDescriptorPath(folder) instances = [] if os.path.exists(descriptor): with open(descriptor, 'r') as f: diff --git a/libcodechecker/server/product_db_access_handler.py b/libcodechecker/server/product_db_access_handler.py new file mode 100644 index 0000000000..bcd65839eb --- /dev/null +++ b/libcodechecker/server/product_db_access_handler.py @@ -0,0 +1,314 @@ +# ------------------------------------------------------------------------- +# The CodeChecker Infrastructure +# This file is distributed under the University of Illinois Open Source +# License. See LICENSE.TXT for details. +# ------------------------------------------------------------------------- +""" +Handle Thrift requests for the product manager service. +""" + +import base64 +import os + +import sqlalchemy + +import shared +from ProductManagement import ttypes + +from libcodechecker.logger import LoggerFactory +from libcodechecker.profiler import timeit + +from config_db_model import * +from database_handler import SQLServer + +LOG = LoggerFactory.get_new_logger('PRODUCT HANDLER') + + +class ThriftProductHandler(object): + """ + Connect to database and handle thrift client requests. + """ + + def __init__(self, + server, + config_session, + routed_product, + package_version): + + self.__server = server + self.__package_version = package_version + self.__session = config_session + self.__product = routed_product + + @timeit + def getAPIVersion(self): + return shared.constants.API_VERSION + + @timeit + def getPackageVersion(self): + return self.__package_version + + @timeit + def getProducts(self, product_endpoint_filter, product_name_filter): + """ + Get the list of products configured on the server. + """ + + result = [] + + try: + session = self.__session() + prods = session.query(Product) + + if product_endpoint_filter: + prods = prods.filter(Product.endpoint.ilike( + '%{0}%'.format(product_endpoint_filter))) + + if product_name_filter: + prods = prods.filter(Product.display_name.ilike( + '%{0}%'.format(product_name_filter))) + + prods = prods.all() + + for prod in prods: + server_product = self.__server.get_product(prod.endpoint) + + if not server_product: + # TODO: Better support this, if the configuration database + # is mounted to multiple servers? + LOG.error("Product '{0}' was found in the configuration " + "database, but no database connection is " + "present. Was the configuration database " + "connected to multiple servers?" + .format(prod.endpoint)) + LOG.info("Please restart the server to make this " + "product available.") + continue + + # Clients are expected to use this method to query if + # the product exists and usable. Usability sometimes requires + # "updating" the "is connected" status of the database. + if not server_product.connected: + server_product.connect() + + name = base64.b64encode(prod.display_name.encode('utf-8')) + descr = base64.b64encode(prod.description.encode('utf-8')) \ + if prod.description else None + + result.append(ttypes.Product( + id=prod.id, + endpoint=prod.endpoint, + displayedName_b64=name, + description_b64=descr, + connected=server_product.connected, + accessible=True)) + + return result + + except sqlalchemy.exc.SQLAlchemyError as alchemy_ex: + msg = str(alchemy_ex) + LOG.error(msg) + raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, + msg) + finally: + session.close() + + @timeit + def getCurrentProduct(self): + """ + Return information about the current product. 
+ + The request MUST be routed as /product-name/ProductService! + """ + + if not self.__product: + msg = "Requested current product from ProductService but the " \ + "request came through the main endpoint." + LOG.error(msg) + raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.IOERROR, + msg) + + try: + session = self.__session() + prod = session.query(Product).get(self.__product.id) + + server_product = self.__server.get_product(prod.endpoint) + if not server_product: + # TODO: Like above, better support this. + LOG.error("Product '{0}' was found in the configuration " + "database, but no database connection is " + "present. Was the configuration database " + "connected to multiple servers?" + .format(prod.endpoint)) + LOG.info("Please restart the server to make this " + "product available.") + raise shared.ttypes.RequestFailed( + shared.ttypes.ErrorCode.DATABASE, + "Product exists, but was not connected to this server.") + + name = base64.b64encode(prod.display_name.encode('utf-8')) + descr = base64.b64encode(prod.description.encode('utf-8')) \ + if prod.description else None + + return ttypes.Product( + id=prod.id, + endpoint=prod.endpoint, + displayedName_b64=name, + description_b64=descr, + connected=server_product.connected, + accessible=True) + + except sqlalchemy.exc.SQLAlchemyError as alchemy_ex: + msg = str(alchemy_ex) + LOG.error(msg) + raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, + msg) + finally: + session.close() + + @timeit + def addProduct(self, product): + """ + Add the given product to the products configured by the server. + """ + + LOG.info("User requested add product '{0}'".format(product.endpoint)) + + dbc = product.connection + if not dbc: + msg = "Product cannot be added without a database configuration!" + LOG.error(msg) + raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.GENERAL, + msg) + + if self.__server.get_product(product.endpoint): + msg = "A product endpoint '/{0}' is already configured!" \ + .format(product.endpoint) + LOG.error(msg) + raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.GENERAL, + msg) + + # Some values come encoded as Base64, decode these. + displayed_name = base64.b64decode(product.displayedName_b64)\ + .decode('utf-8') if product.displayedName_b64 else product.endpoint + description = base64.b64decode(product.description_b64) \ + .decode('utf-8') if product.description_b64 else None + + dbuser = "codechecker" + dbpass = "" + if dbc.username_b64 and dbc.username_b64 != '': + dbuser = base64.b64decode(dbc.username_b64) + if dbc.password_b64 and dbc.password_b64 != '': + dbpass = base64.b64decode(dbc.password_b64) + + if dbc.engine == 'sqlite' and not os.path.isabs(dbc.database): + # Transform the database relative path to be under the + # server's config directory. + dbc.database = os.path.join(self.__server.config_directory, + dbc.database) + + # Transform arguments into a database connection string. 
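+ # For illustration (assumed example values): a PostgreSQL product using
+ # host 'localhost', port 5432, user 'codechecker' and database 'myproduct'
+ # would end up with a connection string of the form
+ # postgresql+<driver>://codechecker:...@localhost:5432/myproduct, whereas
+ # an SQLite product becomes a sqlite+pysqlite:// URL pointing at a file
+ # under the server's configuration directory.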
+ if dbc.engine == 'postgresql': + conn_str_args = {'postgresql': True, + 'sqlite': False, + 'dbaddress': dbc.host, + 'dbport': dbc.port, + 'dbusername': dbuser, + 'dbpassword': dbpass, + 'dbname': dbc.database} + elif dbc.engine == 'sqlite': + conn_str_args = {'postgresql': False, + 'sqlite': dbc.database} + else: + msg = "Database engine '{0}' unknown!".format(dbc.engine) + LOG.error(msg) + raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.GENERAL, + msg) + + conn_str = SQLServer \ + .from_cmdline_args(conn_str_args, IDENTIFIER, None, False, None) \ + .get_connection_string() + + # Create the product's entity in the database. + try: + orm_prod = Product( + endpoint=product.endpoint, + conn_str=conn_str, + name=displayed_name, + description=description) + + LOG.debug("Attempting database connection to new product...") + + # Connect and create the database schema. + self.__server.add_product(orm_prod) + LOG.debug("Product database successfully connected to.") + + connection_wrapper = self.__server.get_product(product.endpoint) + if connection_wrapper.last_connection_failure: + msg = "The configured connection for '/{0}' failed: {1}" \ + .format(product.endpoint, + connection_wrapper.last_connection_failure) + LOG.error(msg) + + self.__server.remove_product(product.endpoint) + + raise shared.ttypes.RequestFailed( + shared.ttypes.ErrorCode.IOERROR, msg) + + session = self.__session() + session.add(orm_prod) + session.commit() + LOG.debug("Product configuration added to database successfully.") + + # The orm_prod object above is not bound to the database as it + # was just created. We use the actual database-backed configuration + # entry to handle connections, so a "reconnect" is issued here. + self.__server.remove_product(product.endpoint) + + orm_prod = session.query(Product) \ + .filter(Product.endpoint == product.endpoint).one() + self.__server.add_product(orm_prod) + + LOG.debug("Product database connected and ready to serve.") + return True + + except sqlalchemy.exc.SQLAlchemyError as alchemy_ex: + msg = str(alchemy_ex) + LOG.error(msg) + raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, + msg) + finally: + if session: + session.close() + + @timeit + def removeProduct(self, product_id): + """ + Disconnect the product specified by the ID from the server. + """ + + try: + session = self.__session() + product = session.query(Product).get(product_id) + if product is None: + msg = "Product with ID {0} does not exist!".format(product_id) + LOG.error(msg) + raise shared.ttypes.RequestFailed( + shared.ttypes.ErrorCode.DATABASE, + msg) + + LOG.info("User requested to remove product '{0}'" + .format(product.endpoint)) + self.__server.remove_product(product.endpoint) + + session.delete(product) + session.commit() + return True + + except sqlalchemy.exc.SQLAlchemyError as alchemy_ex: + msg = str(alchemy_ex) + LOG.error(msg) + raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, + msg) + finally: + session.close() diff --git a/libcodechecker/orm_model.py b/libcodechecker/server/run_db_model.py similarity index 95% rename from libcodechecker/orm_model.py rename to libcodechecker/server/run_db_model.py index 98c3db0506..2c89b010fd 100644 --- a/libcodechecker/orm_model.py +++ b/libcodechecker/server/run_db_model.py @@ -4,8 +4,9 @@ # License. See LICENSE.TXT for details. # ------------------------------------------------------------------------- """ -ORM model. +SQLAlchemy ORM model for the analysis run storage database. 
""" + from __future__ import print_function from __future__ import unicode_literals @@ -268,15 +269,8 @@ class ReviewStatus(Base): date = Column(DateTime, nullable=False) -def CreateSchema(engine): - """ Creates the schema if it does not exists. - Do not check version or do migration yet. """ - Base.metadata.create_all(engine) - - -def CreateSession(engine): - """ Creates a scoped session factory that can act like a session. - The factory uses a thread_local registry, so every thread have - its own session. """ - SessionFactory = scoped_session(sessionmaker(bind=engine)) - return SessionFactory +IDENTIFIER = { + 'identifier': "RunDatabase", + 'orm_meta': CC_META, + 'version_class': DBVersion +} diff --git a/libcodechecker/util.py b/libcodechecker/util.py index 986b8eaee3..7a4bd3c1a7 100644 --- a/libcodechecker/util.py +++ b/libcodechecker/util.py @@ -235,3 +235,55 @@ def get_default_workspace(): """ workspace = os.path.join(os.path.expanduser("~"), '.codechecker') return workspace + + +def split_product_url(url): + """ + Sets up a Thrift CodeCheckerService client for the given product URL. + """ + LOG.debug("Parsing product url '{0}'".format(url)) + if url.startswith("http"): + url = url.replace("http://", "").replace("https://", "") + + if url.endswith("/"): + url = url.rstrip("/") + + if url.startswith("/"): + url = url.lstrip("/") + + # A valid product_url looks like this: 'localhost:8001/Product'. + host, port, product_name = "localhost", 8001, "Default" + try: + parts = url.split("/") + + if len(parts) == 1: + # If only one word is given in the URL, consider it as product + # name, but then it cannot begin with a number. + product_name = parts[0] + if product_name[0].isdigit(): + raise ValueError("Product name was given in URL, but it " + "cannot begin with a number!") + elif len(parts) == 2: + # URL is at least something/product-name. + product_name = parts[1] + + # Something is either a hostname, or a host:port. + server_addr = parts[0].split(":") + if len(server_addr) == 2: + host, port = server_addr[0], int(server_addr[1]) + elif len(server_addr) == 1: + # We consider "localhost/product" as "localhost:8001/product". + host = server_addr[0] + else: + raise ValueError("The server's address is not in a valid " + "'host:port' format!") + else: + raise ValueError("Product URL can not contain extra '/' chars.") + except: + LOG.error("The specified product URL is invalid.") + raise + + LOG.debug("Result: On server '{0}:{1}', product '{2}'" + .format(host, port, product_name)) + + return host, port, product_name diff --git a/product_db_migrate/README b/product_db_migrate/README new file mode 100644 index 0000000000..98e4f9c44e --- /dev/null +++ b/product_db_migrate/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/product_db_migrate/env.py b/product_db_migrate/env.py new file mode 100644 index 0000000000..a3327d5608 --- /dev/null +++ b/product_db_migrate/env.py @@ -0,0 +1,75 @@ +from __future__ import with_statement +from alembic import context +from sqlalchemy import engine_from_config, pool + +# This is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Add your model's MetaData object here +# for 'autogenerate' support. 
+try: + from libcodechecker.server.config_db_model import Base +except ImportError: + # Assume we are in the source directory + import sys + import os + + sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + ".."))) + from libcodechecker.server.config_db_model import Base + +target_metadata = Base.metadata + + +# Other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=target_metadata, literal_binds=True) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + """ + connectable = config.attributes.get('connection', None) + if connectable is None: + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/product_db_migrate/script.py.mako b/product_db_migrate/script.py.mako new file mode 100644 index 0000000000..43c09401bc --- /dev/null +++ b/product_db_migrate/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/scripts/build_package.py b/scripts/build_package.py index 82eb758716..1c6f4b38b4 100755 --- a/scripts/build_package.py +++ b/scripts/build_package.py @@ -725,10 +725,16 @@ def build_package(repository_root, build_package_config, env=None): style.write(css.read() + "\n") # CodeChecker db migrate. - LOG.debug('Copy codechecker database migration') + LOG.debug('Copy codechecker config database migration') + source = os.path.join(repository_root, 'product_db_migrate') + target = os.path.join(package_root, + package_layout['config_db_migrate']) + copy_tree(source, target) + + LOG.debug('Copy codechecker run database migration') source = os.path.join(repository_root, 'db_migrate') target = os.path.join(package_root, - package_layout['codechecker_db_migrate']) + package_layout['run_db_migrate']) copy_tree(source, target) # License. 
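
The test suites below replace the separate --host/--port options with a single
--url value. The new libcodechecker.util.split_product_url() helper shown
earlier breaks such a URL into a (host, port, product name) triple; as a rough
sketch of the accepted formats (expected results inferred from that
implementation, with made-up host and product names):

~~~~
from libcodechecker.util import split_product_url

# Full form: host, port and product endpoint.
split_product_url("localhost:8001/Default")          # ('localhost', 8001, 'Default')

# Port omitted: falls back to the default port 8001.
split_product_url("example.com/MyProduct")           # ('example.com', 8001, 'MyProduct')

# Product name only: host and port default to localhost:8001.
split_product_url("MyProduct")                       # ('localhost', 8001, 'MyProduct')

# A leading http:// or https:// scheme is stripped before parsing.
split_product_url("https://localhost:8001/Default")  # ('localhost', 8001, 'Default')
~~~~
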
diff --git a/tests/Makefile b/tests/Makefile index fb56583b42..cde77d7b73 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -25,13 +25,29 @@ pep8: pep8 bin libcodechecker scripts tests UNIT_TEST_CMD = nosetests $(NOSECFG) tests/unit + +CODECHECKER_CMD = $(BUILD_DIR)/CodeChecker/bin/CodeChecker +SHUTDOWN_SERVER_CMD = echo "Shutting down server..."; \ + ${CODECHECKER_CMD} server -l; \ + ${CODECHECKER_CMD} server --config-directory $(BUILD_DIR)/workspace \ + --port `cat "$(BUILD_DIR)/workspace/serverport"` --stop; \ + rm -f "$(BUILD_DIR)/workspace/serverport"; \ + ${CODECHECKER_CMD} server -l + +# Preserve the error or no error status of the previous command but always +# be able to shut down servers. +EXIT_HAPPY = { ${SHUTDOWN_SERVER_CMD}; exit 0; } +EXIT_ERROR = { ${SHUTDOWN_SERVER_CMD}; exit 1; } + FUNCTIONAL_TEST_CMD = $(REPO_ROOT) $(CLANG_VERSION) $(TEST_PROJECT) \ - nosetests $(NOSECFG) tests/functional + nosetests $(NOSECFG) tests/functional \ + && ${EXIT_HAPPY} || ${EXIT_ERROR} run_test: package venv_dev $(ACTIVATE_DEV_VENV) && \ $(REPO_ROOT) $(CLANG_VERSION) $(TEST_PROJECT) \ - nosetests $(NOSECFG) ${TEST} + nosetests $(NOSECFG) ${TEST} \ + && ${EXIT_HAPPY} || ${EXIT_ERROR} test_unit: venv_dev $(ACTIVATE_DEV_VENV) && $(UNIT_TEST_CMD) diff --git a/tests/functional/analyze/__init__.py b/tests/functional/analyze/__init__.py index 0cc2315229..41128d849f 100644 --- a/tests/functional/analyze/__init__.py +++ b/tests/functional/analyze/__init__.py @@ -11,10 +11,7 @@ import os import shutil -from libtest import codechecker from libtest import env -from libtest import get_free_port -from libtest import project # Test workspace should be initialized in this module. diff --git a/tests/functional/authentication/__init__.py b/tests/functional/authentication/__init__.py index ca75014f17..fe00aecf3b 100644 --- a/tests/functional/authentication/__init__.py +++ b/tests/functional/authentication/__init__.py @@ -7,12 +7,10 @@ """Setup for the package tests.""" -import json import multiprocessing import os import shutil import subprocess -import sys import time from libtest import project @@ -36,8 +34,6 @@ def setup_package(): test_project = 'cpp' - pg_db_config = env.get_postgresql_cfg() - test_config = {} project_info = project.get_info(test_project) @@ -48,17 +44,19 @@ def setup_package(): skip_list_file = None - # Setup environment varaibled for the test cases. - host_port_cfg = env.get_host_port_cfg() + # Setup environment variables for the test cases. + host_port_cfg = {'viewer_host': 'localhost', + 'viewer_port': env.get_free_port(), + 'viewer_product': 'authentication'} test_env = env.test_env() + test_env['HOME'] = TEST_WORKSPACE codechecker_cfg = { 'suppress_file': suppress_file, 'skip_list_file': skip_list_file, 'check_env': test_env, 'workspace': TEST_WORKSPACE, - 'pg_db_config': pg_db_config, 'checkers': [] } @@ -78,17 +76,31 @@ def setup_package(): def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() - + """Stop the CodeChecker server and clean up after the tests.""" # TODO If environment variable is set keep the workspace # and print out the path. global TEST_WORKSPACE + # Removing the product through this server requires credentials. 
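+ # The order below matters: read back the exported test configuration,
+ # log in with the suite's test credentials, remove the per-suite product,
+ # and only then signal the server itself to stop.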
+ codechecker_cfg = env.import_test_cfg(TEST_WORKSPACE)['codechecker_cfg'] + codechecker.login(codechecker_cfg, + TEST_WORKSPACE, "cc", "test") + codechecker.remove_test_package_product(TEST_WORKSPACE, + codechecker_cfg['check_env']) + + __STOP_SERVER.set() + + # The custom server stated in a separate home needs to be waited, so it + # can properly execute its finalizers. + time.sleep(5) + print("Removing: " + TEST_WORKSPACE) - shutil.rmtree(TEST_WORKSPACE) + shutil.rmtree(TEST_WORKSPACE, ignore_errors=True) +# This server uses custom server configuration, which is brought up here +# and torn down by the package itself --- it does not connect to the +# test run's "master" server. def _start_server(codechecker_cfg, test_config, auth=False): """Start the CodeChecker server.""" def start_server_proc(event, server_cmd, checking_env): @@ -102,7 +114,9 @@ def start_server_proc(event, server_cmd, checking_env): if proc.poll() is None: proc.terminate() - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) + server_cmd = codechecker.serv_cmd(codechecker_cfg['workspace'], + str(codechecker_cfg['viewer_port']), + env.get_postgresql_cfg()) server_proc = multiprocessing.Process( name='server', @@ -110,5 +124,6 @@ def start_server_proc(event, server_cmd, checking_env): args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) server_proc.start() + # Wait for server to start and connect to database. time.sleep(20) diff --git a/tests/functional/authentication/test_authentication.py b/tests/functional/authentication/test_authentication.py index 566916f7dd..317f1c4e40 100644 --- a/tests/functional/authentication/test_authentication.py +++ b/tests/functional/authentication/test_authentication.py @@ -14,6 +14,7 @@ from thrift.protocol.TProtocol import TProtocolException +from libtest import codechecker from libtest import env @@ -30,11 +31,7 @@ def setUp(self): test_class = self.__class__.__name__ print('Running ' + test_class + ' tests in ' + self._test_workspace) - tcfg = os.path.join(self._test_workspace, 'test_config.json') - with open(tcfg, 'r') as cfg: - t = json.load(cfg) - self._host = t['codechecker_cfg']['viewer_host'] - self._port = t['codechecker_cfg']['viewer_port'] + self._test_cfg = env.import_test_cfg(self._test_workspace) def test_privileged_access(self): """ @@ -63,6 +60,19 @@ def test_privileged_access(self): self.assertIsNotNone(self.sessionToken, "Valid credentials didn't give us a token!") + # We need to run an authentication on the command-line, so that the + # product-adding feature is accessible by us. + codechecker.login(self._test_cfg['codechecker_cfg'], + self._test_workspace, "cc", "test") + # We still need to create a product on the new server, because + # in PostgreSQL mode, the same database is used for configuration + # by the newly started instance of this test suite too. + codechecker.add_test_package_product( + self._test_cfg['codechecker_cfg'], + self._test_workspace, + # Use the test's home directory to find the session token file. + self._test_cfg['codechecker_cfg']['check_env']) + handshake = auth_client.getAuthParameters() self.assertTrue(handshake.requiresAuthentication, "Privileged server " + @@ -88,6 +98,10 @@ def test_privileged_access(self): self.assertTrue(result, "Server did not allow us to destroy session.") + # Kill the session token that was created by login() too. 
+ codechecker.logout(self._test_cfg['codechecker_cfg'], + self._test_workspace) + try: client.getAPIVersion() success = False @@ -114,8 +128,11 @@ def test_nonauth_storage(self): test_dir = os.path.dirname(os.path.realpath(__file__)) report_file = os.path.join(test_dir, 'clang-5.0-trunk.plist') + codechecker_cfg = self._test_cfg['codechecker_cfg'] + store_cmd = [env.codechecker_cmd(), 'store', '--name', 'auth', - '--host', str(self._host), '--port', str(self._port), + # Use the 'Default' product. + '--url', env.parts_to_url(codechecker_cfg), report_file] with self.assertRaises(subprocess.CalledProcessError): diff --git a/tests/functional/comment/__init__.py b/tests/functional/comment/__init__.py index 362c14734b..d9ae32e8ac 100644 --- a/tests/functional/comment/__init__.py +++ b/tests/functional/comment/__init__.py @@ -4,13 +4,8 @@ # This file is distributed under the University of Illinois Open Source # License. See LICENSE.TXT for details. # ----------------------------------------------------------------------------- -from time import sleep - """Setup for the test package comment.""" -from subprocess import CalledProcessError - -import json import multiprocessing import os import shutil @@ -42,8 +37,6 @@ def setup_package(): test_project_path = project.path(test_project) - pg_db_config = env.get_postgresql_cfg() - test_config = {} project_info = project.get_info(test_project) @@ -54,11 +47,13 @@ def setup_package(): skip_list_file = None - # Setup environment variabled for test cases. - host_port_cfg = env.get_host_port_cfg() + # Setup environment variables for the test cases. + host_port_cfg = {'viewer_host': 'localhost', + 'viewer_port': env.get_free_port(), + 'viewer_product': 'comment'} test_env = env.test_env() - test_env["HOME"] = TEST_WORKSPACE + test_env['HOME'] = TEST_WORKSPACE codechecker_cfg = { 'suppress_file': suppress_file, @@ -66,14 +61,12 @@ def setup_package(): 'check_env': test_env, 'workspace': TEST_WORKSPACE, 'reportdir': os.path.join(TEST_WORKSPACE, 'reports'), - 'pg_db_config': pg_db_config, 'checkers': ['-d', 'core.CallAndMessage', '-e', 'core.StackAddressEscape'] } codechecker_cfg.update(host_port_cfg) - test_config['codechecker_cfg'] = codechecker_cfg # Start the CodeChecker server. print("Starting server to get results") env.enable_auth(TEST_WORKSPACE) @@ -83,6 +76,12 @@ def setup_package(): "cc", "test") + # We still need to create a product on the new server, because + # in PostgreSQL mode, the same database is used for configuration + # by the newly started instances. + codechecker.add_test_package_product(host_port_cfg, TEST_WORKSPACE, + test_env) + # Check the test project for the first time. test_project_name = project_info['name'] + '_' + uuid.uuid4().hex @@ -108,17 +107,28 @@ def setup_package(): def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() - + """Stop the CodeChecker server and clean up after the tests.""" # TODO: If environment variable is set keep the workspace # and print out the path. global TEST_WORKSPACE + check_env = env.import_test_cfg(TEST_WORKSPACE)[ + 'codechecker_cfg']['check_env'] + codechecker.remove_test_package_product(TEST_WORKSPACE, check_env) + + __STOP_SERVER.set() + + # The custom server stated in a separate home needs to be waited, so it + # can properly execute its finalizers. 
+ time.sleep(5) + print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE, ignore_errors=True) +# This server uses custom server configuration, which is brought up here +# and torn down by the package itself --- it does not connect to the +# test run's "master" server. def _start_server(codechecker_cfg, test_config, auth=False): """Start the CodeChecker server.""" @@ -134,7 +144,9 @@ def start_server_proc(event, server_cmd, checking_env): if proc.poll() is None: proc.terminate() - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) + server_cmd = codechecker.serv_cmd(codechecker_cfg['workspace'], + str(codechecker_cfg['viewer_port']), + env.get_postgresql_cfg()) server_proc = multiprocessing.Process( name='server', diff --git a/tests/functional/comment_unauth/__init__.py b/tests/functional/comment_unauth/__init__.py index 1a5ac4762c..f0eb105728 100644 --- a/tests/functional/comment_unauth/__init__.py +++ b/tests/functional/comment_unauth/__init__.py @@ -4,28 +4,17 @@ # This file is distributed under the University of Illinois Open Source # License. See LICENSE.TXT for details. # ----------------------------------------------------------------------------- -from time import sleep - """Setup for the test package comment.""" -from subprocess import CalledProcessError - -import json -import multiprocessing import os import shutil -import subprocess import sys -import time import uuid from libtest import codechecker from libtest import env from libtest import project -# Stopping event for CodeChecker server. -__STOP_SERVER = multiprocessing.Event() - # Test workspace should be initialized in this module. TEST_WORKSPACE = None @@ -34,7 +23,7 @@ def setup_package(): """Setup the environment for the tests. """ global TEST_WORKSPACE - TEST_WORKSPACE = env.get_workspace('comment') + TEST_WORKSPACE = env.get_workspace('comment_unauth') os.environ['TEST_WORKSPACE'] = TEST_WORKSPACE @@ -42,8 +31,6 @@ def setup_package(): test_project_path = project.path(test_project) - pg_db_config = env.get_postgresql_cfg() - test_config = {} project_info = project.get_info(test_project) @@ -54,9 +41,6 @@ def setup_package(): skip_list_file = None - # Setup environment variabled for test cases. - host_port_cfg = env.get_host_port_cfg() - test_env = env.test_env() test_env["HOME"] = TEST_WORKSPACE @@ -66,18 +50,19 @@ def setup_package(): 'check_env': test_env, 'workspace': TEST_WORKSPACE, 'reportdir': os.path.join(TEST_WORKSPACE, 'reports'), - 'pg_db_config': pg_db_config, 'checkers': ['-d', 'core.CallAndMessage', '-e', 'core.StackAddressEscape'] } - codechecker_cfg.update(host_port_cfg) + # Start or connect to the running CodeChecker server and get connection + # details. + print("This test uses a CodeChecker server... connecting...") + server_access = codechecker.start_or_get_server() + server_access['viewer_product'] = 'comment_unauth' + codechecker.add_test_package_product(server_access, TEST_WORKSPACE) - test_config['codechecker_cfg'] = codechecker_cfg - # Start the CodeChecker server. - print("Starting server to get results") - _start_server(codechecker_cfg, test_config, False) - print("server started") + # Extend the checker configuration with the server access. + codechecker_cfg.update(server_access) # Check the test project for the first time. 
test_project_name = project_info['name'] + '_' + uuid.uuid4().hex @@ -104,40 +89,15 @@ def setup_package(): def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() + """Clean up after the test.""" # TODO: If environment variable is set keep the workspace # and print out the path. global TEST_WORKSPACE + check_env = env.import_test_cfg(TEST_WORKSPACE)[ + 'codechecker_cfg']['check_env'] + codechecker.remove_test_package_product(TEST_WORKSPACE, check_env) + print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE, ignore_errors=True) - - -def _start_server(codechecker_cfg, test_config, auth=False): - """Start the CodeChecker server.""" - - def start_server_proc(event, server_cmd, checking_env): - """Target function for starting the CodeChecker server.""" - - proc = subprocess.Popen(server_cmd, env=checking_env) - - # Blocking termination until event is set. - event.wait() - - # If proc is still running, stop it. - if proc.poll() is None: - proc.terminate() - - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) - - server_proc = multiprocessing.Process( - name='server', - target=start_server_proc, - args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) - - server_proc.start() - - # Wait for server to start and connect to database. - time.sleep(20) diff --git a/tests/functional/delete_runs/__init__.py b/tests/functional/delete_runs/__init__.py index b07b0c8ca9..e7e63ef0e8 100644 --- a/tests/functional/delete_runs/__init__.py +++ b/tests/functional/delete_runs/__init__.py @@ -7,23 +7,15 @@ """Setup for the test package delete_runs.""" -from subprocess import CalledProcessError - -import multiprocessing import os import shutil -import subprocess import sys import time from libtest import codechecker from libtest import env -from libtest import get_free_port from libtest import project -# Stopping event for CodeChecker server. -__STOP_SERVER = multiprocessing.Event() - # Test workspace should be initialized in this module. TEST_WORKSPACE = None @@ -37,10 +29,6 @@ def setup_package(): # Set the TEST_WORKSPACE used by the tests. os.environ['TEST_WORKSPACE'] = TEST_WORKSPACE - # PostgreSQL configuration might be empty if tests are run - # with SQLite. - pg_db_config = env.get_postgresql_cfg() - test_config = {} test_project = 'simple' @@ -76,19 +64,18 @@ def setup_package(): 'skip_list_file': skip_list_file, 'check_env': test_env, 'workspace': TEST_WORKSPACE, - 'pg_db_config': pg_db_config, 'checkers': [] } - # Get new unique port numbers for this test run. - host_port_cfg = env.get_host_port_cfg() - - # Extend the checker configuration with the port numbers. - codechecker_cfg.update(host_port_cfg) + # Start or connect to the running CodeChecker server and get connection + # details. + print("This test uses a CodeChecker server... connecting...") + server_access = codechecker.start_or_get_server() + server_access['viewer_product'] = 'delete_runs' + codechecker.add_test_package_product(server_access, TEST_WORKSPACE) - # Start the CodeChecker server. - print("Starting server to get results") - _start_server(codechecker_cfg, test_config, False) + # Extend the checker configuration with the server access. + codechecker_cfg.update(server_access) for i in range(0, 5): # Clean the test project, if needed by the tests. 
@@ -122,38 +109,15 @@ def setup_package(): def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() + """Clean up after the test.""" # TODO: If environment variable is set keep the workspace # and print out the path. global TEST_WORKSPACE + check_env = env.import_test_cfg(TEST_WORKSPACE)[ + 'codechecker_cfg']['check_env'] + codechecker.remove_test_package_product(TEST_WORKSPACE, check_env) + print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE) - - -def _start_server(codechecker_cfg, test_config, auth=False): - """Start the CodeChecker server.""" - def start_server_proc(event, server_cmd, checking_env): - """Target function for starting the CodeChecker server.""" - proc = subprocess.Popen(server_cmd, env=checking_env) - - # Blocking termination until event is set. - event.wait() - - # If proc is still running, stop it. - if proc.poll() is None: - proc.terminate() - - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) - - server_proc = multiprocessing.Process( - name='server', - target=start_server_proc, - args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) - - server_proc.start() - - # Wait for server to start and connect to database. - time.sleep(20) diff --git a/tests/functional/delete_runs/test_delete_runs.py b/tests/functional/delete_runs/test_delete_runs.py index b2aa5ebd6d..e830f114b1 100644 --- a/tests/functional/delete_runs/test_delete_runs.py +++ b/tests/functional/delete_runs/test_delete_runs.py @@ -34,8 +34,9 @@ def setUp(self): test_class = self.__class__.__name__ print('Running ' + test_class + ' tests in ' + test_workspace) - self._cc_port = env.import_test_cfg(test_workspace)[ - 'codechecker_cfg']['viewer_port'] + codechecker_cfg = env.import_test_cfg(test_workspace)[ + 'codechecker_cfg'] + self.server_url = env.parts_to_url(codechecker_cfg) # Get the test project configuration from the prepared test workspace. self._testproject_data = env.setup_test_proj_cfg(test_workspace) @@ -79,7 +80,7 @@ def none_exists(runs): del_cmd = [self._codechecker_cmd, 'cmd', 'del', '--all-after-run', run2_name, - '-p', str(self._cc_port)] + '--url', self.server_url] run_cmd(del_cmd) self.assertTrue(all_exists( @@ -104,7 +105,7 @@ def none_exists(runs): del_cmd = [self._codechecker_cmd, 'cmd', 'del', '--all-before-time', date_run2, - '-p', str(self._cc_port)] + '--url', self.server_url] run_cmd(del_cmd) self.assertTrue(all_exists( @@ -117,7 +118,7 @@ def none_exists(runs): del_cmd = [self._codechecker_cmd, 'cmd', 'del', '--name', run2_name, - '-p', str(self._cc_port)] + '--url', self.server_url] run_cmd(del_cmd) self.assertTrue(none_exists( diff --git a/tests/functional/detection_status/__init__.py b/tests/functional/detection_status/__init__.py index 8b6bcbc626..648875e71f 100644 --- a/tests/functional/detection_status/__init__.py +++ b/tests/functional/detection_status/__init__.py @@ -7,18 +7,13 @@ """Setup for the test package detection_status.""" -import multiprocessing import os import shutil -import subprocess import time from libtest import codechecker from libtest import env -# Stopping event for CodeChecker server. -__STOP_SERVER = multiprocessing.Event() - # Test workspace should be initialized in this module. TEST_WORKSPACE = None @@ -38,58 +33,34 @@ def setup_package(): 'skip_list_file': None, 'check_env': env.test_env(), 'workspace': TEST_WORKSPACE, - 'pg_db_config': env.get_postgresql_cfg(), 'checkers': [], 'test_project': 'hello' } - # Get new unique port numbers for this test run. 
- host_port_cfg = env.get_host_port_cfg() - - # Extend the checker configuration with the port numbers. - codechecker_cfg.update(host_port_cfg) + # Start or connect to the running CodeChecker server and get connection + # details. + print("This test uses a CodeChecker server... connecting...") + server_access = codechecker.start_or_get_server() + server_access['viewer_product'] = 'detection_status' + codechecker.add_test_package_product(server_access, TEST_WORKSPACE) - # Start the CodeChecker server. - print("Starting server to get results") - _start_server(codechecker_cfg, {}, False) + # Extend the checker configuration with the server access. + codechecker_cfg.update(server_access) # Export the test configuration to the workspace. env.export_test_cfg(TEST_WORKSPACE, {'codechecker_cfg': codechecker_cfg}) def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() + """Clean up after the test.""" # TODO: If environment variable is set keep the workspace # and print out the path. global TEST_WORKSPACE + check_env = env.import_test_cfg(TEST_WORKSPACE)[ + 'codechecker_cfg']['check_env'] + codechecker.remove_test_package_product(TEST_WORKSPACE, check_env) + print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE) - - -def _start_server(codechecker_cfg, test_config, auth=False): - """Start the CodeChecker server.""" - def start_server_proc(event, server_cmd, checking_env): - """Target function for starting the CodeChecker server.""" - proc = subprocess.Popen(server_cmd, env=checking_env) - - # Blocking termination until event is set. - event.wait() - - # If proc is still running, stop it. - if proc.poll() is None: - proc.terminate() - - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) - - server_proc = multiprocessing.Process( - name='server', - target=start_server_proc, - args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) - - server_proc.start() - - # Wait for server to start and connect to database. - time.sleep(20) diff --git a/tests/functional/diff/__init__.py b/tests/functional/diff/__init__.py index 6360a4c142..1b6f42d47c 100644 --- a/tests/functional/diff/__init__.py +++ b/tests/functional/diff/__init__.py @@ -7,26 +7,16 @@ """Setup for the package tests.""" -import json -import multiprocessing import os -import shlex import shutil -import subprocess import sys -import time import uuid -from subprocess import CalledProcessError -from libtest import get_free_port -from libtest import project from libtest import codechecker from libtest import env +from libtest import project -# Stopping event for CodeChecker server. -__STOP_SERVER = multiprocessing.Event() - # Test workspace used to diff tests. TEST_WORKSPACE = None @@ -34,7 +24,7 @@ def setup_package(): """ Setup the environment for the tests. - Check the test project twice, then start the server. + Check the test project twice. """ global TEST_WORKSPACE @@ -46,8 +36,6 @@ def setup_package(): test_project_path = project.path(test_project) - pg_db_config = env.get_postgresql_cfg() - test_config = {} project_info = project.get_info(test_project) @@ -58,9 +46,6 @@ def setup_package(): skip_list_file = None - # Setup environment variabled for test cases. 
- host_port_cfg = env.get_host_port_cfg() - test_env = env.test_env() codechecker_cfg = { @@ -70,22 +55,25 @@ def setup_package(): 'force': True, 'workspace': TEST_WORKSPACE, 'reportdir': os.path.join(TEST_WORKSPACE, 'reports'), - 'pg_db_config': pg_db_config, 'checkers': ['-d', 'core.CallAndMessage', '-e', 'core.StackAddressEscape'] } - codechecker_cfg.update(host_port_cfg) - test_config['codechecker_cfg'] = codechecker_cfg ret = project.clean(test_project, test_env) if ret: sys.exit(ret) - # Start the CodeChecker server. - print("Starting server to get results") - _start_server(codechecker_cfg, test_config, False) + # Start or connect to the running CodeChecker server and get connection + # details. + print("This test uses a CodeChecker server... connecting...") + server_access = codechecker.start_or_get_server() + server_access['viewer_product'] = 'diff' + codechecker.add_test_package_product(server_access, TEST_WORKSPACE) + + # Extend the checker configuration with the server access. + codechecker_cfg.update(server_access) test_project_name_base = project_info['name'] + '_' + uuid.uuid4().hex @@ -123,10 +111,6 @@ def setup_package(): sys.exit(1) print("CodeChecker analyze of test project was successful.") - if pg_db_config: - print("Waiting for PotgreSQL to stop.") - codechecker.wait_for_postgres_shutdown(TEST_WORKSPACE) - # Order of the test run names matter at comparison! codechecker_cfg['run_names'] = [test_project_name_base, test_project_name_new] @@ -138,40 +122,15 @@ def setup_package(): def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() + """Clean up after the test.""" # TODO: If environment variable is set keep the workspace # and print out the path. global TEST_WORKSPACE + check_env = env.import_test_cfg(TEST_WORKSPACE)[ + 'codechecker_cfg']['check_env'] + codechecker.remove_test_package_product(TEST_WORKSPACE, check_env) + print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE) - - -def _start_server(codechecker_cfg, test_config, auth=False): - """Start the CodeChecker server.""" - - def start_server_proc(event, server_cmd, checking_env): - """Target function for starting the CodeChecker server.""" - - proc = subprocess.Popen(server_cmd, env=checking_env) - - # Blocking termination until event is set. - event.wait() - - # If proc is still running, stop it. - if proc.poll() is None: - proc.terminate() - - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) - - server_proc = multiprocessing.Process( - name='server', - target=start_server_proc, - args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) - - server_proc.start() - - # Wait for server to start and connect to database. - time.sleep(20) diff --git a/tests/functional/diff/test_diff.py b/tests/functional/diff/test_diff.py index 1fa7ce4bfa..9b677531af 100644 --- a/tests/functional/diff/test_diff.py +++ b/tests/functional/diff/test_diff.py @@ -7,21 +7,21 @@ """ Diff feature tests. Comparing results from two runs. 
""" -import os -import unittest -import logging +import logging +import os import re -import shared import subprocess +import unittest +import shared from codeCheckerDBAccess.ttypes import DiffType from codeCheckerDBAccess.ttypes import CompareData from codeCheckerDBAccess.ttypes import ReportFilter_v2 -from libtest.thrift_client_to_db import get_all_run_results_v2 -from libtest.debug_printer import print_run_results from libtest import env +from libtest.debug_printer import print_run_results +from libtest.thrift_client_to_db import get_all_run_results_v2 def get_severity_level(name): @@ -72,6 +72,8 @@ def setUp(self): self._test_config = env.import_test_cfg(test_workspace) self._run_names = env.get_run_names(test_workspace) + self._url = env.parts_to_url(self._test_config['codechecker_cfg']) + def test_get_diff_res_count_new(self): """ Count the new results with no filter. @@ -297,7 +299,7 @@ def test_get_diff_checker_counts_core_unresolved(self): diff_res = self._cc_client.getCheckerCounts([base_run_id], report_filter, cmp_data) - # Unesolved core checkers. + # Unresolved core checkers. test_res = {'core.NullDereference': 4, 'core.DivideZero': 3} self.assertDictEqual(diff_res, test_res) @@ -480,12 +482,9 @@ def test_local_compare_res_count_new(self): Count the new results with no filter in local compare mode. """ base_run_name = self._run_names[0] - vh = self._test_config['codechecker_cfg']['viewer_host'] - vp = self._test_config['codechecker_cfg']['viewer_port'] diff_cmd = [self._codechecker_cmd, "cmd", "diff", "--new", - "--host", vh, - "--port", str(vp), + "--url", self._url, "-b", base_run_name, "-n", self._report_dir ] @@ -505,12 +504,9 @@ def test_local_compare_res_count_resovled(self): Count the resolved results with no filter in local compare mode. """ base_run_name = self._run_names[0] - vh = self._test_config['codechecker_cfg']['viewer_host'] - vp = self._test_config['codechecker_cfg']['viewer_port'] diff_cmd = [self._codechecker_cmd, "cmd", "diff", "--resolved", - "--host", vh, - "--port", str(vp), + "--url", self._url, "-b", base_run_name, "-n", self._report_dir ] @@ -530,12 +526,9 @@ def test_local_compare_res_count_unresovled(self): Count the unresolved results with no filter in local compare mode. """ base_run_name = self._run_names[0] - vh = self._test_config['codechecker_cfg']['viewer_host'] - vp = self._test_config['codechecker_cfg']['viewer_port'] diff_cmd = [self._codechecker_cmd, "cmd", "diff", "--unresolved", - "--host", vh, - "--port", str(vp), + "--url", self._url, "-b", base_run_name, "-n", self._report_dir ] diff --git a/tests/functional/diff/test_regression_diff.py b/tests/functional/diff/test_regression_diff.py index 0acea763c4..fe8b9bb5c0 100644 --- a/tests/functional/diff/test_regression_diff.py +++ b/tests/functional/diff/test_regression_diff.py @@ -7,20 +7,20 @@ """ Diff feature tests. Comparing results from two runs. 
""" -import os -import unittest -import logging +import logging +import os import re import shared import subprocess +import unittest from codeCheckerDBAccess.ttypes import DiffType from codeCheckerDBAccess.ttypes import ReportFilter -from libtest.thrift_client_to_db import get_all_run_results -from libtest.debug_printer import print_run_results from libtest import env +from libtest.debug_printer import print_run_results +from libtest.thrift_client_to_db import get_all_run_results def get_severity_level(name): @@ -71,6 +71,8 @@ def setUp(self): self._test_config = env.import_test_cfg(test_workspace) self._run_names = env.get_run_names(test_workspace) + self._url = env.parts_to_url(self._test_config['codechecker_cfg']) + def test_get_diff_res_count_new(self): """ Count the new results with no filter. @@ -244,12 +246,9 @@ def test_local_compare_res_count_new(self): Count the new results with no filter in local compare mode. """ base_run_name = self._run_names[0] - vh = self._test_config['codechecker_cfg']['viewer_host'] - vp = self._test_config['codechecker_cfg']['viewer_port'] diff_cmd = [self._codechecker_cmd, "cmd", "diff", "--new", - "--host", vh, - "--port", str(vp), + "--url", self._url, "-b", base_run_name, "-n", self._report_dir ] @@ -269,12 +268,9 @@ def test_local_compare_res_count_resovled(self): Count the resolved results with no filter in local compare mode. """ base_run_name = self._run_names[0] - vh = self._test_config['codechecker_cfg']['viewer_host'] - vp = self._test_config['codechecker_cfg']['viewer_port'] diff_cmd = [self._codechecker_cmd, "cmd", "diff", "--resolved", - "--host", vh, - "--port", str(vp), + "--url", self._url, "-b", base_run_name, "-n", self._report_dir ] @@ -294,12 +290,9 @@ def test_local_compare_res_count_unresovled(self): Count the unresolved results with no filter in local compare mode. """ base_run_name = self._run_names[0] - vh = self._test_config['codechecker_cfg']['viewer_host'] - vp = self._test_config['codechecker_cfg']['viewer_port'] diff_cmd = [self._codechecker_cmd, "cmd", "diff", "--unresolved", - "--host", vh, - "--port", str(vp), + "--url", self._url, "-b", base_run_name, "-n", self._report_dir ] diff --git a/tests/functional/func_template/template__init__.py b/tests/functional/func_template/template__init__.py index 4250b359a9..ef4c59d0d1 100644 --- a/tests/functional/func_template/template__init__.py +++ b/tests/functional/func_template/template__init__.py @@ -13,23 +13,15 @@ """ -from subprocess import CalledProcessError - -import multiprocessing import os import shutil -import subprocess import sys -import time import uuid from libtest import codechecker from libtest import env -from libtest import get_free_port from libtest import project -# Stopping event for CodeChecker server. -__STOP_SERVER = multiprocessing.Event() # Test workspace should be initialized in this module. TEST_WORKSPACE = None @@ -49,10 +41,6 @@ def setup_package(): # find different errors. clang_version = env.clang_to_test() - # PostgreSQL configuration might be empty if tests are run - # with SQLite. - pg_db_config = env.get_postgresql_cfg() - test_config = {} test_project = 'cpp' @@ -88,19 +76,18 @@ def setup_package(): 'skip_list_file': skip_list_file, 'check_env': test_env, 'workspace': TEST_WORKSPACE, - 'pg_db_config': pg_db_config, 'checkers': [] } - # Get new unique port numbers for this test run. - host_port_cfg = env.get_host_port_cfg() - - # Extend the checker configuration with the port numbers. 
- codechecker_cfg.update(host_port_cfg) + # Start or connect to the running CodeChecker server and get connection + # details. + print("This test uses a CodeChecker server... connecting...") + server_access = codechecker.start_or_get_server() + server_access['viewer_product'] = '$TEST_NAME$' + codechecker.add_test_package_product(server_access, TEST_WORKSPACE) - # Start the CodeChecker server. - print("Starting server to get results") - _start_server(codechecker_cfg, test_config, False) + # Extend the checker configuration with the server access. + codechecker_cfg.update(server_access) # Clean the test project, if needed by the tests. ret = project.clean(test_project) @@ -125,38 +112,15 @@ def setup_package(): def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() + """Clean up after the test.""" # TODO: If environment variable is set keep the workspace # and print out the path. global TEST_WORKSPACE + check_env = env.import_test_cfg(TEST_WORKSPACE)[ + 'codechecker_cfg']['check_env'] + codechecker.remove_test_package_product(TEST_WORKSPACE, check_env) + print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE) - - -def _start_server(codechecker_cfg, test_config, auth=False): - """Start the CodeChecker server.""" - def start_server_proc(event, server_cmd, checking_env): - """Target function for starting the CodeChecker server.""" - proc = subprocess.Popen(server_cmd, env=checking_env) - - # Blocking termination until event is set. - event.wait() - - # If proc is still running, stop it. - if proc.poll() is None: - proc.terminate() - - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) - - server_proc = multiprocessing.Process( - name='server', - target=start_server_proc, - args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) - - server_proc.start() - - # Wait for server to start and connect to database. - time.sleep(20) diff --git a/tests/functional/func_template/template_test.py b/tests/functional/func_template/template_test.py index 881141ebdd..188ecc1c45 100644 --- a/tests/functional/func_template/template_test.py +++ b/tests/functional/func_template/template_test.py @@ -46,9 +46,6 @@ def setUp(self): self._cc_client = env.setup_viewer_client(test_workspace) self.assertIsNotNone(self._cc_client) - # Setup a server client to test server API calls. - self._report = env.setup_server_client(test_workspace) - # Get the CodeChecker cmd if needed for the tests. self._codechecker_cmd = env.codechecker_cmd() diff --git a/tests/functional/instance_manager/__init__.py b/tests/functional/instance_manager/__init__.py index 340972bb9d..d3083aa9c6 100644 --- a/tests/functional/instance_manager/__init__.py +++ b/tests/functional/instance_manager/__init__.py @@ -7,20 +7,12 @@ """Setup for the package tests.""" -import copy -import json import multiprocessing import os -import shlex import shutil import subprocess -import sys import time -import uuid -from subprocess import CalledProcessError -from libtest import get_free_port -from libtest import project from libtest import codechecker from libtest import env @@ -42,29 +34,36 @@ def setup_package(): test_config = {} - # Setup environment varaibled for the test cases. - host_port_cfg = env.get_host_port_cfg() + test_env = env.test_env() + test_env['HOME'] = TEST_WORKSPACE + + # Setup environment variables for the test cases. 
+ host_port_cfg = {'viewer_host': 'localhost', + 'viewer_port': env.get_free_port()} codechecker_cfg = { 'workspace': TEST_WORKSPACE, + 'check_env': test_env, 'run_names': [], 'checkers': [] } - codechecker_cfg.update(host_port_cfg) - test_config['codechecker_1'] = codechecker_cfg # We need a second server codechecker_cfg = { 'workspace': TEST_WORKSPACE, + 'check_env': test_env, 'run_names': [], 'checkers': [] } - host_port_cfg = env.get_host_port_cfg() + host_port_cfg = {'viewer_host': 'localhost', + 'viewer_port': env.get_free_port()} + if host_port_cfg['viewer_port'] == \ test_config['codechecker_1']['viewer_port']: host_port_cfg['viewer_port'] = int(host_port_cfg['viewer_port']) + 1 + codechecker_cfg.update(host_port_cfg) test_config['codechecker_2'] = codechecker_cfg @@ -91,7 +90,10 @@ def teardown_package(): shutil.rmtree(TEST_WORKSPACE) -def start_server(codechecker_cfg, test_config, event): +# This server uses multiple custom servers, which are brought up here +# and torn down by the package itself --- it does not connect to the +# test run's "master" server. +def start_server(codechecker_cfg, event): """Start the CodeChecker server.""" def start_server_proc(event, server_cmd, checking_env): """Target function for starting the CodeChecker server.""" @@ -104,13 +106,16 @@ def start_server_proc(event, server_cmd, checking_env): if proc.poll() is None: proc.terminate() - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) + server_cmd = codechecker.serv_cmd(codechecker_cfg['workspace'], + str(codechecker_cfg['viewer_port']), + None) server_proc = multiprocessing.Process( name='server', target=start_server_proc, - args=(event, server_cmd, env.test_env())) + args=(event, server_cmd, codechecker_cfg['check_env'])) server_proc.start() + # Wait for server to start and connect to database. - time.sleep(15) + time.sleep(5) diff --git a/tests/functional/instance_manager/test_instances.py b/tests/functional/instance_manager/test_instances.py index 4bc9fa4a3d..7202976e24 100644 --- a/tests/functional/instance_manager/test_instances.py +++ b/tests/functional/instance_manager/test_instances.py @@ -19,17 +19,7 @@ from . import start_server -def run_cmd(cmd): - print(cmd) - proc = subprocess.Popen(cmd, - stdout=subprocess.PIPE) - - out, _ = proc.communicate() - print(out) - return proc.returncode - - -class Instances(unittest.TestCase): +class TestInstances(unittest.TestCase): """ Server instance manager tests. """ @@ -38,18 +28,32 @@ def setUp(self): # Get the test workspace used to tests. 
self._test_workspace = os.environ['TEST_WORKSPACE'] + test_cfg = env.import_test_cfg(self._test_workspace) + self._test_env = test_cfg['codechecker_1']['check_env'] + self.home = self._test_env['HOME'] + test_class = self.__class__.__name__ print('Running ' + test_class + ' tests in ' + self._test_workspace) + def run_cmd(self, cmd): + print(cmd) + proc = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + env=self._test_env) + + out, _ = proc.communicate() + print(out) + return proc.returncode + def testServerStart(self): """Started server writes itself to instance list.""" test_cfg = env.import_test_cfg(self._test_workspace) codechecker_1 = test_cfg['codechecker_1'] EVENT_1.clear() - start_server(codechecker_1, test_cfg, EVENT_1) + start_server(codechecker_1, EVENT_1) - instance = [i for i in instance_manager.list() + instance = [i for i in instance_manager.list(self.home) if i['port'] == codechecker_1['viewer_port'] and i['workspace'] == self._test_workspace] @@ -64,10 +68,11 @@ def testServerStartSecondary(self): codechecker_1 = test_cfg['codechecker_1'] codechecker_2 = test_cfg['codechecker_2'] EVENT_2.clear() - start_server(codechecker_2, test_cfg, EVENT_2) + start_server(codechecker_2, EVENT_2) # Workspaces must match, servers were started in the same workspace. - instance_workspaces = [i['workspace'] for i in instance_manager.list() + instance_workspaces = [i['workspace'] + for i in instance_manager.list(self.home) if i['workspace'] == self._test_workspace] self.assertEqual(len(instance_workspaces), 2, @@ -75,7 +80,7 @@ def testServerStartSecondary(self): " was not found twice in the instance list.") # Exactly one server should own each port generated - instance_ports = [i['port'] for i in instance_manager.list() + instance_ports = [i['port'] for i in instance_manager.list(self.home) if i['port'] == codechecker_1['viewer_port'] or i['port'] == codechecker_2['viewer_port']] @@ -100,10 +105,10 @@ def testShutdownRecordKeeping(self): codechecker_1 = test_cfg['codechecker_1'] codechecker_2 = test_cfg['codechecker_2'] - instance_1 = [i for i in instance_manager.list() + instance_1 = [i for i in instance_manager.list(self.home) if i['port'] == codechecker_1['viewer_port'] and i['workspace'] == self._test_workspace] - instance_2 = [i for i in instance_manager.list() + instance_2 = [i for i in instance_manager.list(self.home) if i['port'] == codechecker_2['viewer_port'] and i['workspace'] == self._test_workspace] @@ -121,28 +126,28 @@ def testShutdownTerminateByCmdline(self): # NOTE: Yet again keep the lexicographical flow, no renames! - EVENT_2.clear() test_cfg = env.import_test_cfg(self._test_workspace) codechecker_1 = test_cfg['codechecker_1'] codechecker_2 = test_cfg['codechecker_2'] - start_server(codechecker_2, test_cfg, EVENT_2) + EVENT_2.clear() + start_server(codechecker_2, EVENT_2) # Kill the server, but yet again give a grace period. - self.assertEqual(0, run_cmd([env.codechecker_cmd(), - 'server', '--stop', - '--view-port', - str(codechecker_2['viewer_port']), - '--workspace', - self._test_workspace]), + self.assertEqual(0, self.run_cmd([env.codechecker_cmd(), + 'server', '--stop', + '--view-port', + str(codechecker_2['viewer_port']), + '--workspace', + self._test_workspace]), "The stop command didn't return exit code 0.") time.sleep(5) # Check if the remaining server is still there, # we need to make sure that --stop only kills the specified server! 
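A condensed sketch of the stop-and-verify step exercised here: stop one server by its port and workspace, then check that only its record left the instance list. The configuration keys follow the tests; the import paths are assumptions, not taken from the patch.

```python
import subprocess
import time

from libcodechecker import instance_manager  # import path assumed
from libtest import env


def stop_one_server(cfg, workspace, home, check_env):
    stop_cmd = [env.codechecker_cmd(), 'server', '--stop',
                '--view-port', str(cfg['viewer_port']),
                '--workspace', workspace]
    subprocess.call(stop_cmd, env=check_env)
    time.sleep(5)  # grace period, as in the tests above

    # Records matching this port/workspace should now be gone;
    # every other server's record must remain untouched.
    return [i for i in instance_manager.list(home)
            if i['port'] == cfg['viewer_port']
            and i['workspace'] == workspace]
```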
- instance_1 = [i for i in instance_manager.list() + instance_1 = [i for i in instance_manager.list(self.home) if i['port'] == codechecker_1['viewer_port'] and i['workspace'] == self._test_workspace] - instance_2 = [i for i in instance_manager.list() + instance_2 = [i for i in instance_manager.list(self.home) if i['port'] == codechecker_2['viewer_port'] and i['workspace'] == self._test_workspace] @@ -155,19 +160,19 @@ def testShutdownTerminateByCmdline(self): " instance list.") # Kill the first server via cmdline too. - self.assertEqual(0, run_cmd([env.codechecker_cmd(), - 'server', '--stop', - '--view-port', - str(codechecker_1['viewer_port']), - '--workspace', - self._test_workspace]), + self.assertEqual(0, self.run_cmd([env.codechecker_cmd(), + 'server', '--stop', + '--view-port', + str(codechecker_1['viewer_port']), + '--workspace', + self._test_workspace]), "The stop command didn't return exit code 0.") time.sleep(5) - instance_1 = [i for i in instance_manager.list() + instance_1 = [i for i in instance_manager.list(self.home) if i['port'] == codechecker_1['viewer_port'] and i['workspace'] == self._test_workspace] - instance_2 = [i for i in instance_manager.list() + instance_2 = [i for i in instance_manager.list(self.home) if i['port'] == codechecker_2['viewer_port'] and i['workspace'] == self._test_workspace] @@ -189,19 +194,19 @@ def testShutdownTerminateStopAll(self): codechecker_2 = test_cfg['codechecker_2'] EVENT_1.clear() EVENT_2.clear() - start_server(codechecker_1, test_cfg, EVENT_1) - start_server(codechecker_2, test_cfg, EVENT_2) + start_server(codechecker_1, EVENT_1) + start_server(codechecker_2, EVENT_2) - self.assertEqual(len(instance_manager.list()), 2, + self.assertEqual(len(instance_manager.list(self.home)), 2, "Two servers were started but they don't appear " "in the instance list.") # Kill the servers via cmdline. - self.assertEqual(0, run_cmd([env.codechecker_cmd(), - 'server', '--stop-all']), + self.assertEqual(0, self.run_cmd([env.codechecker_cmd(), + 'server', '--stop-all']), "The stop-all command didn't return exit code 0.") time.sleep(5) - self.assertEqual(len(instance_manager.list()), 0, + self.assertEqual(len(instance_manager.list(self.home)), 0, "Both servers were allegedly stopped but they " "did not disappear.") diff --git a/tests/functional/report_viewer_api/__init__.py b/tests/functional/report_viewer_api/__init__.py index d5401d9890..f66c5d3497 100644 --- a/tests/functional/report_viewer_api/__init__.py +++ b/tests/functional/report_viewer_api/__init__.py @@ -7,29 +7,21 @@ """Setup for the package tests.""" -import json -import multiprocessing import os -import shlex import shutil -import subprocess import sys -import time import uuid -from subprocess import CalledProcessError -from libtest import get_free_port -from libtest import project from libtest import codechecker from libtest import env +from libtest import project + -# Stopping event for CodeChecker server. -__STOP_SERVER = multiprocessing.Event() TEST_WORKSPACE = None def setup_package(): - """Setup the environment for the tests. Then start the server.""" + """Setup the environment for the tests.""" global TEST_WORKSPACE TEST_WORKSPACE = env.get_workspace('report_viewer_api') @@ -38,8 +30,6 @@ def setup_package(): test_project = 'cpp' - pg_db_config = env.get_postgresql_cfg() - test_config = {} project_info = project.get_info(test_project) @@ -55,9 +45,6 @@ def setup_package(): skip_list_file = None - # Get port numbers for the tests. 
- host_port_cfg = env.get_host_port_cfg() - test_env = env.test_env() codechecker_cfg = { @@ -65,18 +52,22 @@ def setup_package(): 'skip_list_file': skip_list_file, 'check_env': test_env, 'workspace': TEST_WORKSPACE, - 'pg_db_config': pg_db_config, 'checkers': [] } - codechecker_cfg.update(host_port_cfg) - ret = project.clean(test_project) if ret: sys.exit(ret) - print("Starting server to get results.") - _start_server(codechecker_cfg, test_config, False) + # Start or connect to the running CodeChecker server and get connection + # details. + print("This test uses a CodeChecker server... connecting...") + server_access = codechecker.start_or_get_server() + server_access['viewer_product'] = 'report_viewer_api' + codechecker.add_test_package_product(server_access, TEST_WORKSPACE) + + # Extend the checker configuration with the server access. + codechecker_cfg.update(server_access) test_project_name = project_info['name'] + '_' + uuid.uuid4().hex @@ -112,39 +103,15 @@ def setup_package(): def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() + """Clean up after the test.""" # TODO: if environment variable is set keep the workspace # and print out the path global TEST_WORKSPACE + check_env = env.import_test_cfg(TEST_WORKSPACE)[ + 'codechecker_cfg']['check_env'] + codechecker.remove_test_package_product(TEST_WORKSPACE, check_env) + print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE) - - -def _start_server(codechecker_cfg, test_config, auth=False): - """Start the CodeChecker server.""" - - def start_server_proc(event, server_cmd, checking_env): - """Target function for starting the CodeChecker server.""" - proc = subprocess.Popen(server_cmd, env=checking_env) - - # Blocking termination until event is set. - event.wait() - - # If proc is still running, stop it. - if proc.poll() is None: - proc.terminate() - - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) - - server_proc = multiprocessing.Process( - name='server', - target=start_server_proc, - args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) - - server_proc.start() - - # Wait for server to start and connect to database. - time.sleep(20) diff --git a/tests/functional/review_status/__init__.py b/tests/functional/review_status/__init__.py index ea28121b58..c67abd8e07 100644 --- a/tests/functional/review_status/__init__.py +++ b/tests/functional/review_status/__init__.py @@ -7,21 +7,15 @@ """Setup for the test package review_status.""" -import multiprocessing import os import shutil -import subprocess import sys -import time import uuid from libtest import codechecker from libtest import env from libtest import project -# Stopping event for CodeChecker server. -__STOP_SERVER = multiprocessing.Event() - # Test workspace should be initialized in this module. TEST_WORKSPACE = None @@ -36,8 +30,6 @@ def setup_package(): test_project = 'cpp' - pg_db_config = env.get_postgresql_cfg() - test_config = {} project_info = project.get_info(test_project) @@ -46,9 +38,6 @@ def setup_package(): skip_list_file = None - # Get port numbers for the tests. - host_port_cfg = env.get_host_port_cfg() - test_env = env.test_env() codechecker_cfg = { @@ -56,19 +45,22 @@ def setup_package(): 'skip_list_file': skip_list_file, 'check_env': test_env, 'workspace': TEST_WORKSPACE, - 'pg_db_config': pg_db_config, 'checkers': [] } - codechecker_cfg.update(host_port_cfg) - ret = project.clean(test_project, test_env) if ret: sys.exit(ret) - # Start the CodeChecker server. 
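The same setup change is applied to each functional test package in this patch: instead of spawning a private server process, a package now attaches to the suite-wide server and registers a product of its own. A condensed sketch of that shared pattern, using the helper names introduced in libtest.codechecker:

```python
from libtest import codechecker


def attach_to_shared_server(codechecker_cfg, product_name, test_workspace):
    # Reuse the already running suite-wide server, or start one.
    server_access = codechecker.start_or_get_server()

    # Every test package stores its results in its own product.
    server_access['viewer_product'] = product_name
    codechecker.add_test_package_product(server_access, test_workspace)

    # Merge host, port and product into the package configuration.
    codechecker_cfg.update(server_access)
    return codechecker_cfg
```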
- print("Starting server to get results") - _start_server(codechecker_cfg, test_config, False) + # Start or connect to the running CodeChecker server and get connection + # details. + print("This test uses a CodeChecker server... connecting...") + server_access = codechecker.start_or_get_server() + server_access['viewer_product'] = 'review_status' + codechecker.add_test_package_product(server_access, TEST_WORKSPACE) + + # Extend the checker configuration with the server access. + codechecker_cfg.update(server_access) test_project_name = project_info['name'] + '_' + uuid.uuid4().hex @@ -88,39 +80,15 @@ def setup_package(): def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() + """Clean up after the test.""" - # TODO: if environment variable is set keep the workspace - # and print out the path + # TODO: If environment variable is set keep the workspace + # and print out the path. global TEST_WORKSPACE + check_env = env.import_test_cfg(TEST_WORKSPACE)[ + 'codechecker_cfg']['check_env'] + codechecker.remove_test_package_product(TEST_WORKSPACE, check_env) + print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE) - - -def _start_server(codechecker_cfg, test_config, auth=False): - """Start the CodeChecker server.""" - - def start_server_proc(event, server_cmd, checking_env): - """Target function for starting the CodeChecker server.""" - proc = subprocess.Popen(server_cmd, env=checking_env) - - # Blocking termination until event is set. - event.wait() - - # If proc is still running, stop it. - if proc.poll() is None: - proc.terminate() - - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) - - server_proc = multiprocessing.Process( - name='server', - target=start_server_proc, - args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) - - server_proc.start() - - # Wait for server to start and connect to database. - time.sleep(20) diff --git a/tests/functional/review_status/test_review_status.py b/tests/functional/review_status/test_review_status.py index 2d2001b943..c7f1334d64 100644 --- a/tests/functional/review_status/test_review_status.py +++ b/tests/functional/review_status/test_review_status.py @@ -11,8 +11,8 @@ import os import unittest -from libtest.thrift_client_to_db import get_all_run_results from libtest import env +from libtest.thrift_client_to_db import get_all_run_results import shared diff --git a/tests/functional/skip/__init__.py b/tests/functional/skip/__init__.py index bd4c0234a6..db4cd0be46 100644 --- a/tests/functional/skip/__init__.py +++ b/tests/functional/skip/__init__.py @@ -7,30 +7,21 @@ """Setup for the package tests.""" -import json -import multiprocessing import os -import shlex import shutil -import subprocess import sys -import time import uuid -from subprocess import CalledProcessError -from libtest import get_free_port -from libtest import project from libtest import codechecker from libtest import env +from libtest import project + -# Stopping event for CodeChecker server. -__STOP_SERVER = multiprocessing.Event() TEST_WORKSPACE = None def setup_package(): - """Setup the environment for the tests. 
Check the test project twice, - then start the server.""" + """Setup the environment for the tests.""" global TEST_WORKSPACE TEST_WORKSPACE = env.get_workspace('skip') @@ -39,8 +30,6 @@ def setup_package(): test_project = 'cpp' - pg_db_config = env.get_postgresql_cfg() - test_config = {} project_info = project.get_info(test_project) @@ -55,9 +44,6 @@ def setup_package(): os.remove(skip_list_file) _generate_skip_list_file(skip_list_file) - # Get port numbers for the tests. - host_port_cfg = env.get_host_port_cfg() - test_env = env.test_env() codechecker_cfg = { @@ -65,19 +51,22 @@ def setup_package(): 'skip_file': skip_list_file, 'check_env': test_env, 'workspace': TEST_WORKSPACE, - 'pg_db_config': pg_db_config, 'checkers': [] } - codechecker_cfg.update(host_port_cfg) - ret = project.clean(test_project, test_env) if ret: sys.exit(ret) - # Start the CodeChecker server. - print("Starting server to get results.") - _start_server(codechecker_cfg, test_config, False) + # Start or connect to the running CodeChecker server and get connection + # details. + print("This test uses a CodeChecker server... connecting...") + server_access = codechecker.start_or_get_server() + server_access['viewer_product'] = 'skip' + codechecker.add_test_package_product(server_access, TEST_WORKSPACE) + + # Extend the checker configuration with the server access. + codechecker_cfg.update(server_access) test_project_name = project_info['name'] + '_' + uuid.uuid4().hex @@ -97,43 +86,20 @@ def setup_package(): def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() + """Clean up after the test.""" # TODO: If environment variable is set keep the workspace # and print out the path. global TEST_WORKSPACE + check_env = env.import_test_cfg(TEST_WORKSPACE)[ + 'codechecker_cfg']['check_env'] + codechecker.remove_test_package_product(TEST_WORKSPACE, check_env) + print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE) -def _start_server(codechecker_cfg, test_config, auth=False): - """Start the CodeChecker server.""" - def start_server_proc(event, server_cmd, checking_env): - """Target function for starting the CodeChecker server.""" - proc = subprocess.Popen(server_cmd, env=checking_env) - - # Blocking termination until event is set. - event.wait() - - # If proc is still running, stop it. - if proc.poll() is None: - proc.terminate() - - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) - - server_proc = multiprocessing.Process( - name='server', - target=start_server_proc, - args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) - - server_proc.start() - - # Wait for server to start and connect to database. - time.sleep(20) - - def _generate_skip_list_file(skip_list_file): """ Generate skip list file. diff --git a/tests/functional/suppress/__init__.py b/tests/functional/suppress/__init__.py index 0f45badd81..e6e33dab0b 100644 --- a/tests/functional/suppress/__init__.py +++ b/tests/functional/suppress/__init__.py @@ -7,158 +7,30 @@ """Setup for the package tests.""" -import multiprocessing import os import shutil -import subprocess -import sys -import time -import uuid -from libtest import project -from libtest import codechecker from libtest import env -# Stopping event for CodeChecker server. -__STOP_SERVER = multiprocessing.Event() + TEST_WORKSPACE = None def setup_package(): - """Setup the environment for the tests. 
Check the test project twice, - then start the server.""" + """Setup the environment for the tests.""" global TEST_WORKSPACE TEST_WORKSPACE = env.get_workspace('suppress') os.environ['TEST_WORKSPACE'] = TEST_WORKSPACE - test_project = 'cpp' - - pg_db_config = env.get_postgresql_cfg() - - test_config = {} - - project_info = project.get_info(test_project) - - test_config['test_project'] = project_info - - # Generate a suppress file for the tests. - suppress_file = os.path.join(TEST_WORKSPACE, 'suppress_file') - if os.path.isfile(suppress_file): - os.remove(suppress_file) - _generate_suppress_file(suppress_file) - - skip_list_file = None - - # Get port numbers for the tests. - host_port_cfg = env.get_host_port_cfg() - - test_env = env.test_env() - - codechecker_cfg = { - 'suppress_file': None, - 'skip_list_file': skip_list_file, - 'check_env': test_env, - 'workspace': TEST_WORKSPACE, - 'pg_db_config': pg_db_config, - 'checkers': [] - } - - codechecker_cfg.update(host_port_cfg) - - ret = project.clean(test_project, test_env) - if ret: - sys.exit(ret) - - # Start the CodeChecker server. - print("Starting server to get results") - _start_server(codechecker_cfg, test_config, False) - - test_project_name = project_info['name'] + '_' + uuid.uuid4().hex - - ret = codechecker.check(codechecker_cfg, - test_project_name, - project.path(test_project)) - - if ret: - sys.exit(1) - print("Analyzing the test project was successful.") - - codechecker_cfg['run_names'] = [test_project_name] - - test_config['codechecker_cfg'] = codechecker_cfg - - env.export_test_cfg(TEST_WORKSPACE, test_config) - def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() + """Clean up after the test.""" - # TODO: if environment variable is set keep the workspace - # and print out the path + # TODO: If environment variable is set keep the workspace + # and print out the path. global TEST_WORKSPACE print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE) - - -def _start_server(codechecker_cfg, test_config, auth=False): - """Start the CodeChecker server.""" - - def start_server_proc(event, server_cmd, checking_env): - """Target function for starting the CodeChecker server.""" - proc = subprocess.Popen(server_cmd, env=checking_env) - - # Blocking termination until event is set. - event.wait() - - # If proc is still running, stop it. - if proc.poll() is None: - proc.terminate() - - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) - - server_proc = multiprocessing.Process( - name='server', - target=start_server_proc, - args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) - - server_proc.start() - - # Wait for server to start and connect to database. - time.sleep(20) - - -def _generate_suppress_file(suppress_file): - """ - Create a dummy suppress file just to check if the old and the new - suppress format can be processed. 
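The dummy generator deleted below emits three line shapes, differing in whether a file name is present and whether the hash carries a `#<version>` suffix. A rough, illustrative sketch of how such lines split apart; the semantics are inferred from the generator itself, not from a specification:

```python
def split_suppress_line(line):
    """Illustrative only: separate the hash, the optional hash version,
    the optional file name and the comment of one suppress-file line."""
    parts = line.strip().split('||')
    if len(parts) == 2:
        bug_hash, comment = parts
        file_name = None
    else:
        bug_hash, file_name, comment = parts
    hash_value, _, hash_version = bug_hash.partition('#')
    return hash_value, hash_version or None, file_name, comment
```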
- """ - print("Generating suppress file: " + suppress_file) - - import calendar - import hashlib - import random - - hash_version = '1' - suppress_stuff = [] - for _ in range(10): - curr_time = calendar.timegm(time.gmtime()) - random_integer = random.randint(1, 9999999) - suppress_line = str(curr_time) + str(random_integer) - suppress_stuff.append( - hashlib.md5(suppress_line).hexdigest() + '#' + hash_version) - - s_file = open(suppress_file, 'w') - for k in suppress_stuff: - s_file.write(k + '||' + 'idziei éléáálk ~!@#$#%^&*() \n') - s_file.write( - k + '||' + 'test_~!@#$%^&*.cpp' + - '||' + 'idziei éléáálk ~!@#$%^&*(\n') - s_file.write( - hashlib.md5(suppress_line).hexdigest() + '||' + - 'test_~!@#$%^&*.cpp' + '||' + 'idziei éléáálk ~!@#$%^&*(\n') - - s_file.close() diff --git a/tests/functional/suppress/test_suppress_generation.py b/tests/functional/suppress/test_suppress_generation.py index 5def960677..ac692655d5 100644 --- a/tests/functional/suppress/test_suppress_generation.py +++ b/tests/functional/suppress/test_suppress_generation.py @@ -7,10 +7,10 @@ Test source-code level suppression data writing to suppress file. """ -from subprocess import CalledProcessError import os import shlex import subprocess +from subprocess import CalledProcessError import unittest from libtest import env @@ -24,7 +24,7 @@ class TestSuppress(unittest.TestCase): def setUp(self): self.test_workspace = os.environ['TEST_WORKSPACE'] self.test_dir = os.path.join( - os.path.dirname(__file__), "test_files") + os.path.dirname(__file__), 'test_files') def test_source_suppress_export(self): """ @@ -48,8 +48,8 @@ def __call(command): return cerr.returncode analyze_cmd = ['CodeChecker', 'analyze', - os.path.join(self.test_dir, "build.json"), - "--output", os.path.join(self.test_workspace, "reports") + os.path.join(self.test_dir, 'build.json'), + '--output', os.path.join(self.test_workspace, 'reports') ] ret = __call(analyze_cmd) self.assertEqual(ret, 0, "Couldn't create analysis of test project.") @@ -58,15 +58,15 @@ def __call(command): "generated.suppress") extract_cmd = ['CodeChecker', 'parse', - os.path.join(self.test_workspace, "reports"), - "--suppress", generated_file, - "--export-source-suppress" + os.path.join(self.test_workspace, 'reports'), + '--suppress', generated_file, + '--export-source-suppress' ] __call(extract_cmd) self.assertEqual(ret, 0, "Failed to generate suppress file.") with open(generated_file, 'r') as generated: - with open(os.path.join(self.test_dir, "expected.suppress"), + with open(os.path.join(self.test_dir, 'expected.suppress'), 'r') as expected: self.assertEqual(generated.read().strip(), expected.read().strip(), diff --git a/tests/functional/update/__init__.py b/tests/functional/update/__init__.py index 265b835b87..6534514d93 100644 --- a/tests/functional/update/__init__.py +++ b/tests/functional/update/__init__.py @@ -7,20 +7,15 @@ """Setup for the package tests.""" -import multiprocessing import os import shutil -import subprocess import sys -import time import uuid -from libtest import project from libtest import codechecker from libtest import env +from libtest import project -# Stopping event for CodeChecker server. -__STOP_SERVER = multiprocessing.Event() TEST_WORKSPACE = None @@ -35,8 +30,6 @@ def setup_package(): test_project = 'cpp' - pg_db_config = env.get_postgresql_cfg() - test_config = {} project_info = project.get_info(test_project) @@ -54,9 +47,6 @@ def setup_package(): skip_list_file = None - # Get port numbers for the tests. 
- host_port_cfg = env.get_host_port_cfg() - test_env = env.test_env() codechecker_cfg = { @@ -64,15 +54,18 @@ def setup_package(): 'skip_list_file': skip_list_file, 'check_env': test_env, 'workspace': TEST_WORKSPACE, - 'pg_db_config': pg_db_config, 'checkers': [] } - codechecker_cfg.update(host_port_cfg) + # Start or connect to the running CodeChecker server and get connection + # details. + print("This test uses a CodeChecker server... connecting...") + server_access = codechecker.start_or_get_server() + server_access['viewer_product'] = 'update' + codechecker.add_test_package_product(server_access, TEST_WORKSPACE) - # Start the CodeChecker server. - print("Starting server to get results") - _start_server(codechecker_cfg, test_config, False) + # Extend the checker configuration with the server access. + codechecker_cfg.update(server_access) ret = project.clean(test_project, test_env) if ret: @@ -93,39 +86,15 @@ def setup_package(): def teardown_package(): - """Stop the CodeChecker server.""" - __STOP_SERVER.set() + """Clean up after the test.""" # TODO: if environment variable is set keep the workspace # and print out the path global TEST_WORKSPACE + check_env = env.import_test_cfg(TEST_WORKSPACE)[ + 'codechecker_cfg']['check_env'] + codechecker.remove_test_package_product(TEST_WORKSPACE, check_env) + print("Removing: " + TEST_WORKSPACE) shutil.rmtree(TEST_WORKSPACE) - - -def _start_server(codechecker_cfg, test_config, auth=False): - """Start the CodeChecker server.""" - - def start_server_proc(event, server_cmd, checking_env): - """Target function for starting the CodeChecker server.""" - proc = subprocess.Popen(server_cmd, env=checking_env) - - # Blocking termination until event is set. - event.wait() - - # If proc is still running, stop it. - if proc.poll() is None: - proc.terminate() - - server_cmd = codechecker.serv_cmd(codechecker_cfg, test_config) - - server_proc = multiprocessing.Process( - name='server', - target=start_server_proc, - args=(__STOP_SERVER, server_cmd, codechecker_cfg['check_env'])) - - server_proc.start() - - # Wait for server to start and connect to database. - time.sleep(20) diff --git a/tests/libtest/__init__.py b/tests/libtest/__init__.py index 7111ad9c8f..55bce2ecca 100644 --- a/tests/libtest/__init__.py +++ b/tests/libtest/__init__.py @@ -3,15 +3,3 @@ # This file is distributed under the University of Illinois Open Source # License. See LICENSE.TXT for details. # ----------------------------------------------------------------------------- -import socket - - -def get_free_port(): - '''Get a free port from the OS.''' - - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.bind(('', 0)) - free_port = s.getsockname()[1] - s.close() - - return free_port diff --git a/tests/libtest/codechecker.py b/tests/libtest/codechecker.py index f5a1f49a25..89000126c1 100644 --- a/tests/libtest/codechecker.py +++ b/tests/libtest/codechecker.py @@ -7,9 +7,10 @@ import os import shlex import subprocess +from subprocess import CalledProcessError import time -from subprocess import CalledProcessError, Popen, PIPE, STDOUT +from . import env from . import project @@ -33,21 +34,34 @@ def wait_for_postgres_shutdown(workspace): def login(codechecker_cfg, test_project_path, username, password): """ - Log in to a server - + Perform a command-line login to the server. 
""" print("Logging in") + port = str(codechecker_cfg['viewer_port']) login_cmd = ['CodeChecker', 'cmd', 'login', '-u', username, '--verbose', 'debug', '--host', 'localhost', - '--port', str(codechecker_cfg['viewer_port'])] - auth_creds = {'client_autologin': True, - "credentials": {"*": username+":"+password}} + '--port', port] + auth_creds = {'client_autologin': True, + 'credentials': {}} auth_file = os.path.join(test_project_path, ".codechecker.passwords.json") + if not os.path.exists(auth_file): + # Create a default authentication file for the user, which has + # proper structure. + with open(auth_file, 'w') as outfile: + json.dump(auth_creds, outfile) + else: + with open(auth_file, 'r') as infile: + auth_creds = json.load(infile) + + # Write the new credentials to the file and save it. + auth_creds['credentials']['localhost:' + port] = username + ':' + password with open(auth_file, 'w') as outfile: json.dump(auth_creds, outfile) + print("Added '" + username + ':' + password + "' to credentials file.") + try: print(' '.join(login_cmd)) out = subprocess.call(shlex.split(' '.join(login_cmd)), @@ -60,6 +74,45 @@ def login(codechecker_cfg, test_project_path, username, password): return cerr.returncode +def logout(codechecker_cfg, test_project_path): + """ + Perform a command-line logout from a server. This method also clears the + credentials assigned to the server. + """ + print("Logging out") + port = str(codechecker_cfg['viewer_port']) + logout_cmd = ['CodeChecker', 'cmd', 'login', + '--logout', + '--verbose', 'debug', + '--host', 'localhost', + '--port', port] + + auth_file = os.path.join(test_project_path, ".codechecker.passwords.json") + if os.path.exists(auth_file): + # Remove the credentials associated with the throw-away test server. + with open(auth_file, 'r') as infile: + auth_creds = json.load(infile) + + del auth_creds['credentials']['localhost:' + port] + + with open(auth_file, 'w') as outfile: + json.dump(auth_creds, outfile) + print("Removed credentials from 'localhost:" + port + "'.") + else: + print("Credentials file did not exist. Did you login()?") + + try: + print(' '.join(logout_cmd)) + out = subprocess.call(shlex.split(' '.join(logout_cmd)), + cwd=test_project_path, + env=codechecker_cfg['check_env']) + print out + return 0 + except OSError as cerr: + print("Failed to call:\n" + ' '.join(cerr)) + return cerr.returncode + + def check(codechecker_cfg, test_project_name, test_project_path): """ Check a test project. 
@@ -78,9 +131,7 @@ def check(codechecker_cfg, test_project_name, test_project_path): '--analyzers', 'clangsa', '--quiet-build', '--verbose', 'debug'] - check_cmd.extend(['--host', 'localhost', - '--port', str(codechecker_cfg['viewer_port']) - ]) + check_cmd.extend(['--url', env.parts_to_url(codechecker_cfg)]) suppress_file = codechecker_cfg.get('suppress_file') if suppress_file: @@ -165,8 +216,7 @@ def store(codechecker_cfg, test_project_name, report_path): """ store_cmd = ['CodeChecker', 'store', - '--host', 'localhost', - '--port', str(codechecker_cfg['viewer_port']), + '--url', env.parts_to_url(codechecker_cfg), '--name', test_project_name, report_path] @@ -186,31 +236,160 @@ def store(codechecker_cfg, test_project_name, report_path): return cerr.returncode -def serv_cmd(codechecker_cfg, test_config): +def serv_cmd(config_dir, port, pg_config=None): server_cmd = ['CodeChecker', 'server', - '-w', codechecker_cfg['workspace']] - - suppress_file = codechecker_cfg.get('suppress_file') - if suppress_file: - server_cmd.extend(['--suppress', suppress_file]) + '--config-directory', config_dir] server_cmd.extend(['--host', 'localhost', - '--port', - str(codechecker_cfg['viewer_port']) - ]) + '--port', str(port)]) + # server_cmd.extend(['--verbose', 'debug']) - psql_cfg = codechecker_cfg.get('pg_db_config') - if psql_cfg: + if pg_config: server_cmd.append('--postgresql') - server_cmd += _pg_db_config_to_cmdline_params(psql_cfg) + server_cmd += _pg_db_config_to_cmdline_params(pg_config) + else: + server_cmd += ['--sqlite', os.path.join(config_dir, 'config.sqlite')] print(server_cmd) return server_cmd +def start_or_get_server(): + """ + Create a global CodeChecker server with the given configuration. + """ + config_dir = env.get_workspace(None) + portfile = os.path.join(config_dir, 'serverport') + + if os.path.exists(portfile): + print("A server appears to be already running...") + with open(portfile, 'r') as f: + port = int(f.read()) + else: + port = env.get_free_port() + print("Setting up CodeChecker server in " + config_dir + " :" + + str(port)) + + with open(portfile, 'w') as f: + f.write(str(port)) + + pg_config = env.get_postgresql_cfg() + + server_cmd = serv_cmd(config_dir, port, pg_config) + + print("Starting server...") + subprocess.Popen(server_cmd, env=env.test_env()) + + # Wait for server to start and connect to database. + # We give a bit of grace period here as a separate subcommand needs to + # attach. + time.sleep(5) + + if pg_config: + # The behaviour is that CodeChecker servers only configure a + # 'Default' product in SQLite mode, if the server was started + # brand new. But certain test modules might make use of a + # default product, so we now manually have to create it. + print("PostgreSQL server does not create 'Default' product...") + print("Creating it now!") + add_test_package_product({'viewer_host': 'localhost', + 'viewer_port': port, + 'viewer_product': 'Default'}, + 'Default') + return { + 'viewer_host': 'localhost', + 'viewer_port': port + } + + +def add_test_package_product(server_data, test_folder, check_env=None): + """ + Add a product for a test suite to the server provided by server_data. + Server must be running before called. + + server_data must contain three keys: viewer_{host, port, product}. 
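For reference, a `server_data` dictionary as described here could look like the following; the keys are the ones the helpers expect, the values are illustrative:

```python
server_data = {
    'viewer_host': 'localhost',   # host of the shared test server
    'viewer_port': 8001,          # port picked by start_or_get_server()
    'viewer_product': 'skip'      # product endpoint created for the suite
}
```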
+ """ + + if not check_env: + check_env = env.test_env() + + add_command = ['CodeChecker', 'cmd', 'products', 'add', + server_data['viewer_product'], + '--host', server_data['viewer_host'], + '--port', str(server_data['viewer_port']), + '--name', os.path.basename(test_folder), + '--description', "Automatically created product for test."] + + # If tests are running on postgres, we need to create a database. + pg_config = env.get_postgresql_cfg() + if pg_config: + pg_config['dbname'] = server_data['viewer_product'] + + psql_command = ['psql', + '-h', pg_config['dbaddress'], + '-p', str(pg_config['dbport']), + '-d', 'postgres', + '-c', "CREATE DATABASE \"" + pg_config['dbname'] + "\"" + ] + if 'dbusername' in pg_config: + psql_command += ['-U', pg_config['dbusername']] + + print(psql_command) + subprocess.call(psql_command, env=check_env) + + add_command.append('--postgresql') + add_command += _pg_db_config_to_cmdline_params(pg_config) + else: + # SQLite databases are put under the workspace of the appropriate test. + add_command += ['--sqlite', + os.path.join(test_folder, 'data.sqlite')] + + print(add_command) + + # The schema creation is a synchronous call. + subprocess.call(add_command, env=check_env) + + +def remove_test_package_product(test_folder, check_env=None): + """ + Remove the product associated with the given test folder. + The folder must exist, as the server configuration is read from the folder. + """ + + if not check_env: + check_env = env.test_env() + + server_data = env.import_test_cfg(test_folder)['codechecker_cfg'] + print(server_data) + + del_command = ['CodeChecker', 'cmd', 'products', 'del', + server_data['viewer_product'], + '--host', server_data['viewer_host'], + '--port', str(server_data['viewer_port'])] + + print(del_command) + subprocess.call(del_command, env=check_env) + + # If tests are running on postgres, we need to delete the database. + pg_config = env.get_postgresql_cfg() + if pg_config: + pg_config['dbname'] = server_data['viewer_product'] + + psql_command = ['psql', + '-h', pg_config['dbaddress'], + '-p', str(pg_config['dbport']), + '-d', 'postgres', + '-c', "DROP DATABASE \"" + pg_config['dbname'] + "\""] + if 'dbusername' in pg_config: + psql_command += ['-U', pg_config['dbusername']] + + print(psql_command) + subprocess.call(psql_command, env=check_env) + + def _pg_db_config_to_cmdline_params(pg_db_config): """Format postgres config dict to CodeChecker cmdline parameters.""" params = [] diff --git a/tests/libtest/env.py b/tests/libtest/env.py index 11d44fba0e..b0e4ac99de 100644 --- a/tests/libtest/env.py +++ b/tests/libtest/env.py @@ -8,17 +8,30 @@ import json import tempfile import shutil +import socket import sys -from . import get_free_port from thrift_client_to_db import get_viewer_client -from thrift_client_to_db import get_server_client from thrift_client_to_db import get_auth_client from functional import PKG_ROOT from functional import REPO_ROOT +def get_free_port(): + """ + Get a free port from the OS. + """ + # TODO: Prone to errors if the OS assigns port to someone else before use. 
+ + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.bind(('', 0)) + free_port = s.getsockname()[1] + s.close() + + return free_port + + def get_postgresql_cfg(): """ Returns PostgreSQL config if should be used based on the environment @@ -26,10 +39,10 @@ def get_postgresql_cfg(): """ use_postgresql = os.environ.get('TEST_USE_POSTGRESQL', '') == 'true' if use_postgresql: - pg_db_config = {} - pg_db_config['dbaddress'] = 'localhost' - pg_db_config['dbname'] = 'testDb' - pg_db_config['dbport'] = os.environ.get('TEST_DBPORT', get_free_port()) + pg_db_config = {'dbaddress': 'localhost', + 'dbport': os.environ.get('TEST_DBPORT'), + 'dbname': 'codechecker_config' + } if os.environ.get('TEST_DBUSERNAME', False): pg_db_config['dbusername'] = os.environ['TEST_DBUSERNAME'] return pg_db_config @@ -37,49 +50,36 @@ def get_postgresql_cfg(): return None -def get_host_port_cfg(): - - test_config = { - 'server_port': get_free_port(), - 'server_host': 'localhost', - 'viewer_port': get_free_port(), - 'viewer_host': 'localhost', - } - return test_config - - def clang_to_test(): return "clang_"+os.environ.get('TEST_CLANG_VERSION', 'stable') def setup_viewer_client(workspace, - uri='/', + endpoint='/CodeCheckerService', auto_handle_connection=True, session_token=None): - # read port and host from the test config file - port = import_test_cfg(workspace)['codechecker_cfg']['viewer_port'] - host = import_test_cfg(workspace)['codechecker_cfg']['viewer_host'] - - return get_viewer_client(port=port, - host=host, - uri=uri, + # Read port and host from the test config file. + codechecker_cfg = import_test_cfg(workspace)['codechecker_cfg'] + port = codechecker_cfg['viewer_port'] + host = codechecker_cfg['viewer_host'] + product = codechecker_cfg['viewer_product'] + + return get_viewer_client(host=host, + port=port, + product=product, + endpoint=endpoint, auto_handle_connection=auto_handle_connection, session_token=session_token) -def setup_server_client(workspace): - port = import_test_cfg(workspace)['codechecker_cfg']['server_port'] - host = import_test_cfg(workspace)['codechecker_cfg']['server_host'] - return get_server_client(port, host) - - def setup_auth_client(workspace, uri='/Authentication', auto_handle_connection=True, session_token=None): - port = import_test_cfg(workspace)['codechecker_cfg']['viewer_port'] - host = import_test_cfg(workspace)['codechecker_cfg']['viewer_host'] + codechecker_cfg = import_test_cfg(workspace)['codechecker_cfg'] + port = codechecker_cfg['viewer_port'] + host = codechecker_cfg['viewer_host'] return get_auth_client(port=port, host=host, @@ -108,12 +108,25 @@ def get_run_names(workspace): return import_test_cfg(workspace)['codechecker_cfg']['run_names'] +def parts_to_url(codechecker_cfg): + """ + Creates a product URL string from the test configuration dict. 
+ """ + return codechecker_cfg['viewer_host'] + ':' + \ + str(codechecker_cfg['viewer_port']) + '/' + \ + codechecker_cfg['viewer_product'] + + def get_workspace(test_id='test'): tmp_dir = os.path.join(REPO_ROOT, 'build') base_dir = os.path.join(tmp_dir, 'workspace') if not os.path.exists(base_dir): os.makedirs(base_dir) - return tempfile.mkdtemp(prefix=test_id+"-", dir=base_dir) + + if test_id: + return tempfile.mkdtemp(prefix=test_id+"-", dir=base_dir) + else: + return base_dir def clean_wp(workspace): diff --git a/tests/libtest/thrift_client_to_db.py b/tests/libtest/thrift_client_to_db.py index a13de61889..c7232fbfa9 100644 --- a/tests/libtest/thrift_client_to_db.py +++ b/tests/libtest/thrift_client_to_db.py @@ -11,10 +11,8 @@ import shared -from thrift.protocol import TBinaryProtocol from thrift.protocol import TJSONProtocol from thrift.transport import THttpClient -from thrift.transport import TSocket from thrift.transport import TTransport @@ -81,30 +79,18 @@ def __exit__(self, type, value, tb): self._transport.close() -class CCReportHelper(ThriftAPIHelper): - - def __init__(self, host, port, auto_handle_connection=True): - # import only if necessary; some tests may not add this to PYTHONPATH - from DBThriftAPI import CheckerReport - - transport = TTransport.TBufferedTransport(TSocket.TSocket(host, port)) - protocol = TBinaryProtocol.TBinaryProtocol(transport) - client = CheckerReport.Client(protocol) - super(CCReportHelper, self).__init__(transport, client, - auto_handle_connection) - - class CCViewerHelper(ThriftAPIHelper): - def __init__(self, host, port, uri, auto_handle_connection=True, - session_token=None): - # import only if necessary; some tests may not add this to PYTHONPATH + def __init__(self, host, port, product, endpoint, + auto_handle_connection=True, session_token=None): + # Import only if necessary; some tests may not add this to PYTHONPATH. from libcodechecker import session_manager from codeCheckerDBAccess import codeCheckerDBAccess from codeCheckerDBAccess.constants import MAX_QUERY_SIZE self.max_query_size = MAX_QUERY_SIZE - transport = THttpClient.THttpClient(host, port, uri) + transport = THttpClient.THttpClient(host, port, + '/' + product + endpoint) protocol = TJSONProtocol.TJSONProtocol(transport) client = codeCheckerDBAccess.Client(protocol) if session_token: @@ -151,7 +137,7 @@ class CCAuthHelper(ThriftAPIHelper): def __init__(self, host, port, uri, auto_handle_connection=True, session_token=None): - # import only if necessary; some tests may not add this to PYTHONPATH + # Import only if necessary; some tests may not add this to PYTHONPATH. from libcodechecker import session_manager from Authentication import codeCheckerAuthentication @@ -169,6 +155,28 @@ def __getattr__(self, attr): return partial(self._thrift_client_call, attr) +class CCProductHelper(ThriftAPIHelper): + + def __init__(self, host, port, uri, auto_handle_connection=True, + session_token=None): + # Import only if necessary; some tests may not add this to PYTHONPATH. 
+ from libcodechecker import session_manager + from ProductManagement import codeCheckerProductService + + transport = THttpClient.THttpClient(host, port, uri) + protocol = TJSONProtocol.TJSONProtocol(transport) + client = codeCheckerProductService.Client(protocol) + if session_token: + headers = {'Cookie': session_manager.SESSION_COOKIE_NAME + + "=" + session_token} + transport.setCustomHeaders(headers) + super(CCProductHelper, self).__init__(transport, + client, auto_handle_connection) + + def __getattr__(self, attr): + return partial(self._thrift_client_call, attr) + + def get_all_run_results(client, run_id, sort_mode=[], filters=[]): """ Get all the results for a run. @@ -220,23 +228,26 @@ def get_all_run_results_v2(client, run_id, sort_mode=[], filters=None): return results -def get_viewer_client(port, host='localhost', uri='/', +def get_viewer_client(product, port, host='localhost', + endpoint='/CodeCheckerService', auto_handle_connection=True, session_token=None): - return CCViewerHelper(host, - port, - uri, + return CCViewerHelper(host, port, product, + endpoint, auto_handle_connection, session_token) -def get_server_client(port, host='localhost'): - return CCReportHelper(host, port) - - def get_auth_client(port, host='localhost', uri='/Authentication', auto_handle_connection=True, session_token=None): return CCAuthHelper(host, port, uri, auto_handle_connection, session_token) + + +def get_product_client(port, host='localhost', uri='/Products', + auto_handle_connection=True, session_token=None): + return CCProductHelper(host, port, uri, + auto_handle_connection, + session_token) diff --git a/tests/unit/test_product_url.py b/tests/unit/test_product_url.py new file mode 100644 index 0000000000..e3fabf3d02 --- /dev/null +++ b/tests/unit/test_product_url.py @@ -0,0 +1,89 @@ +# ----------------------------------------------------------------------------- +# The CodeChecker Infrastructure +# This file is distributed under the University of Illinois Open Source +# License. See LICENSE.TXT for details. +# ----------------------------------------------------------------------------- + +""" Unit tests for understanding product URLs. """ + +import unittest + +from libcodechecker.util import split_product_url + + +class product_urlTest(unittest.TestCase): + """ + Testing the product URL splitter. + """ + + def testFullURL(self): + """ + Whole product URL understanding. + """ + def test(host, port, name, protocol=None): + url = ''.join([protocol + "://" if protocol else "", + host, ":", str(port), "/", name]) + + shost, sport, sname = split_product_url(url) + self.assertEqual(shost, host) + self.assertEqual(sport, port) + self.assertEqual(sname, name) + + test("localhost", 8001, "Default") + test("localhost", 8002, "MyProduct") + test("another.server", 80, "CodeChecker", "http") + test("very-secure.another.server", 443, "CodeChecker", "https") + + def testProductName(self): + """ + Understanding only a product name specified. + """ + def test(name, protocol=None): + url = ''.join([protocol + "://" if protocol else "", name]) + + shost, sport, sname = split_product_url(url) + self.assertEqual(shost, "localhost") + self.assertEqual(sport, 8001) + self.assertEqual(sname, name) + + test("Default") + test("Default", "http") + test("MyProduct") + test("Enterprise-Product", "https") + + def testHostAndProductName(self): + """ + Understanding a host and a product name specified. 
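The unit tests in this file pin down how `split_product_url()` fills in defaults when parts of the URL are omitted. As a quick reference, with expected results taken from the assertions:

```python
from libcodechecker.util import split_product_url

split_product_url("Default")
# -> ('localhost', 8001, 'Default')        only a product name given

split_product_url("http://otherhost/awesome123")
# -> ('otherhost', 8001, 'awesome123')     host given, default port assumed

split_product_url("https://very-secure.another.server:443/CodeChecker")
# -> ('very-secure.another.server', 443, 'CodeChecker')
```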
+ """ + def test(host, name, protocol=None): + url = ''.join([protocol + "://" if protocol else "", + host, "/", name]) + + shost, sport, sname = split_product_url(url) + self.assertEqual(shost, host) + self.assertEqual(sport, 8001) + self.assertEqual(sname, name) + + test("localhost", "Default") + test("otherhost", "awesome123", "http") + + # 8080/product as if 8080 was a host name. + test("8080", "MyProduct") + test("super", "verygood", "https") + + def testBadProductNames(self): + """ + Parser throws on bad product URLs? + """ + + with self.assertRaises(ValueError): + split_product_url("123notaproductname") + + with self.assertRaises(ValueError): + split_product_url("localhost//containsExtraChar") + + with self.assertRaises(ValueError): + split_product_url("in:valid:format/product") + + with self.assertRaises(ValueError): + split_product_url("localhost:12PortIsANumber34/Foo") diff --git a/www/index.html b/www/index.html index 5144c748c5..b95cf3e8b8 100644 --- a/www/index.html +++ b/www/index.html @@ -40,6 +40,8 @@ + + diff --git a/www/products.html b/www/products.html new file mode 100644 index 0000000000..e0af183986 --- /dev/null +++ b/www/products.html @@ -0,0 +1,102 @@ + + + + CodeChecker + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/www/scripts/codecheckerviewer/CommentView.js b/www/scripts/codecheckerviewer/CommentView.js index 8d44fbebbe..560b2a87fd 100644 --- a/www/scripts/codecheckerviewer/CommentView.js +++ b/www/scripts/codecheckerviewer/CommentView.js @@ -20,9 +20,9 @@ define([ 'dijit/tree/ObjectStoreModel', 'codechecker/HtmlTree', 'codechecker/util'], -function (declare, dom, style, topic, Memory, Observable, ConfirmDialog, Dialog, - ContentPane, Button, SimpleTextarea, TextBox, ObjectStoreModel, HtmlTree, - util) { +function (declare, dom, style, topic, Memory, Observable, ConfirmDialog, + Dialog, ContentPane, Button, SimpleTextarea, TextBox, ObjectStoreModel, + HtmlTree, util) { var Reply = declare(ContentPane, { constructor : function () { @@ -207,4 +207,4 @@ function (declare, dom, style, topic, Memory, Observable, ConfirmDialog, Dialog, }); } }); -}); \ No newline at end of file +}); diff --git a/www/scripts/codecheckerviewer/ListOfRuns.js b/www/scripts/codecheckerviewer/ListOfRuns.js index 0cd1e56128..56090028d5 100644 --- a/www/scripts/codecheckerviewer/ListOfRuns.js +++ b/www/scripts/codecheckerviewer/ListOfRuns.js @@ -20,25 +20,6 @@ define([ function (declare, domConstruct, ItemFileWriteStore, topic, Dialog, Button, RadioButton, TextBox, BorderContainer, ContentPane, DataGrid, util) { - function prettifyDuration(seconds) { - var prettyDuration = "--------"; - - if (seconds >= 0) { - var durHours = Math.floor(seconds / 3600); - var durMins = Math.floor(seconds / 60) - durHours * 60; - var durSecs = seconds - durMins * 60 - durHours * 3600; - - var prettyDurHours = (durHours < 10 ? '0' : '') + durHours; - var prettyDurMins = (durMins < 10 ? '0' : '') + durMins; - var prettyDurSecs = (durSecs < 10 ? '0' : '') + durSecs; - - prettyDuration - = prettyDurHours + ':' + prettyDurMins + ':' + prettyDurSecs; - } - - return prettyDuration; - } - /** * This function helps to format a data grid cell with two radio buttons. 
* @param args {runData, listOfRunsGrid} - the value from the data store that @@ -188,7 +169,7 @@ function (declare, domConstruct, ItemFileWriteStore, topic, Dialog, Button, name : '' + runData.name + '', date : currItemDate[0] + ' ' + currItemDate[1], numberofbugs : runData.resultCount, - duration : prettifyDuration(runData.duration), + duration : util.prettifyDuration(runData.duration), runData : runData, checkcmd : 'Show', del : false, diff --git a/www/scripts/codecheckerviewer/codecheckerviewer.js b/www/scripts/codecheckerviewer/codecheckerviewer.js index 9831f8953c..0d0127b20b 100644 --- a/www/scripts/codecheckerviewer/codecheckerviewer.js +++ b/www/scripts/codecheckerviewer/codecheckerviewer.js @@ -11,6 +11,7 @@ define([ 'dijit/Dialog', 'dijit/DropDownMenu', 'dijit/MenuItem', + 'dijit/form/Button', 'dijit/form/DropDownButton', 'dijit/layout/BorderContainer', 'dijit/layout/ContentPane', @@ -20,8 +21,8 @@ define([ 'codechecker/ListOfRuns', 'codechecker/util'], function (declare, topic, domConstruct, Dialog, DropDownMenu, MenuItem, - DropDownButton, BorderContainer, ContentPane, TabContainer, hashHelper, - ListOfBugs, ListOfRuns, util, filterHelper) { + Button, DropDownButton, BorderContainer, ContentPane, TabContainer, + hashHelper, ListOfBugs, ListOfRuns, util) { return function () { @@ -29,12 +30,19 @@ function (declare, topic, domConstruct, Dialog, DropDownMenu, MenuItem, CC_SERVICE = new codeCheckerDBAccess.codeCheckerDBAccessClient( new Thrift.Protocol(new Thrift.Transport("CodeCheckerService"))); - CC_AUTH_SERVICE = + + CC_OBJECTS = codeCheckerDBAccess; + + AUTH_SERVICE = new codeCheckerAuthentication.codeCheckerAuthenticationClient( new Thrift.TJSONProtocol( new Thrift.Transport("/Authentication"))); - CC_OBJECTS = codeCheckerDBAccess; + PROD_SERVICE = + new codeCheckerProductManagement.codeCheckerProductServiceClient( + new Thrift.Protocol(new Thrift.Transport("Products"))); + + PROD_OBJECTS = codeCheckerProductManagement; //----------------------------- Main layout ------------------------------// @@ -48,6 +56,10 @@ function (declare, topic, domConstruct, Dialog, DropDownMenu, MenuItem, //--- Logo ---// + var currentProduct = PROD_SERVICE.getCurrentProduct(); + var currentProductName = util.atou(currentProduct.displayedName_b64); + document.title = currentProductName + ' - CodeChecker'; + var logoContainer = domConstruct.create('div', { id : 'logo-container' }, headerPane.domNode); @@ -56,7 +68,7 @@ function (declare, topic, domConstruct, Dialog, DropDownMenu, MenuItem, var logoText = domConstruct.create('div', { id : 'logo-text', - innerHTML : 'CodeChecker' + innerHTML : 'CodeChecker - ' + currentProductName }, logoContainer); var version = domConstruct.create('span', { @@ -64,7 +76,7 @@ function (declare, topic, domConstruct, Dialog, DropDownMenu, MenuItem, innerHTML : CC_SERVICE.getPackageVersion() }, logoText); - var user = CC_AUTH_SERVICE.getLoggedInUser(); + var user = AUTH_SERVICE.getLoggedInUser(); var loginUserSpan = null; if (user.length > 0) { loginUserSpan = domConstruct.create('span', { @@ -73,7 +85,7 @@ function (declare, topic, domConstruct, Dialog, DropDownMenu, MenuItem, }); } - //--- Menu button ---// + //--- Menu button ---// var credits = new Dialog({ title : 'Credits', @@ -116,6 +128,18 @@ function (declare, topic, domConstruct, Dialog, DropDownMenu, MenuItem, dropDown : menuItems }); + //--- Back button to product list ---// + + var productListButton = new Button({ + class : 'mainMenuButton', + label : 'Back to product list', + onClick : function () { + // 
Use explicit URL here, as '/' could redirect back to this product + // if there is only one product. + window.open('/products.html', '_self'); + } + }); + var headerMenu = domConstruct.create('div', { id : 'header-menu' }); @@ -123,8 +147,9 @@ function (declare, topic, domConstruct, Dialog, DropDownMenu, MenuItem, if (loginUserSpan != null) domConstruct.place(loginUserSpan, headerMenu); - domConstruct.place(menuButton.domNode, headerMenu); + domConstruct.place(productListButton.domNode, headerMenu); + domConstruct.place(menuButton.domNode, headerMenu); domConstruct.place(headerMenu, headerPane.domNode); diff --git a/www/scripts/codecheckerviewer/util.js b/www/scripts/codecheckerviewer/util.js index f854b54f41..08932b1b7e 100644 --- a/www/scripts/codecheckerviewer/util.js +++ b/www/scripts/codecheckerviewer/util.js @@ -63,6 +63,45 @@ function (locale, dom, style) { return '#' + '00000'.substring(0, 6 - c.length) + c; }, + /** + * This function creates a colour from a string, then blend it with the + * given other colour with the given ratio. + * + * @param blendColour a variable applicable to the constructor of + * dojo.Color. It can be a color name, a hex string, on an array of RGB. + */ + strToColorBlend : function (str, blendColour, ratio) { + if (ratio === undefined) { + ratio = 0.75; + } + + var baseColour = new dojo.Color(this.strToColor(str)); + return dojo.blendColors(baseColour, new dojo.Color(blendColour), ratio); + }, + + /** + * Converts the given number of seconds into a more human-readable + * 'hh:mm:ss' format. + */ + prettifyDuration: function (seconds) { + var prettyDuration = "--------"; + + if (seconds >= 0) { + var durHours = Math.floor(seconds / 3600); + var durMins = Math.floor(seconds / 60) - durHours * 60; + var durSecs = seconds - durMins * 60 - durHours * 3600; + + var prettyDurHours = (durHours < 10 ? '0' : '') + durHours; + var prettyDurMins = (durMins < 10 ? '0' : '') + durMins; + var prettyDurSecs = (durSecs < 10 ? '0' : '') + durSecs; + + prettyDuration + = prettyDurHours + ':' + prettyDurMins + ':' + prettyDurSecs; + } + + return prettyDuration; + }, + /** * Creates a human friendly relative time ago on the date. */ @@ -73,7 +112,7 @@ function (locale, dom, style) { hour = minute * 60, day = hour * 24, week = day * 7, - month = day * 30 + month = day * 30, year = day * 365; var fuzzy; @@ -207,6 +246,25 @@ function (locale, dom, style) { }, content); return content; + }, + + /** + * Converts the given string containing Unicode characters to a base64 + * string. + */ + utoa : function(ustring) { + return window.btoa(unescape(encodeURIComponent(ustring))); + }, + + /** + * Converts the given Base64-encoded string to a Unicode string, properly + * handling the wider codepoints. + * + * (Normal "atob" would convert base64 to string where each character + * is one byte long, chopping up Unicode.) + */ + atou : function(b64) { + return decodeURIComponent(escape(window.atob(b64))); } }; }); diff --git a/www/scripts/productlist/ListOfProducts.js b/www/scripts/productlist/ListOfProducts.js new file mode 100644 index 0000000000..6e78f52cf3 --- /dev/null +++ b/www/scripts/productlist/ListOfProducts.js @@ -0,0 +1,201 @@ +// ------------------------------------------------------------------------- +// The CodeChecker Infrastructure +// This file is distributed under the University of Illinois Open Source +// License. See LICENSE.TXT for details. 
+// ------------------------------------------------------------------------- + +define([ + 'dojo/_base/declare', + 'dojo/dom-construct', + 'dojo/data/ItemFileWriteStore', + 'dojo/topic', + 'dijit/form/Button', + 'dijit/form/TextBox', + 'dijit/layout/BorderContainer', + 'dijit/layout/ContentPane', + 'dojox/grid/DataGrid', + 'codechecker/util'], +function (declare, domConstruct, ItemFileWriteStore, topic, Button, + TextBox, BorderContainer, ContentPane, DataGrid, util) { + + var ListOfProductsGrid = declare(DataGrid, { + constructor : function () { + this.store = new ItemFileWriteStore({ + data : { identifier : 'endpoint', items : [] } + }); + + // TODO: Support access control for products and handle locks well. + // TODO: Support showing the last checkin's information for products. + this.structure = [ + { name : ' ', field : 'status', cellClasses : 'status', width : '20px', noresize : true }, + { name : ' ', field : 'icon', cellClasses : 'product-icon', width : '40px', noresize : true }, + { name : 'Name', field : 'name', cellClasses : 'product-name', width : '25%' }, + { name : 'Description', field : 'description', styles : 'text-align: left;', width : '70%' }/*, + { name : 'Last check date', field : 'date', styles : 'text-align: center;', width : '30%' }, + { name : 'Last check bugs', field : 'numberofbugs', styles : 'text-align: center;', width : '20%' }, + { name : 'Last check duration', field : 'duration', styles : 'text-align: center;' }*/ + ]; + + this.focused = true; + this.selectable = true; + this.keepSelection = true; + this.escapeHTMLInData = false; + }, + + postCreate : function () { + this.inherited(arguments); + this._populateProducts(); + }, + + onRowClick : function (evt) { + var item = this.getItem(evt.rowIndex); + + switch (evt.cell.field) { + case 'name': + if (item.connected[0] && item.accessible[0]) { + window.open('/' + item.endpoint[0], '_self'); + } + break; + } + }, + + getItemsWhere : function (func) { + var result = []; + + for (var i = 0; i < this.rowCount; ++i) { + var item = this.getItem(i); + if (func(item)) + result.push(item); + } + + return result; + }, + + _addProductData : function (item) { + var name = util.atou(item.displayedName_b64); + var description = item.description_b64 + ? util.atou(item.description_b64) + : ""; + var statusIcon = ''; + var icon ='
<div class="product-avatar">' +
+        '<span class="product-avatar">' +
+        name[0].toUpperCase() +
+        '</span></div>';
+
+      if (!item.connected || !item.accessible) {
+        name = '<span class="product-error">' +
+          name + '</span>';
+
+        if (!item.connected) {
+          statusIcon = '<span class="customIcon product-error"></span>';
+          description = '<span class="product-description-error database">' +
+            'The database connection for this product could not be made!' +
+            '</span><br/>' + description;
+        } else if (!item.accessible) {
+          statusIcon = '<span class="customIcon product-noaccess"></span>';
+          description = '<span class="product-description-error access">' +
+            'You do not have access to this product!' +
+            '</span><br/>' + description;
+        }
+      } else {
+        name = '<span class="link">' + name + '</span>';
+      }
+
+      this.store.newItem({
+        status : statusIcon,
+        icon : icon,
+        endpoint : item.endpoint,
+        name : name,
+        description : description,
+        connected : item.connected,
+        accessible : item.accessible
+      });
+    },
+
+    _populateProducts : function (productNameFilter) {
+      var that = this;
+
+      PROD_SERVICE.getProducts(null, productNameFilter, function (productList) {
+        that.onLoaded(productList);
+
+        productList.forEach(function (item) {
+          that._addProductData(item);
+        });
+      });
+    },
+
+    /**
+     * Refreshes the grid with the available product data, filtered by the
+     * given product name.
+     */
+    refreshGrid : function (productNameFilter) {
+      var that = this;
+
+      this.store.fetch({
+        onComplete : function (products) {
+          products.forEach(function (product) {
+            that.store.deleteItem(product);
+          });
+          that.store.save();
+        }
+      });
+
+      PROD_SERVICE.getProducts(null, productNameFilter, function (productDataList) {
+        productDataList.forEach(function (item) {
+          that._addProductData(item);
+        });
+      });
+    },
+
+    onLoaded : function (productDataList) {}
+  });
+
+  var ProductFilter = declare(ContentPane, {
+    constructor : function () {
+      var that = this;
+
+      this._productFilter = new TextBox({
+        id : 'products-filter',
+        placeHolder : 'Search for products...',
+        onKeyUp : function (evt) {
+          clearTimeout(this.timer);
+
+          var filter = this.get('value');
+          this.timer = setTimeout(function () {
+            that.listOfProductsGrid.refreshGrid(filter);
+          }, 500);
+        }
+      });
+    },
+
+    postCreate : function () {
+      this.addChild(this._productFilter);
+    }
+  });
+
+  return declare(BorderContainer, {
+    postCreate : function () {
+      var that = this;
+
+      var filterPane = new ProductFilter({
+        id : 'products-filter-container',
+        region : 'top'
+      });
+
+      var listOfProductsGrid = new ListOfProductsGrid({
+        id : 'productGrid',
+        region : 'center',
+        onLoaded : that.onLoaded
+      });
+
+      filterPane.set('listOfProductsGrid', listOfProductsGrid);
+
+      this.addChild(filterPane);
+      this.addChild(listOfProductsGrid);
+    },
+
+    onLoaded : function (productDataList) {}
+  });
+});
diff --git a/www/scripts/productlist/productlist.js b/www/scripts/productlist/productlist.js
new file mode 100644
index 0000000000..197599bcec
--- /dev/null
+++ b/www/scripts/productlist/productlist.js
@@ -0,0 +1,103 @@
+// -------------------------------------------------------------------------
+// The CodeChecker Infrastructure
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+// ------------------------------------------------------------------------- + +define([ + 'dojo/_base/declare', + 'dojo/topic', + 'dojo/dom-construct', + 'dijit/form/Button', + 'dijit/layout/BorderContainer', + 'dijit/layout/ContentPane', + 'products/ListOfProducts'], +function (declare, topic, domConstruct, Button, BorderContainer, + ContentPane, ListOfProducts) { + + return function () { + + //---------------------------- Global objects ----------------------------// + + PROD_SERVICE = + new codeCheckerProductManagement.codeCheckerProductServiceClient( + new Thrift.Protocol(new Thrift.Transport("Products"))); + + PROD_OBJECTS = codeCheckerProductManagement; + + AUTH_SERVICE = + new codeCheckerAuthentication.codeCheckerAuthenticationClient( + new Thrift.TJSONProtocol( + new Thrift.Transport("/Authentication"))); + + //----------------------------- Main layout ------------------------------// + + var layout = new BorderContainer({ id : 'mainLayout' }); + + var headerPane = new ContentPane({ id : 'headerPane', region : 'top' }); + layout.addChild(headerPane); + + var productsPane = new ContentPane({ region : 'center' }); + layout.addChild(productsPane); + + //--- Logo ---// + + var logoContainer = domConstruct.create('div', { + id : 'logo-container' + }, headerPane.domNode); + + var logo = domConstruct.create('span', { id : 'logo' }, logoContainer); + + var logoText = domConstruct.create('div', { + id : 'logo-text', + innerHTML : 'CodeChecker - Products on this server' + }, logoContainer); + + var version = domConstruct.create('span', { + id : 'logo-version', + innerHTML : PROD_SERVICE.getPackageVersion() + }, logoText); + + var user = AUTH_SERVICE.getLoggedInUser(); + var loginUserSpan = null; + if (user.length > 0) { + loginUserSpan = domConstruct.create('span', { + id: 'loggedin', + innerHTML: "Logged in as " + user + "." + }); + } + + //--- Admin button ---// + + // TODO: Show admin button only if superuser. + /* var menuButton = new Button({ + class : 'mainMenuButton', + label : 'Administration', + onClick : function () { + window.open('/Administration', '_self'); + } + }); */ + + var headerMenu = domConstruct.create('div', { + id : 'header-menu' + }); + + if (loginUserSpan != null) + domConstruct.place(loginUserSpan, headerMenu); + + /* domConstruct.place(menuButton.domNode, headerMenu); */ + + domConstruct.place(headerMenu, headerPane.domNode); + + //--- Center panel ---// + + var listOfProducts = new ListOfProducts(); + + productsPane.addChild(listOfProducts); + + //--- Init page ---// + + document.body.appendChild(layout.domNode); + layout.startup(); + }; +}); diff --git a/www/style/codecheckerviewer.css b/www/style/codecheckerviewer.css index cd45361ae2..6433841c79 100644 --- a/www/style/codecheckerviewer.css +++ b/www/style/codecheckerviewer.css @@ -47,7 +47,7 @@ html, body { #logo-version { font-family: 'Source Code Pro', monospace; font-size: 16pt; - padding-left: 12px; + padding-left: 16px; color: white; } diff --git a/www/style/productlist.css b/www/style/productlist.css new file mode 100644 index 0000000000..6c604ad9b5 --- /dev/null +++ b/www/style/productlist.css @@ -0,0 +1,74 @@ + +/* Do not use this stylesheet without codecheckerviewer.css! 
*/ + +/*** Product grid ***/ + +#productGrid .dojoxGridCell { + border: 0; +} + +.dojoxGridCell.product-icon { + text-align: center; +} + +.dojoxGridCell.product-name { + text-align: left; + font-size: 12pt; +} + +.dojoxGridCell.product-name .product-error { + text-decoration: line-through; +} + +.dojoxGridCell .product-description-error { + font-style: italic; + font-family: 'Source Code Pro', monospace; + font-size: 12px; +} + +.dojoxGridCell .product-description-error.database { + color: #b32424; +} + +.dojoxGridCell .product-description-error.access { + color: #5b6169; +} + +div.product-avatar { + width: 40px; + height: 40px; + border-radius: 55%; + background-color: #eee; +} + +span.product-avatar { + font-size: 18px; + width: 40px; + height: 40px; + text-align: center; + vertical-align: middle; + color: black; + font-weight: 600; + font-family: Helvetica, Arial, sans-serif; + display: table-cell; +} + +.dojoxGridCell.status { + text-align: center; +} + +.customIcon.product-noaccess { + color: #5b6169; +} + +.customIcon.product-noaccess:before { + content: "\e014"; +} + +.customIcon.product-error { + color: #b32424; +} + +.customIcon.product-error:before { + content: "\e015"; +}
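
Note (illustrative, not part of the patch): a minimal sketch of how the new util.js helpers added above behave, assuming the page's dojoConfig maps the 'codechecker' AMD package to the viewer script directory, as the viewer pages already do for the 'codechecker/util' module id.

require(['codechecker/util'], function (util) {
  // 3725 seconds -> '01:02:05' (zero-padded hh:mm:ss).
  console.log(util.prettifyDuration(3725));

  // Negative input falls back to the '--------' placeholder.
  console.log(util.prettifyDuration(-1));

  // utoa/atou round-trip a Unicode product name through Base64, matching
  // the *_b64 fields used by the product management API.
  var b64 = util.utoa('Déjà vu product');
  console.log(util.atou(b64) === 'Déjà vu product'); // true
});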