diff --git a/tests/conftest.py b/tests/conftest.py index 389e44dd42..d465504e00 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -71,3 +71,74 @@ def app(env_config, default_config, instance_path): with app.app_context(): yield app + + +@pytest.fixture(scope="module") +def database(app): + """Setup database. + + Scope: module + + Normally, tests should use the function-scoped :py:data:`db` fixture + instead. This fixture takes care of creating the database/tables and + removing the tables once tests are done. + """ + from invenio_db import db as db_ + from sqlalchemy_utils.functions import create_database, database_exists + + if not database_exists(str(db_.engine.url)): + create_database(str(db_.engine.url)) + + # Use unlogged tables for PostgreSQL (see https://github.com/sqlalchemy/alembic/discussions/1108) + if db_.engine.name == "postgresql": + from sqlalchemy.ext.compiler import compiles + from sqlalchemy.schema import CreateTable + + @compiles(CreateTable) + def _compile_unlogged(element, compiler, **kwargs): + return compiler.visit_create_table(element).replace( + "CREATE TABLE ", + "CREATE UNLOGGED TABLE ", + ) + + db_.create_all() + + yield db_ + + db_.session.remove() + db_.drop_all() + + +def _search_create_indexes(current_search, current_search_client): + """Create all registered search indexes.""" + from invenio_search.engine import search + + try: + list(current_search.create()) + except search.RequestError: + list(current_search.delete(ignore=[404])) + list(current_search.create()) + current_search_client.indices.refresh() + + +def _search_delete_indexes(current_search): + """Delete all registered search indexes.""" + list(current_search.delete(ignore=[404])) + + +@pytest.fixture(scope="module") +def search(app): + """Setup and teardown all registered search indices. + + Scope: module + + This fixture will create all registered indexes in search and remove + once done. Fixtures that perform changes (e.g. 
index or remove documents) + should use the function-scoped :py:data:`search_clear` fixture to leave the + indexes clean for the following tests. + """ + from invenio_search import current_search, current_search_client + + _search_create_indexes(current_search, current_search_client) + yield current_search_client + _search_delete_indexes(current_search) diff --git a/tests/test_cernopendata_query_parser.py b/tests/test_cernopendata_query_parser.py index 21ca464e2d..68d2d613c6 100644 --- a/tests/test_cernopendata_query_parser.py +++ b/tests/test_cernopendata_query_parser.py @@ -28,35 +28,31 @@ from cernopendata.config import _query_parser_and +def _create_query(term): + # Defines the skeleton of a query + return dsl.query.Bool( + must=[ + dsl.query.QueryString( + default_operator="AND", fields=["title.tokens^2", "*"], query=term + ) + ], + must_not=[dsl.query.Match(distribution__availability="ondemand")], + ) + + def test_cernopendata_query_parser(app): with app.test_request_context("/"): - assert _query_parser_and("/Btau") == dsl.query.Bool( - must=[dsl.query.QueryString(default_operator="AND", query="\\/Btau")], - must_not=[dsl.query.Match(distribution__availability="ondemand")], - ) - assert _query_parser_and('"/Btau"') == dsl.query.Bool( - must=[dsl.query.QueryString(default_operator="AND", query='"\\/Btau"')], - must_not=[dsl.query.Match(distribution__availability="ondemand")], - ) - assert _query_parser_and("/btau AND CMS") == dsl.query.Bool( - must=[ - dsl.query.QueryString(default_operator="AND", query="\\/btau AND CMS") - ], - must_not=[dsl.query.Match(distribution__availability="ondemand")], - ) - assert _query_parser_and('"/btau" AND CMS') == dsl.query.Bool( - must=[ - dsl.query.QueryString(default_operator="AND", query='"\\/btau" AND CMS') - ], - must_not=[dsl.query.Match(distribution__availability="ondemand")], - ) - assert _query_parser_and("CMS AND /btau") == dsl.query.Bool( - must=[ - dsl.query.QueryString(default_operator="AND", query="CMS AND 
\\/btau") - ], - must_not=[dsl.query.Match(distribution__availability="ondemand")], + assert _query_parser_and("/Btau") == _create_query("\\/Btau") + assert _query_parser_and('"/Btau"') == _create_query('"\\/Btau"') + assert _query_parser_and("/btau AND CMS") == _create_query("\\/btau AND CMS") + assert _query_parser_and('"/btau" AND CMS') == _create_query( + '"\\/btau" AND CMS' ) + assert _query_parser_and("CMS AND /btau") == _create_query("CMS AND \\/btau") + with app.test_request_context("/?ondemand=true"): assert _query_parser_and("CMS AND /btau") == dsl.query.QueryString( - default_operator="AND", query="CMS AND \\/btau" + default_operator="AND", + fields=["title.tokens^2", "*"], + query="CMS AND \\/btau", ) diff --git a/tests/test_insert.py b/tests/test_insert.py new file mode 100644 index 0000000000..7ea7b6c18e --- /dev/null +++ b/tests/test_insert.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# +# This file is part of CERN Open Data Portal. +# Copyright (C) 2021 CERN. +# +# CERN Open Data Portal is free software; you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 2 of the +# License, or (at your option) any later version. +# +# CERN Open Data Portal is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with CERN Open Data Portal; if not, write to the +# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, +# MA 02111-1307, USA. +# +# In applying this license, CERN does not +# waive the privileges and immunities granted to it by virtue of its status +# as an Intergovernmental Organization or submit itself to any jurisdiction. 
+ +from invenio_indexer.api import RecordIndexer + +from cernopendata.modules.fixtures.cli import create_glossary_term + + +def test_insert(app, database, search): + """Checking that records can be inserted""" + data = { + "anchor": "dummy_test", + } + schema = app.extensions["invenio-jsonschemas"].path_to_url( + "records/glossary-term-v1.0.0.json" + ) + record = create_glossary_term(data, schema) + + indexer = RecordIndexer() + done = indexer.index(record) + + assert done["_index"] == "records-glossary-term-v1.0.0"