Temporarily disable unit tests on Ubuntu (#2301)
soininen authored Sep 7, 2023
Merge commit 955f380 (parents 4faacf9 and aa4d54b)
Showing 18 changed files with 139 additions and 162 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test_runner.yml
@@ -19,7 +19,7 @@ jobs:
       fail-fast: true
       matrix:
         python-version: [3.8, 3.9, "3.10", 3.11]
-        os: [windows-latest, ubuntu-22.04]
+        os: [windows-latest] # FIXME: temporarily disabled ubuntu-22.04
     steps:
     - uses: actions/checkout@v3
       with:

20 changes: 9 additions & 11 deletions execution_tests/import_file_packs/execution_test.py
@@ -3,7 +3,7 @@
 import shutil
 import subprocess
 import unittest
-from spinedb_api import DatabaseMapping, from_database
+from spinedb_api import create_new_spine_database, DatabaseMapping, from_database


 class ModifyConnectionFilterByScript(unittest.TestCase):
@@ -19,8 +19,7 @@ def setUp(self):
         if self._database_path.exists():
             self._database_path.unlink()
         self._url = "sqlite:///" + str(self._database_path)
-        db_map = DatabaseMapping(self._url, create=True)
-        db_map.connection.close()
+        create_new_spine_database(self._url)

     def test_execution(self):
         completed = subprocess.run(
@@ -33,14 +32,13 @@ def test_execution(self):
             )
         )
         self.assertEqual(completed.returncode, 0)
-        db_map = DatabaseMapping(self._url)
-        values = {}
-        for value_row in db_map.query(db_map.object_parameter_value_sq):
-            self.assertEqual(value_row.object_class_name, "a")
-            self.assertEqual(value_row.parameter_name, "info")
-            self.assertEqual(value_row.alternative_name, "Base")
-            values[value_row.object_name] = from_database(value_row.value, value_row.type)
-        db_map.connection.close()
+        with DatabaseMapping(self._url) as db_map:
+            values = {}
+            for value_row in db_map.query(db_map.entity_parameter_value_sq):
+                self.assertEqual(value_row.entity_class_name, "a")
+                self.assertEqual(value_row.parameter_name, "info")
+                self.assertEqual(value_row.alternative_name, "Base")
+                values[value_row.entity_name] = from_database(value_row.value, value_row.type)
         self.assertEqual(len(values), 4)
         self.assertEqual(values["b"], 23.0)
         self.assertEqual(values["c"], 50.0)

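The same migration recurs throughout the rest of the diff: DatabaseMapping is now used as a context manager instead of being closed explicitly via db_map.connection.close(), empty databases are created with create_new_spine_database(), and the object_* query attributes and import helpers are replaced by their entity_*/parameter_* counterparts. A minimal sketch of the new read pattern, using only calls that appear in this diff (the URL and the final printout are illustrative, not part of the commit):

from spinedb_api import DatabaseMapping, create_new_spine_database, from_database

url = "sqlite:///example.sqlite"  # illustrative path, not from the commit
create_new_spine_database(url)  # create an empty Spine database at the URL

# Read parameter values through the entity-based subquery; the context manager
# replaces the old explicit db_map.connection.close() calls.
values = {}
with DatabaseMapping(url) as db_map:
    for row in db_map.query(db_map.entity_parameter_value_sq):
        # Rows expose entity_class_name, entity_name, parameter_name and
        # alternative_name alongside the encoded value and its type.
        values[row.entity_name] = from_database(row.value, row.type)
print(values)  # an empty dict for a freshly created database
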
@@ -2,6 +2,9 @@
     "project": {
         "version": 11,
         "description": "",
+        "settings": {
+            "enable_execute_all": true
+        },
         "specifications": {
             "Tool": [
                 {
@@ -93,7 +96,7 @@
                 ],
                 "condition": {
                     "type": "python-script",
-                    "script": "import sys\nfrom spinedb_api import DatabaseMapping, from_database, import_object_parameter_values\nin_url = sys.argv[1]\nin_db_map = DatabaseMapping(in_url)\nsq = in_db_map.object_parameter_value_sq\nday_count_row = in_db_map.query(sq).filter(sq.c.object_class_name==\"Timeline\", sq.c.object_name==\"days_of_our_lives\", sq.c.parameter_name==\"cumulative_count\").first()\nday_count = from_database(day_count_row.value, day_count_row.type)\ncounter = day_count.values[-1]\nin_db_map.connection.close()\nif counter >= 30:\n exit(1)\nout_url = sys.argv[2]\nout_db_map = DatabaseMapping(out_url)\nimport_object_parameter_values(out_db_map, ((\"Counter\", \"loop_counter\", \"count\", counter),))\nout_db_map.commit_session(\"Increment counter.\")\nout_db_map.connection.close()\nexit(0)"
+                    "script": "import sys\nfrom spinedb_api import DatabaseMapping, from_database, import_parameter_values\nin_url = sys.argv[1]\nwith DatabaseMapping(in_url) as in_db_map:\n\tsq = in_db_map.entity_parameter_value_sq\n\tday_count_row = in_db_map.query(sq).filter(sq.c.entity_class_name==\"Timeline\", sq.c.entity_name==\"days_of_our_lives\", sq.c.parameter_name==\"cumulative_count\").first()\nday_count = from_database(day_count_row.value, day_count_row.type)\ncounter = day_count.values[-1]\nif counter >= 30:\n exit(1)\nout_url = sys.argv[2]\nwith DatabaseMapping(out_url) as out_db_map:\n\timport_parameter_values(out_db_map, ((\"Counter\", \"loop_counter\", \"count\", counter),))\n\tout_db_map.commit_session(\"Increment counter.\")\nexit(0)\n"
                 },
                 "cmd_line_args": [
                     {
@@ -106,10 +109,7 @@
                     }
                 ]
             }
-        ],
-        "settings": {
-            "enable_execute_all": true
-        }
+        ]
     },
     "items": {
         "Write data": {
@@ -124,7 +124,9 @@
                     "type": "resource",
                     "arg": "db_url@Loop counter store"
                 }
-            ]
+            ],
+            "kill_completed_processes": false,
+            "log_process_output": false
         },
         "Import": {
             "type": "Importer",
@@ -133,7 +135,6 @@
             "y": -4.007282712511945,
             "specification": "Import data",
             "cancel_on_error": false,
-            "purge_before_writing": false,
             "on_conflict": "merge",
             "file_selection": [
                 [
@@ -181,7 +182,6 @@
             "y": -4.007282712511941,
             "specification": "Counter data importer",
             "cancel_on_error": false,
-            "purge_before_writing": false,
             "on_conflict": "merge",
             "file_selection": [
                 [

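For readability, here is the new jump-condition script embedded in the "script" string above, unescaped (tabs rendered as four-space indents; the statements are exactly those in the JSON string):

import sys
from spinedb_api import DatabaseMapping, from_database, import_parameter_values
in_url = sys.argv[1]
with DatabaseMapping(in_url) as in_db_map:
    sq = in_db_map.entity_parameter_value_sq
    day_count_row = in_db_map.query(sq).filter(sq.c.entity_class_name=="Timeline", sq.c.entity_name=="days_of_our_lives", sq.c.parameter_name=="cumulative_count").first()
day_count = from_database(day_count_row.value, day_count_row.type)
counter = day_count.values[-1]
if counter >= 30:
    exit(1)
out_url = sys.argv[2]
with DatabaseMapping(out_url) as out_db_map:
    import_parameter_values(out_db_map, (("Counter", "loop_counter", "count", counter),))
    out_db_map.commit_session("Increment counter.")
exit(0)
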
@@ -11,6 +11,5 @@
         "data.csv"
     ],
     "cmdline_args": [],
-    "execute_in_work": true,
     "includes_main_path": "../../.."
 }

23 changes: 10 additions & 13 deletions execution_tests/loop_condition_with_cmd_line_args/execution_test.py
@@ -3,7 +3,7 @@
 import subprocess
 import sys
 import unittest
-from spinedb_api import DatabaseMapping, from_database, Map
+from spinedb_api import create_new_spine_database, DatabaseMapping, from_database, Map


 class LoopConditionWithCmdLineArgs(unittest.TestCase):
@@ -24,23 +24,20 @@ def setUp(self):
             database_path.parent.mkdir(parents=True, exist_ok=True)
             if database_path.exists():
                 database_path.unlink()
-            db_map = DatabaseMapping(url, create=True)
-            db_map.connection.close()
+            create_new_spine_database(url)

     def test_execution(self):
         completed = subprocess.run((sys.executable, "-m", "spinetoolbox", "--execute-only", str(self._root_path)))
         self.assertEqual(completed.returncode, 0)
-        db_map = DatabaseMapping(self._loop_counter_database_url)
-        value_rows = db_map.query(db_map.object_parameter_value_sq).all()
-        self.assertEqual(len(value_rows), 1)
-        loop_counter = from_database(value_rows[0].value, value_rows[0].type)
-        db_map.connection.close()
+        with DatabaseMapping(self._loop_counter_database_url) as db_map:
+            value_rows = db_map.query(db_map.parameter_value_sq).all()
+            self.assertEqual(len(value_rows), 1)
+            loop_counter = from_database(value_rows[0].value, value_rows[0].type)
         self.assertEqual(loop_counter, 20.0)
-        db_map = DatabaseMapping(self._output_database_url)
-        value_rows = db_map.query(db_map.object_parameter_value_sq).all()
-        self.assertEqual(len(value_rows), 1)
-        output_value = from_database(value_rows[0].value, value_rows[0].type)
-        db_map.connection.close()
+        with DatabaseMapping(self._output_database_url) as db_map:
+            value_rows = db_map.query(db_map.parameter_value_sq).all()
+            self.assertEqual(len(value_rows), 1)
+            output_value = from_database(value_rows[0].value, value_rows[0].type)
         expected_x = [f"T{i:03}" for i in range(31)]
         expected_y = [float(i) for i in range(31)]
         self.assertEqual(output_value, Map(expected_x, expected_y))

13 changes: 7 additions & 6 deletions execution_tests/loop_condition_with_cmd_line_args/tool.py
@@ -3,13 +3,14 @@
 from spinedb_api import DatabaseMapping, from_database

 url = sys.argv[1]
-db_map = DatabaseMapping(url)
-sq = db_map.object_parameter_value_sq
-count_row = db_map.query(sq).filter(
-    sq.c.object_class_name == "Counter", sq.c.object_name == "loop_counter", sq.c.parameter_name == "count"
-).first()
+with DatabaseMapping(url) as db_map:
+    sq = db_map.entity_parameter_value_sq
+    count_row = (
+        db_map.query(sq)
+        .filter(sq.c.entity_class_name == "Counter", sq.c.entity_name == "loop_counter", sq.c.parameter_name == "count")
+        .first()
+    )
 count = int(from_database(count_row.value, count_row.type))
-db_map.connection.close()
 data = [[f"T{i:03}", i] for i in range(count, count + 11)]

 with open("data.csv", "w", newline="") as data_file:

40 changes: 17 additions & 23 deletions execution_tests/merger_write_order/execution_test.py
@@ -3,7 +3,7 @@
 import sys
 import unittest

-from spinedb_api import DatabaseMapping, from_database, import_functions
+from spinedb_api import create_new_spine_database, DatabaseMapping, from_database, import_functions


 class MergerWriteOrder(unittest.TestCase):
@@ -18,36 +18,30 @@ def setUp(self):
             database_path.parent.mkdir(parents=True, exist_ok=True)
             if database_path.exists():
                 database_path.unlink()
-        spoon_volumes = {
-            self._source_database_1_path: 1.0,
-            self._source_database_2_path: 99.0
-        }
+        spoon_volumes = {self._source_database_1_path: 1.0, self._source_database_2_path: 99.0}
         for database_path, spoon_volume in spoon_volumes.items():
             url = "sqlite:///" + str(database_path)
-            db_map = DatabaseMapping(url, create=True)
-            import_functions.import_object_classes(db_map, ("Widget",))
-            import_functions.import_objects(db_map, (("Widget", "spoon"),))
-            import_functions.import_object_parameters(db_map, (("Widget", "volume"),))
-            import_functions.import_object_parameter_values(db_map, (("Widget", "spoon", "volume", spoon_volume, "Base"),))
-            db_map.commit_session("Add test data.")
-            db_map.connection.close()
+            with DatabaseMapping(url, create=True) as db_map:
+                import_functions.import_entity_classes(db_map, ("Widget",))
+                import_functions.import_entities(db_map, (("Widget", "spoon"),))
+                import_functions.import_parameter_definitions(db_map, (("Widget", "volume"),))
+                import_functions.import_parameter_values(db_map, (("Widget", "spoon", "volume", spoon_volume, "Base"),))
+                db_map.commit_session("Add test data.")
         self._sink_url = "sqlite:///" + str(self._sink_database_path)
-        db_map = DatabaseMapping(self._sink_url, create=True)
-        db_map.connection.close()
+        create_new_spine_database(self._sink_url)

     def test_execution(self):
         this_file = Path(__file__)
         completed = subprocess.run((sys.executable, "-m", "spinetoolbox", "--execute-only", str(this_file.parent)))
         self.assertEqual(completed.returncode, 0)
-        db_map = DatabaseMapping(self._sink_url)
-        value_rows = db_map.query(db_map.object_parameter_value_sq).all()
-        self.assertEqual(len(value_rows), 1)
-        self.assertEqual(value_rows[0].object_class_name, "Widget")
-        self.assertEqual(value_rows[0].object_name, "spoon")
-        self.assertEqual(value_rows[0].parameter_name, "volume")
-        self.assertEqual(value_rows[0].alternative_name, "Base")
-        self.assertEqual(from_database(value_rows[0].value, value_rows[0].type), 99.0)
-        db_map.connection.close()
+        with DatabaseMapping(self._sink_url) as db_map:
+            value_rows = db_map.query(db_map.entity_parameter_value_sq).all()
+            self.assertEqual(len(value_rows), 1)
+            self.assertEqual(value_rows[0].entity_class_name, "Widget")
+            self.assertEqual(value_rows[0].entity_name, "spoon")
+            self.assertEqual(value_rows[0].parameter_name, "volume")
+            self.assertEqual(value_rows[0].alternative_name, "Base")
+            self.assertEqual(from_database(value_rows[0].value, value_rows[0].type), 99.0)


 if __name__ == '__main__':

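The seeding half of the migration follows the same shape: the entity-based import helpers run inside one context-managed session and the changes are committed before the mapping closes. A condensed, self-contained version of the setUp step above (the file path and the single value are illustrative, not from the commit):

from spinedb_api import DatabaseMapping, import_functions

url = "sqlite:///source.sqlite"  # illustrative path, not from the commit
# create=True builds the Spine schema; the context manager closes the mapping.
with DatabaseMapping(url, create=True) as db_map:
    import_functions.import_entity_classes(db_map, ("Widget",))
    import_functions.import_entities(db_map, (("Widget", "spoon"),))
    import_functions.import_parameter_definitions(db_map, (("Widget", "volume"),))
    import_functions.import_parameter_values(db_map, (("Widget", "spoon", "volume", 1.0, "Base"),))
    db_map.commit_session("Add test data.")
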
@@ -5,14 +5,14 @@
 import subprocess
 import unittest
 from spinedb_api import (
-    DiffDatabaseMapping,
+    DatabaseMapping,
     import_alternatives,
+    import_entities,
+    import_entity_classes,
+    import_parameter_definitions,
+    import_parameter_values,
     import_scenario_alternatives,
     import_scenarios,
-    import_object_classes,
-    import_objects,
-    import_object_parameters,
-    import_object_parameter_values,
 )


@@ -29,22 +29,21 @@ def setUp(self):
         if self._database_path.exists():
             self._database_path.unlink()
         url = "sqlite:///" + str(self._database_path)
-        db_map = DiffDatabaseMapping(url, create=True)
-        import_object_classes(db_map, ("object_class",))
-        import_objects(db_map, (("object_class", "object"),))
-        import_object_parameters(db_map, (("object_class", "parameter"),))
-        import_alternatives(db_map, ("alternative",))
-        import_object_parameter_values(
-            db_map,
-            (
-                ("object_class", "object", "parameter", 1.0, "Base"),
-                ("object_class", "object", "parameter", 2.0, "alternative"),
-            ),
-        )
-        import_scenarios(db_map, (("scenario", True),))
-        import_scenario_alternatives(db_map, (("scenario", "alternative"),))
-        db_map.commit_session("Add test data.")
-        db_map.connection.close()
+        with DatabaseMapping(url, create=True) as db_map:
+            import_entity_classes(db_map, ("object_class",))
+            import_entities(db_map, (("object_class", "object"),))
+            import_parameter_definitions(db_map, (("object_class", "parameter"),))
+            import_alternatives(db_map, ("alternative",))
+            import_parameter_values(
+                db_map,
+                (
+                    ("object_class", "object", "parameter", 1.0, "Base"),
+                    ("object_class", "object", "parameter", 2.0, "alternative"),
+                ),
+            )
+            import_scenarios(db_map, (("scenario", True),))
+            import_scenario_alternatives(db_map, (("scenario", "alternative"),))
+            db_map.commit_session("Add test data.")

     def test_execution(self):
         completed = subprocess.run(

5 changes: 1 addition & 4 deletions execution_tests/modify_connection_filter_by_script/mod.py
@@ -3,10 +3,7 @@

 db_path = project.project_dir / ".spinetoolbox" / "items" / "data" / "Data.sqlite"
 db_url = "sqlite:///" + str(db_path)
-db_map = DatabaseMapping(db_url)
-try:
+with DatabaseMapping(db_url) as db_map:
     scenario_ids = {r.name: r.id for r in db_map.query(db_map.scenario_sq).all()}
     connection = project.find_connection("Data", "Export values")
     connection.set_filter_enabled("db_url@Data", SCENARIO_FILTER_TYPE, "scenario", True)
-finally:
-    db_map.connection.close()

@@ -11,6 +11,5 @@
         "out.csv"
     ],
     "cmdline_args": [],
-    "execute_in_work": true,
     "includes_main_path": "../../.."
 }