forked from alexott/databricks-nutter-repos-demo
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy path: test_code1_code2.py
More file actions
54 lines (38 loc) · 1.65 KB
/
test_code1_code2.py
File metadata and controls
54 lines (38 loc) · 1.65 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
# Databricks notebook source
# install nutter in cluster if you want to trigger tests from command line
# %python
# %pip install -U nutter
# COMMAND ----------
from runtime.nutterfixture import NutterFixture, tag
# Timeout passed to every dbutils.notebook.run call below (seconds, per the Databricks notebook API).
default_timeout = 600
class Test1Fixture(NutterFixture):
    """Nutter fixture exercising the Code1 and Code2 notebooks.

    Nutter pairs each ``run_<name>`` method (triggers the work) with an
    ``assertion_<name>`` method (checks the result); ``after_<name>`` runs
    cleanup afterwards.
    """

    def __init__(self):
        # Holds the string returned by the most recent Code1 notebook run.
        self.code1_result = ''
        # Name of the table the Code2 notebook is expected to populate.
        self.code2_table_name = "my_data"
        super().__init__()

    def run_name1(self):
        """Run Code1 with an explicit 'name' argument."""
        self.code1_result = dbutils.notebook.run('./Code1', default_timeout, {'name': 'world'})

    def assertion_name1(self):
        """Code1 should greet the supplied name."""
        assert self.code1_result == "Hello world"

    def run_name2(self):
        """Run Code1 without arguments; the notebook is expected to report an error."""
        self.code1_result = dbutils.notebook.run('./Code1', default_timeout)

    def assertion_name2(self):
        """With no 'name' argument, Code1 is expected to return the string 'ERROR'."""
        assert self.code1_result == "ERROR"

    def run_code2(self):
        # if we use `dbutils.notebook.run`, then we need to call `generate_data()` from inside of it...
        # in that case we may need to have a separate notebook that will load functions & call that function
        dbutils.notebook.run('./Code2', default_timeout)

    def assertion_code2(self):
        """Code2 should have written exactly 10 rows into the target table."""
        # Use spark.sql for consistency with after_code2; sqlContext is the
        # legacy pre-SparkSession entry point.
        first_row = spark.sql(f'SELECT COUNT(*) AS total FROM {self.code2_table_name}').first()
        assert first_row[0] == 10

    def after_code2(self):
        # Drop the table created by Code2 so repeated test runs start clean.
        spark.sql(f"drop table {self.code2_table_name}")
# COMMAND ----------
# Execute all run_*/assertion_* pairs defined on the fixture and print a report.
result = Test1Fixture().execute_tests()
print(result.to_string())
# Only call result.exit(dbutils) when running as a job: exit() terminates the
# notebook, which would hide the printed report during interactive use.
# check staging is updated from databricks
is_job = dbutils.notebook.entry_point.getDbutils().notebook().getContext().currentRunId().isDefined()
if is_job:
    result.exit(dbutils)