Commit 8e4940bf authored by Eric Pellegrini

Merge branch 'feature-functional_tests_enhancing' into 'develop'

Feature functional tests enhancing

See merge request !8
parents 850a86b0 74c401fa
Pipeline #3435 failed with stages in 17 minutes and 49 seconds
......@@ -80,7 +80,10 @@ class GlobalMotionFilteredTrajectory(IJob):
"""
self.numberOfSteps = self.configuration['frames']['number']
# Store the universe name for later restoration
self.old_universe_name = self.configuration['trajectory']['instance'].universe.__class__.__name__
self.configuration['trajectory']['instance'].universe.__class__.__name__ = 'InfiniteUniverse'
self.configuration['trajectory']['instance'].universe._descriptionArguments = lambda: '()'
......@@ -197,4 +200,7 @@ class GlobalMotionFilteredTrajectory(IJob):
# The output trajectory is closed.
self._gmft.close()
# Restore universe name
self.configuration['trajectory']['instance'].universe.__class__.__name__ = self.old_universe_name
REGISTRY['gmft'] = GlobalMotionFilteredTrajectory
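The hunks above temporarily rename the universe class to 'InfiniteUniverse' and restore the original name in finalize(). Below is a minimal, self-contained sketch of that save/patch/restore pattern; run_with_patched_name and DummyUniverse are placeholders, not part of MDANSE, and a try/finally is used so the restore also happens if the work fails.

def run_with_patched_name(universe, work):
    # Store the original class name for later restoration.
    old_name = universe.__class__.__name__
    universe.__class__.__name__ = 'InfiniteUniverse'
    try:
        # Do the work that relies on the patched name.
        return work(universe)
    finally:
        # Put the original name back, even if `work` raised.
        universe.__class__.__name__ = old_name

class DummyUniverse(object):
    # Placeholder for the trajectory's universe object.
    pass

print(run_with_patched_name(DummyUniverse(), lambda u: u.__class__.__name__))
# prints 'InfiniteUniverse'; afterwards DummyUniverse.__name__ is 'DummyUniverse' again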
......@@ -139,72 +139,7 @@ class IJob(Configurable):
self._status = None
@classmethod
def build_parallelization_test(cls, testFile, parameters=None):
"""
Produce a file like object for a given job.\n
:Parameters:
#. parameters (dict): optional. If not None, the parameters with which the job file will be built.
"""
f = open(testFile, 'w')
# The first line contains the call to the python executable. This is necessary for the file to
# be autostartable.
f.write('#!%s\n\n' % sys.executable)
f.write('import os\n')
f.write('import unittest\n')
f.write('import numpy\n')
f.write('from Scientific.IO.NetCDF import NetCDFFile\n')
f.write('from Tests.UnitTest import UnitTest\n')
f.write('from MDANSE import REGISTRY\n\n')
f.write('class Test%sParallel(UnitTest):\n\n' % cls._type.upper())
f.write(' def test(self):\n')
# Writes the line that will initialize the |parameters| dictionary.
f.write(' parameters = {}\n')
for k, v in sorted(parameters.items()):
f.write(' parameters[%r] = %r\n' % (k, v))
f.write('\n job = REGISTRY[%r](%r)\n\n' % ('job',cls._type))
f.write(' parameters["running_mode"] = ("monoprocessor",1)\n')
f.write(' self.assertNotRaises(job.run,parameters,False)\n\n')
f.write(' f = NetCDFFile(job.configuration["output_files"]["files"][0],"r")\n')
f.write(' resMono = {}\n')
f.write(' for k,v in f.variables.items():\n')
f.write(' resMono[k] = v.getValue()\n')
f.write(' f.close()\n\n')
f.write(' parameters["running_mode"] = ("multiprocessor",2)\n')
f.write(' self.assertNotRaises(job.run,parameters,False)\n\n')
f.write(' f = NetCDFFile(job.configuration["output_files"]["files"][0],"r")\n')
f.write(' resMulti = {}\n')
f.write(' for k,v in f.variables.items():\n')
f.write(' resMulti[k] = v.getValue()\n')
f.write(' f.close()\n\n')
f.write(' for k in resMono.keys():\n')
f.write(' self.assertTrue(numpy.allclose(resMono[k],resMulti[k]))\n\n')
f.write('def suite():\n')
f.write(' loader = unittest.TestLoader()\n')
f.write(' s = unittest.TestSuite()\n')
f.write(' s.addTest(loader.loadTestsFromTestCase(Test%sParallel))\n' % cls._type.upper())
f.write(' return s\n\n')
f.write('if __name__ == "__main__":\n')
f.write(' unittest.main(verbosity=2)\n')
f.close()
os.chmod(testFile,stat.S_IRWXU)
@staticmethod
def set_pyro_server():
......@@ -298,53 +233,6 @@ class IJob(Configurable):
f.close()
os.chmod(jobFile,stat.S_IRWXU)
@classmethod
def build_test(cls, testFile, parameters=None):
"""
Produce a file like object for a given job.\n
:Parameters:
#. parameters (dict): optional. If not None, the parameters with which the job file will be built.
"""
f = open(testFile, 'w')
# The first line contains the call to the python executable. This is necessary for the file to
# be autostartable.
f.write('#!%s\n\n' % sys.executable)
f.write('import unittest\n')
f.write('from Tests.UnitTests.UnitTest import UnitTest\n')
f.write('from MDANSE import REGISTRY\n\n')
f.write('class Test%s(UnitTest):\n\n' % cls._type.upper())
f.write(' def test(self):\n')
# Writes the line that will initialize the |parameters| dictionary.
f.write(' parameters = {}\n')
if parameters is None:
parameters = cls.get_default_parameters()
for k, v in sorted(parameters.items()):
f.write(' parameters[%r] = %r\n' % (k, v))
# Sets |analysis| variable to an instance analysis to save.
f.write(' job = REGISTRY[%r][%r]()\n' % ('job',cls._type))
f.write(' self.assertNotRaises(job.run, parameters, status=False)\n\n')
f.write('def suite():\n')
f.write(' loader = unittest.TestLoader()\n')
f.write(' s = unittest.TestSuite()\n')
f.write(' s.addTest(loader.loadTestsFromTestCase(Test%s))\n' % cls._type.upper())
f.write(' return s\n\n')
f.write("if __name__ == '__main__':\n")
f.write(' unittest.main(verbosity=2)\n')
f.close()
os.chmod(testFile,stat.S_IRWXU)
def _run_monoprocessor(self):
......@@ -380,7 +268,8 @@ class IJob(Configurable):
if self._status is not None:
if self._status.is_stopped():
self._status.cleanup()
return
# Break to ensure that the master will be shut down
break
else:
self._status.update()
......
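The change above replaces an early return with a break when the job status reports a stop, so that the code placed after the loop (the master shutdown) still runs. A small runnable sketch of that pattern, with FakeStatus and run_steps as stand-ins for the real IJob machinery:

class FakeStatus(object):
    # Stand-in for the job status object; not the MDANSE implementation.
    def __init__(self, stop_at):
        self._step = 0
        self._stop_at = stop_at
    def is_stopped(self):
        return self._step >= self._stop_at
    def cleanup(self):
        print('status cleaned up')
    def update(self):
        self._step += 1

def run_steps(n_steps, status):
    for _ in range(n_steps):
        if status.is_stopped():
            status.cleanup()
            # Break instead of return so the shutdown code below still runs.
            break
        status.update()
    # Reached on normal completion and after a stop request alike.
    print('master shut down')

run_steps(10, FakeStatus(stop_at=3))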
......@@ -65,7 +65,6 @@ class RigidBodyTrajectory(IJob):
settings['reference']=('integer',{"mini":0})
settings['remove_translation']=('boolean',{'default':False})
settings['output_files']=('output_files', {"formats":["netcdf"]})
settings['running_mode']=('running_mode',{})
def initialize(self):
"""
......
......@@ -30,6 +30,8 @@ Created on Apr 13, 2015
:author: Eric C. Pellegrini
'''
import collections
class Node(object):
def __init__(self, name, **kwargs):
......@@ -44,7 +46,7 @@ class Node(object):
@property
def links(self):
return set(self._links)
return self._links
def add_link(self, other):
self._links.add(other)
......@@ -54,18 +56,16 @@ class Graph(object):
def __init__(self):
self._nodes = {}
self._nodes = collections.OrderedDict()
@property
def nodes(self):
return self._nodes
def add_node(self, name, **kwargs):
self._nodes[name] = Node(name, **kwargs)
def add_link(self, source, target):
self._nodes[source].add_link(self._nodes[target])
def build_connected_components(self):
......@@ -74,7 +74,7 @@ class Graph(object):
result = []
# Make a mutable copy of the nodes, so we can modify it.
nodes = set(self._nodes.values())
nodes = [self._nodes[k] for k in sorted(self._nodes.keys())]
# Iterate while we still have nodes to process.
while nodes:
......@@ -102,7 +102,9 @@ class Graph(object):
neighbors.difference_update(group)
# Remove the remaining nodes from the global set.
nodes.difference_update(neighbors)
for neigh in neighbors:
if neigh in nodes:
nodes.remove(neigh)
# Add them to the group of connected nodes.
group.update(neighbors)
......@@ -110,6 +112,10 @@ class Graph(object):
# Add them to the queue, so we visit them in the next iterations.
queue.extend(neighbors)
# Sort the group
group = list(group)
group.sort(key = lambda n : n.name)
# Add the group to the list of groups.
result.append(group)
......
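The Graph changes above make the grouping deterministic: nodes are stored in an OrderedDict, the working list is built from sorted keys, and each connected component is sorted by node name. A short usage sketch; the import path is an assumption, the bonds are made up, and links are added in both directions because add_link only records the link on the source node:

from MDANSE.Mathematics.Graph import Graph  # assumed import path

g = Graph()
for name in ('C1', 'H1', 'H2', 'O1', 'H3'):
    g.add_node(name)
for a, b in (('C1', 'H1'), ('C1', 'H2'), ('O1', 'H3')):
    g.add_link(a, b)
    g.add_link(b, a)

# Repeated runs now produce the same grouping, with each group sorted by
# node name, e.g. ['C1', 'H1', 'H2'] and ['H3', 'O1'].
for group in g.build_connected_components():
    print([node.name for node in group])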
......@@ -35,16 +35,18 @@ import os
import glob
def suite():
files = glob.glob('Test*.py')
files = glob.glob("Test*.py")
#os.chdir("Jobs")
modules = [__import__(os.path.splitext(f)[0],globals(),locals(),[],-1) for f in files]
test_suite = unittest.TestSuite()
for m in modules:
print m.__file__
test_suite.addTests(m.suite())
return test_suite
def run_test():
unittest.TextTestRunner(verbosity=2).run(suite())
if __name__ == '__main__':
#os.chdir("Jobs")
run_test()
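Every generated Test_*.py, as well as this collection module, exposes the same suite() convention, so suites can be aggregated from elsewhere. A small sketch under assumed module names (AllJobTests for the module above, Test_msd for one generated test):

import unittest

import AllJobTests  # assumed name of the collection module shown above
import Test_msd     # assumed name of one generated job test

all_tests = unittest.TestSuite()
all_tests.addTests(AllJobTests.suite())
all_tests.addTests(Test_msd.suite())
unittest.TextTestRunner(verbosity=2).run(all_tests)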
import os
import stat
import sys
from MDANSE import REGISTRY
#from docutils.io import Output
#from docutils.nodes import reference
class JobFileGenerator():
def __init__(self, job, parameters=None):
"""
Builds the test file for a given job.
:Parameters:
# job: the job for which the test file will be built.
# parameters (dict): optional. If not None, the parameters with which the job file will be built.
"""
# Save job
self.job = job
self.job.get_default_parameters()
# Check if reference data is present
self.reference_data_path = os.path.join(os.path.pardir, os.path.pardir, os.path.pardir, "Data", "Jobs_reference_data", job._type)
self.reference_data_file = self.reference_data_path + "_reference" + ".nc"
if not os.path.isfile(self.reference_data_file):
print "/!\ Reference data file is not present for job " + str(job)
self.reference_data_file = None
# Check if job can be launched on multiprocessor
if job.settings.has_key('running_mode'):
self.multiprocessor = True
else:
print "/!\ Job " + str(job) + " cannot be launched on multiprocessor"
self.multiprocessor = False
# Create the job file
self.job_file_name = "Test_%s.py" % job._type
self.__build_job_file(parameters)
def __build_job_file(self, parameters):
"""
Builds the job test file for a given job.
:Parameters:
# parameters (dict): optional. If not None, the parameters with which the job file will be built.
"""
array_of_python_dependancies_string = ['unittest', 'numpy', 'os']
array_of_mdanse_dependancies_string = ['from Tests.UnitTests.UnitTest import UnitTest',
'from MDANSE import REGISTRY']
test_string = ''
test_string = test_string + 'class Test%s(UnitTest):\n\n' % self.job._type.upper()
test_string = test_string + ' def test(self):\n'
# Writes the line that will initialize the |parameters| dictionary and create the job
if parameters is None:
parameters = self.job.get_default_parameters()
test_string = test_string + ' parameters = {}\n'
for k, v in sorted(parameters.items()):
test_string = test_string + ' parameters[%r] = %r\n' % (k, v)
test_string = test_string + ' job = REGISTRY[%r][%r]()\n' % ('job',self.job._type)
test_string = test_string + ' if "output_file" in parameters:\n'
test_string = test_string + ' output_path = parameters["output_file"][0]\n'
test_string = test_string + ' else:\n'
test_string = test_string + ' output_path = parameters["output_files"][0]\n'
test_string = test_string + ' reference_data_path = "' + self.reference_data_path + '"\n'
# Launch the job in monoprocessor mode and copy output file
test_string = test_string + ' print "Launching job in monoprocessor mode"\n'
test_string = test_string + ' parameters["running_mode"] = ("monoprocessor",1)\n'
test_string = test_string + ' self.assertNotRaises(job.run, parameters, status=False)\n'
test_string = test_string + ' shutil.copy(output_path + ".nc", reference_data_path + "_mono" + ".nc")\n'
test_string = test_string + ' print "Monoprocessor execution completed"\n\n'
# Launch the job in multiprocessor mode if available
if self.multiprocessor:
test_string = test_string + ' print "Launching job in multiprocessor mode"\n'
test_string = test_string + ' parameters["running_mode"] = ("multiprocessor",2)\n'
test_string = test_string + ' self.assertNotRaises(job.run,parameters,False)\n'
test_string = test_string + ' shutil.copy(output_path + ".nc", reference_data_path + "_multi" + ".nc")\n'
test_string = test_string + ' print "Multiprocessor execution completed"\n\n'
# Compare reference data with monoprocessor if reference data exists
if self.reference_data_file:
test_string = test_string + ' print "Comparing monoprocessor output with reference output"\n'
test_string = test_string + ' self.assertTrue(compare("' + self.reference_data_file + '", reference_data_path + "_mono" + ".nc"))\n\n'
# Compare reference data with multiprocessor if reference data exists
if self.reference_data_file and self.multiprocessor:
test_string = test_string + ' print "Comparing multiprocessor output with reference output"\n'
test_string = test_string + ' self.assertTrue(compare("' + self.reference_data_file + '", reference_data_path + "_multi" + ".nc"))\n\n'
# If no reference data but multiprocessor, compare mono and multiprocessor
elif self.multiprocessor:
test_string = test_string + ' print "Comparing monoprocessor output with multiprocessor output"\n'
test_string = test_string + ' self.assertTrue(compare(reference_data_path + "_mono" + ".nc", reference_data_path + "_multi" + ".nc"))\n\n'
test_string = test_string + ' try:\n'
test_string = test_string + ' os.remove(reference_data_path + "_mono" + ".nc")\n'
if self.multiprocessor:
test_string = test_string + ' os.remove(reference_data_path + "_multi" + ".nc")\n'
test_string = test_string + ' except:\n'
test_string = test_string + ' pass\n'
# If the test is GMFT, restore old_universe_name
if self.job._type == "gmft":
test_string = test_string + ' job.configuration["trajectory"]["instance"].universe.__class__.__name__ = job.old_universe_name\n'
# Finally write the suite method that will be called by test script
test_string = test_string + '\n\ndef suite():\n'
test_string = test_string + ' loader = unittest.TestLoader()\n'
test_string = test_string + ' s = unittest.TestSuite()\n'
test_string = test_string + ' s.addTest(loader.loadTestsFromTestCase(Test%s))\n' % self.job._type.upper()
test_string = test_string + ' return s'
self.__generate_test_file(test_string, array_of_python_dependancies_string, array_of_mdanse_dependancies_string)
for k,v in REGISTRY['job'].items():
def __generate_test_file(self, test_string, array_of_python_dependancies_string, array_of_mdanse_dependancies_string):
"""
Produce a test file from the given information.
:Parameters:
# test_string (string): the content of the test.
# array_of_python_dependancies_string (array of string). Example: ["numpy", "os", "sys"]
# array_of_mdanse_dependancies_string (array of string). Example: ["from foo import bar", "import spam"]
"""
f = open(self.job_file_name, 'w')
# The first line contains the call to the python executable. This is necessary for the file to
# be directly executable.
f.write('#!%s\n\n' % sys.executable)
# Write dependencies
# Add unittest, os, shutil and time if needed
if not "unittest" in array_of_python_dependancies_string:
array_of_python_dependancies_string.append("unittest")
if not "os" in array_of_python_dependancies_string:
array_of_python_dependancies_string.append("os")
if not "shutil" in array_of_python_dependancies_string:
array_of_python_dependancies_string.append("shutil")
if not "time" in array_of_python_dependancies_string:
array_of_python_dependancies_string.append("time")
# Add NetCDF
array_of_mdanse_dependancies_string.append('from Scientific.IO.NetCDF import NetCDFFile')
array_of_mdanse_dependancies_string.append('import Comparator')
# Skip the mcstas test because the mcstas executable is not available on all platforms
if k=='mvi':
continue
v.build_test("Test_%s.py" % v._type)
# Sort arrays to write imports in alphabetical order
array_of_python_dependancies_string.sort()
array_of_mdanse_dependancies_string.sort()
# Write in file
for dependancy_string in array_of_python_dependancies_string:
f.write("import " + dependancy_string + "\n")
f.write("\n")
for dependancy_string in array_of_mdanse_dependancies_string:
f.write(dependancy_string + "\n")
f.write("\n")
# Create the compare function
test_string2 = 'def compare(file1, file2):\n'
test_string2 = test_string2 + ' ret = True\n'
test_string2 = test_string2 + ' f = NetCDFFile(file1,"r")\n'
test_string2 = test_string2 + ' res1 = {}\n'
test_string2 = test_string2 + ' for k,v in f.variables.items():\n'
test_string2 = test_string2 + ' res1[k] = v.getValue()\n'
test_string2 = test_string2 + ' f.close()\n'
test_string2 = test_string2 + ' f = NetCDFFile(file2,"r")\n'
test_string2 = test_string2 + ' res2 = {}\n'
test_string2 = test_string2 + ' for k,v in f.variables.items():\n'
test_string2 = test_string2 + ' res2[k] = v.getValue()\n'
test_string2 = test_string2 + ' f.close()\n'
test_string2 = test_string2 + ' return Comparator.Comparator().compare(res1, res2)\n\n'
# Write test string
f.write(test_string2 + test_string)
f.write("\n\n")
# Write file ending
f.write("if __name__ == '__main__':\n")
f.write(' unittest.main(verbosity=2)\n')
f.close()
os.chmod(self.job_file_name,stat.S_IRWXU)
if __name__ == '__main__':
# Main script, automatically creates source files for testing jobs
for job_id,job in REGISTRY['job'].items():
# Skip the mcstas test because the mcstas executable is not available on all platforms
if job_id=='mvi':
pass
else:
job_file_generator = JobFileGenerator(job)
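A minimal usage sketch of JobFileGenerator on a single job, rather than the whole registry loop above; the job id 'msd' is only an example (any key of REGISTRY['job'] works) and the BuildJobTests module name follows the import used in the unit test below:

from MDANSE import REGISTRY
from BuildJobTests import JobFileGenerator

# Generates Test_msd.py in the current directory, using the job's default parameters.
generator = JobFileGenerator(REGISTRY['job']['msd'])
print(generator.job_file_name)  # -> 'Test_msd.py'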
import numpy
import collections
class Comparator():
def __init__(self):
self.s1 = ""
self.s2 = ""
pass
def compare(self, res1, res2):
description1 = res1.pop("description", None)
description2 = res2.pop("description", None)
ret = self.__compareDictionnaries(res1, res2)
if not (description1 is None) or not (description2 is None):
ret = ret and self.__compareDescriptions(description1, description2)
return ret
def __compareDescriptions(self, descr1, descr2):
temp = collections.Counter(descr1)
return temp == collections.Counter(descr2)
def __compareDictionnaries(self, res1, res2):
ret = True
# Dictionary testing
if isinstance(res1, dict) and isinstance(res2, dict):
# Dictionary case
if len(res1) == len(res2):
for key in res1.keys():
if key in res2.keys():
ret = ret and self.__compareDictionnaries(res1[key], res2[key])
else:
ret = False
else:
ret = False
else:
# Can be anything, probe array case first
if hasattr(res1, "__len__") and hasattr(res2, "__len__") and (not isinstance(res1, str)) and (not isinstance(res2, str)) and (not isinstance(res1, unicode)) and (not isinstance(res2, unicode)):
# Array case:
try:
ret = ret and numpy.allclose(res1,res2)
except TypeError:
# Python list case
if len(res1) == len(res2):
for index in range(len(res1)):
ret = ret and self.__compareDictionnaries(res1[index], res2[index])
else:
ret = False
else:
# Single Values case
ret = ret and (res1==res2)
return ret
\ No newline at end of file
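A small usage sketch of the recursive comparison implemented by Comparator, on plain Python data with no NetCDF file involved; arrays are compared with numpy.allclose, dictionaries key by key, and the 'description' entries via collections.Counter:

import numpy

import Comparator

res1 = {'time': numpy.arange(5, dtype=float), 'description': 'run A'}
res2 = {'time': numpy.arange(5, dtype=float) + 1.0e-12, 'description': 'run A'}

# True: the arrays agree within numpy.allclose tolerances and the descriptions match.
print(Comparator.Comparator().compare(res1, res2))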
import unittest
import os
from BuildJobTests import JobFileGenerator
class JobForTest():
settings = {}
configuration = {"output_files":{"files":["./File.nc"]}}
_type = 'Test'
def set_multi_processor(self):
self.settings = {'running_mode':True}
def set_mono_processor(self):
self.settings = {}
def get_default_parameters(self):
return {}
class TestBuildJobTests(unittest.TestCase):
def test(self):
temp = os.path.join(os.path.split(__file__)[0], "Test_Test.py")
self.job = JobForTest()
self.object = JobFileGenerator(self.job)
self.assertTrue(os.path.isfile(temp))
os.remove(temp)
self.job.set_multi_processor()
self.object = JobFileGenerator(self.job)
self.assertTrue(os.path.isfile(temp))
os.remove(temp)
if __name__ == '__main__':
unittest.main(verbosity=2)
\ No newline at end of file