Commit 67a74d04 authored by eric pellegrini's avatar eric pellegrini
Browse files

Added 'empty_data' ancestor to the Converter base class

Removed the now-redundant 'ancestor' class attribute from the concrete Converter subclasses
parent 401b71ac
......@@ -40,7 +40,3 @@ class CHARMMConverter(DCDConverter):
type = 'charmm'
label = "CHARMM"
category = ('Converters',)
ancestor = []
\ No newline at end of file
......@@ -140,11 +140,7 @@ class CASTEPConverter(Converter):
type = 'castep'
label = "CASTEP"
category = ('Converters',)
ancestor = []
settings = collections.OrderedDict()
settings['castep_file'] = ('input_file', {'default':os.path.join('..','..','..','Data','Trajectories','CASTEP','PBAnew.md')})
settings['output_file'] = ('output_files', {'formats':["netcdf"]})
......
......@@ -34,4 +34,8 @@ from MDANSE.Framework.Jobs.IJob import IJob
class Converter(IJob):
type = None
\ No newline at end of file
type = None
category = ('Converters',)
ancestor = ['empty_data']
\ No newline at end of file
......@@ -289,10 +289,6 @@ class DL_POLYConverter(Converter):
label = "DL_POLY"
category = ('Converters',)
ancestor = []
settings = collections.OrderedDict()
settings['field_file'] = ('input_file',{'wildcard':"FIELD files|FIELD*|All files|*",
'default':os.path.join('..','..','..','Data','Trajectories','DL_Poly','FIELD_cumen')})
......
......@@ -233,8 +233,6 @@ class DiscoverConverter(Converter):
category = ('Converters','Materials Studio')
ancestor = []
settings = collections.OrderedDict()
settings['xtd_file'] = ('input_file',{'default':os.path.join('..','..','..','Data','Trajectories','Discover','sushi.xtd')})
settings['his_file'] = ('input_file',{'default':os.path.join('..','..','..','Data','Trajectories','Discover','sushi.his')})
......
......@@ -283,8 +283,6 @@ class ForciteConverter(Converter):
category = ('Converters','Materials Studio')
ancestor = []
settings = collections.OrderedDict()
settings['xtd_file'] = ('input_file',{'default':os.path.join('..','..','..','Data','Trajectories','Forcite','nylon66_rho100_500K_v300K.xtd')})
settings['trj_file'] = ('input_file',{'default':os.path.join('..','..','..','Data','Trajectories','Forcite','nylon66_rho100_500K_v300K.trj')})
......
......@@ -165,10 +165,6 @@ class GenericConverter(Converter):
label = "Generic"
category = ('Converters',)
ancestor = []
settings = collections.OrderedDict()
settings['gt_file'] = ('input_file',{'wildcard':"Generic trajectory files|*.gtf|All files|*",
'default':os.path.join('..','..','..','Data','Trajectories','Generic','test.gtf')})
......
......@@ -124,10 +124,6 @@ class LAMMPSConverter(Converter):
label = "LAMMPS"
category = ('Converters',)
ancestor = []
settings = collections.OrderedDict()
settings['config_file'] = ('input_file', {'label':"LAMMPS configuration file",
'default':os.path.join('..','..','..','Data','Trajectories','LAMMPS','glycyl_L_alanine_charmm.config')})
......
......@@ -40,7 +40,3 @@ class NAMDConverter(DCDConverter):
type = 'namd'
label = "NAMD"
category = ('Converters',)
ancestor = []
\ No newline at end of file
......@@ -47,10 +47,6 @@ class PDBConverter(Converter):
label = "PDB"
category = ('Converters',)
ancestor = []
settings = collections.OrderedDict()
settings['pdb_file'] = ('input_file',{'default':os.path.join('..','..','..','Data','Trajectories','PDB','2f58_nma.pdb')})
settings['nb_frame'] = ('range', {'valueType':int, 'includeLast':True, 'mini':0.0, 'default':(0,2,1)})
......
......@@ -124,10 +124,6 @@ class VASPConverter(Converter):
label = "VASP (>=5)"
category = ('Converters',)
ancestor = []
settings = collections.OrderedDict()
settings['xdatcar_file'] = ('input_file',{'default':os.path.join('..','..','..','Data','Trajectories','VASP','XDATCAR_version5')})
settings['time_step'] = ('float', {'label':"time step", 'default':1.0, 'mini':1.0e-9})
......
......@@ -40,7 +40,3 @@ class XPLORConverter(DCDConverter):
type = 'xplor'
label = "XPLOR"
category = ('Converters',)
ancestor = []
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment