diff --git a/doc/source/api/force_bdss.io.rst b/doc/source/api/force_bdss.io.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c221828e07ac581f86f7948193842a228c018dca
--- /dev/null
+++ b/doc/source/api/force_bdss.io.rst
@@ -0,0 +1,37 @@
+force_bdss.io package
+=====================
+
+Subpackages
+-----------
+
+.. toctree::
+
+    force_bdss.io.tests
+
+Submodules
+----------
+
+force_bdss.io.workflow_reader module
+------------------------------------
+
+.. automodule:: force_bdss.io.workflow_reader
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+force_bdss.io.workflow_writer module
+------------------------------------
+
+.. automodule:: force_bdss.io.workflow_writer
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
+Module contents
+---------------
+
+.. automodule:: force_bdss.io
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/source/api/force_bdss.io.tests.rst b/doc/source/api/force_bdss.io.tests.rst
new file mode 100644
index 0000000000000000000000000000000000000000..feac0dbb94addf619977e47b56728b5c34d49ff2
--- /dev/null
+++ b/doc/source/api/force_bdss.io.tests.rst
@@ -0,0 +1,22 @@
+force_bdss.io.tests package
+===========================
+
+Submodules
+----------
+
+force_bdss.io.tests.test_workflow_reader module
+-----------------------------------------------
+
+.. automodule:: force_bdss.io.tests.test_workflow_reader
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
+Module contents
+---------------
+
+.. automodule:: force_bdss.io.tests
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/source/api/force_bdss.rst b/doc/source/api/force_bdss.rst
index 31df6d0610d037019ac8faeba8d96a256f27eb0e..585e5731407f540335492d08a35d2b26d057f787 100644
--- a/doc/source/api/force_bdss.rst
+++ b/doc/source/api/force_bdss.rst
@@ -9,6 +9,7 @@ Subpackages
     force_bdss.cli
     force_bdss.core_plugins
     force_bdss.data_sources
+    force_bdss.io
     force_bdss.kpi
     force_bdss.mco
     force_bdss.tests
diff --git a/doc/source/api/force_bdss.tests.fixtures.rst b/doc/source/api/force_bdss.tests.fixtures.rst
new file mode 100644
index 0000000000000000000000000000000000000000..67056f7ef65941c819e17e7abbf4756b4b11728a
--- /dev/null
+++ b/doc/source/api/force_bdss.tests.fixtures.rst
@@ -0,0 +1,10 @@
+force_bdss.tests.fixtures package
+=================================
+
+Module contents
+---------------
+
+.. automodule:: force_bdss.tests.fixtures
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/source/api/force_bdss.tests.rst b/doc/source/api/force_bdss.tests.rst
index d564d7cd14fb3d5fcb6ed86223dea8f31774433f..d78f9d359df54ee71bfe7a117a086d75f9719589 100644
--- a/doc/source/api/force_bdss.tests.rst
+++ b/doc/source/api/force_bdss.tests.rst
@@ -1,6 +1,13 @@
 force_bdss.tests package
 ========================
 
+Subpackages
+-----------
+
+.. toctree::
+
+    force_bdss.tests.fixtures
+
 Submodules
 ----------
 
diff --git a/doc/source/api/force_bdss.workspecs.rst b/doc/source/api/force_bdss.workspecs.rst
index 506223976804e20e25bef7411ae4e77354d4aeed..13fb1dc84f6905f6ae3cbdbb78d4ad0b48fd4c5a 100644
--- a/doc/source/api/force_bdss.workspecs.rst
+++ b/doc/source/api/force_bdss.workspecs.rst
@@ -4,30 +4,6 @@ force_bdss.workspecs package
 Submodules
 ----------
 
-force_bdss.workspecs.data_source module
----------------------------------------
-
-.. automodule:: force_bdss.workspecs.data_source
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-force_bdss.workspecs.kpi_calculator module
-------------------------------------------
-
-.. automodule:: force_bdss.workspecs.kpi_calculator
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-force_bdss.workspecs.multi_criteria_optimizer module
-----------------------------------------------------
-
-.. automodule:: force_bdss.workspecs.multi_criteria_optimizer
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
 force_bdss.workspecs.workflow module
 ------------------------------------
 
diff --git a/force_bdss/base_core_driver.py b/force_bdss/base_core_driver.py
index 5875f1784dd0db5af54adb5b5901aeb869aca1a0..ca884ccac2d6462b581f1db81b5b86adbd777b07 100644
--- a/force_bdss/base_core_driver.py
+++ b/force_bdss/base_core_driver.py
@@ -1,10 +1,12 @@
 from envisage.plugin import Plugin
 from traits.trait_types import Instance
 
-from force_bdss.bundle_registry_plugin import (
+from .bundle_registry_plugin import (
     BundleRegistryPlugin,
     BUNDLE_REGISTRY_PLUGIN_ID
 )
+from .io.workflow_reader import WorkflowReader
+from .workspecs.workflow import Workflow
 
 
 class BaseCoreDriver(Plugin):
@@ -14,5 +16,13 @@ class BaseCoreDriver(Plugin):
 
     bundle_registry = Instance(BundleRegistryPlugin)
 
+    #: Deserialized content of the workflow file.
+    workflow = Instance(Workflow)
+
     def _bundle_registry_default(self):
         return self.application.get_plugin(BUNDLE_REGISTRY_PLUGIN_ID)
+
+    def _workflow_default(self):
+        reader = WorkflowReader(self.bundle_registry)
+        with open(self.application.workflow_filepath) as f:
+            return reader.read(f)
diff --git a/force_bdss/base_extension_plugin.py b/force_bdss/base_extension_plugin.py
index 3627b137961a3dcea0fb44068d42abdcdd5e08e3..f940d3544e9c5b9bd257f1b5f24497ad840d5372 100644
--- a/force_bdss/base_extension_plugin.py
+++ b/force_bdss/base_extension_plugin.py
@@ -1,9 +1,9 @@
 from envisage.plugin import Plugin
 from traits.trait_types import List
 
-from force_bdss.data_sources.i_data_source_bundle import IDataSourceBundle
-from force_bdss.kpi.i_kpi_calculator_bundle import IKPICalculatorBundle
-from force_bdss.mco.i_multi_criteria_optimizer_bundle import \
+from .data_sources.i_data_source_bundle import IDataSourceBundle
+from .kpi.i_kpi_calculator_bundle import IKPICalculatorBundle
+from .mco.i_multi_criteria_optimizer_bundle import \
     IMultiCriteriaOptimizerBundle
 
 
diff --git a/force_bdss/bdss_application.py b/force_bdss/bdss_application.py
index c813e4dc4357c922bc356ef48f076adfc70966be..e7c556fcff59ae8ee46fbb4e37ffb2b49fdb6c51 100644
--- a/force_bdss/bdss_application.py
+++ b/force_bdss/bdss_application.py
@@ -1,17 +1,13 @@
-import json
-
 from stevedore import extension
 from stevedore.exception import NoMatches
+
 from envisage.api import Application
 from envisage.core_plugin import CorePlugin
+from traits.api import Unicode, Bool
 
-from force_bdss.bundle_registry_plugin import BundleRegistryPlugin
-from force_bdss.core_evaluation_driver import CoreEvaluationDriver
-from force_bdss.core_mco_driver import CoreMCODriver
-
-from traits.api import Unicode, Bool, Instance
-
-from force_bdss.workspecs.workflow import Workflow
+from .bundle_registry_plugin import BundleRegistryPlugin
+from .core_evaluation_driver import CoreEvaluationDriver
+from .core_mco_driver import CoreMCODriver
 
 
 class BDSSApplication(Application):
@@ -22,9 +18,6 @@ class BDSSApplication(Application):
     #: The path of the workflow file to open
     workflow_filepath = Unicode()
 
-    #: Deserialized content of the workflow file.
-    workflow = Instance(Workflow)
-
     #: This flag signals to the application not to execute and orchestrate
     #: the MCO, but instead to perform a single evaluation under the
     #: coordination of the MCO itself. See design notes for more details.
@@ -56,7 +49,3 @@ class BDSSApplication(Application):
             print("No extensions found")
 
         super(BDSSApplication, self).__init__(plugins=plugins)
-
-    def _workflow_default(self):
-        with open(self.workflow_filepath) as f:
-            return Workflow.from_json(json.load(f))
diff --git a/force_bdss/bundle_registry_plugin.py b/force_bdss/bundle_registry_plugin.py
index f0e418c72e931bc49bf6600dac65be56dcdde7d0..2655bf7516067b6342b2cf4eb152dfb97a03c964 100644
--- a/force_bdss/bundle_registry_plugin.py
+++ b/force_bdss/bundle_registry_plugin.py
@@ -2,11 +2,12 @@ from envisage.extension_point import ExtensionPoint
 from envisage.plugin import Plugin
 from traits.api import List
 
-from force_bdss.data_sources.i_data_source_bundle import (
+from .data_sources.i_data_source_bundle import (
     IDataSourceBundle)
-from force_bdss.kpi.i_kpi_calculator_bundle import IKPICalculatorBundle
-from force_bdss.mco.i_multi_criteria_optimizer_bundle import (
-    IMultiCriteriaOptimizerBundle)
+from .kpi.i_kpi_calculator_bundle import IKPICalculatorBundle
+from .mco.i_multi_criteria_optimizer_bundle import (
+    IMultiCriteriaOptimizerBundle
+)
 
 
 BUNDLE_REGISTRY_PLUGIN_ID = "force.bdss.plugins.bundle_registry"
diff --git a/force_bdss/cli/tests/fixtures/test_csv.json b/force_bdss/cli/tests/fixtures/test_csv.json
deleted file mode 100644
index b58994dba80eabeecad2fc886de0b5a377de74b4..0000000000000000000000000000000000000000
--- a/force_bdss/cli/tests/fixtures/test_csv.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
-  "version": "1",
-  "multi_criteria_optimizer": {
-    "id": "force.bdss.bundles.enthought.dakota",
-    "model_data": {
-      "value_types": ["DUMMY"]
-    }
-  },
-  "data_sources": [
-    {
-      "id": "force.bdss.bundles.enthought.csv_extractor",
-      "model_data": {
-        "filename": "foo.csv",
-        "row": 3,
-        "column": 5,
-        "cuba_type": "PRESSURE"
-      }
-    },
-    {
-      "id": "force.bdss.bundles.enthought.csv_extractor",
-      "model_data": {
-        "filename": "foo.csv",
-        "row": 3,
-        "column": 5,
-        "cuba_type": "PRESSURE"
-      }
-    }
-  ],
-  "kpi_calculators": [
-    {
-      "id": "force.bdss.bundles.enthought.kpi_adder",
-      "model_data": {
-        "cuba_type_in": "PRESSURE",
-        "cuba_type_out": "TOTAL_PRESSURE"
-      }
-    }
-  ]
-}
diff --git a/force_bdss/cli/tests/fixtures/test_csv_v2.json b/force_bdss/cli/tests/fixtures/test_csv_v2.json
deleted file mode 100644
index a6963bba7644cf77ece55b383ec72204906d6d8b..0000000000000000000000000000000000000000
--- a/force_bdss/cli/tests/fixtures/test_csv_v2.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "version": "2"
-}
diff --git a/force_bdss/cli/tests/test_execution.py b/force_bdss/cli/tests/test_execution.py
index d511f79f6f2a4b0a806b12bc50e7c02c216c237d..80f3c878f91a050fa71b4ec04b81bf8e82486d2a 100644
--- a/force_bdss/cli/tests/test_execution.py
+++ b/force_bdss/cli/tests/test_execution.py
@@ -3,6 +3,8 @@ import subprocess
 import os
 from contextlib import contextmanager
 
+from force_bdss.tests import fixtures
+
 
 @contextmanager
 def cd(dir):
@@ -22,17 +24,17 @@ def fixture_dir():
 
 class TestExecution(unittest.TestCase):
     def test_plain_invocation_mco(self):
-        with cd(fixture_dir()):
+        with cd(fixtures.dirpath()):
             out = subprocess.check_call(["force_bdss", "test_csv.json"])
             self.assertEqual(out, 0)
 
     def test_unsupported_file_input(self):
-        with cd(fixture_dir()):
+        with cd(fixtures.dirpath()):
             with self.assertRaises(subprocess.CalledProcessError):
                 subprocess.check_call(["force_bdss", "test_csv_v2.json"])
 
     def test_corrupted_file_input(self):
-        with cd(fixture_dir()):
+        with cd(fixtures.dirpath()):
             with self.assertRaises(subprocess.CalledProcessError):
                 subprocess.check_call(["force_bdss",
                                        "test_csv_corrupted.json"])
diff --git a/force_bdss/core_evaluation_driver.py b/force_bdss/core_evaluation_driver.py
index 19e56664c580721246ee42ad65197512ccad8e0a..d28def3edda4c2f5c6005808de337f184fac9207 100644
--- a/force_bdss/core_evaluation_driver.py
+++ b/force_bdss/core_evaluation_driver.py
@@ -1,6 +1,13 @@
-from traits.has_traits import on_trait_change
+from __future__ import print_function
 
-from force_bdss.base_core_driver import BaseCoreDriver
+import sys
+from traits.api import on_trait_change
+
+from .base_core_driver import BaseCoreDriver
+from .io.workflow_reader import (
+    InvalidVersionException,
+    InvalidFileException
+)
 
 
 class CoreEvaluationDriver(BaseCoreDriver):
@@ -10,11 +17,14 @@ class CoreEvaluationDriver(BaseCoreDriver):
 
     @on_trait_change("application:started")
     def application_started(self):
-        workflow = self.application.workflow
-
-        mco_data = workflow.multi_criteria_optimizer
-        mco_bundle = self.bundle_registry.mco_bundle_by_id(mco_data.id)
-        mco_model = mco_bundle.create_model(mco_data.model_data)
+        try:
+            workflow = self.workflow
+        except (InvalidVersionException, InvalidFileException) as e:
+            print(str(e), file=sys.stderr)
+            sys.exit(1)
+
+        mco_model = workflow.multi_criteria_optimizer
+        mco_bundle = mco_model.bundle
         mco_communicator = mco_bundle.create_communicator(
             self.application,
             mco_model)
@@ -22,22 +32,17 @@ class CoreEvaluationDriver(BaseCoreDriver):
         parameters = mco_communicator.receive_from_mco()
 
         ds_results = []
-        for requested_ds in workflow.data_sources:
-            ds_bundle = self.bundle_registry.data_source_bundle_by_id(
-                requested_ds.id)
-            ds_model = ds_bundle.create_model(requested_ds.model_data)
-            data_source = ds_bundle.create_data_source(
-                self.application, ds_model)
+        for ds_model in workflow.data_sources:
+            ds_bundle = ds_model.bundle
+            data_source = ds_bundle.create_data_source(self.application,
+                                                       ds_model)
             ds_results.append(data_source.run(parameters))
 
         kpi_results = []
-        for requested_kpic in workflow.kpi_calculators:
-            kpic_bundle = self.bundle_registry.kpi_calculator_bundle_by_id(
-                requested_kpic.id)
-            ds_model = kpic_bundle.create_model(
-                requested_kpic.model_data)
-            kpi_calculator = kpic_bundle.create_data_source(
-                self.application, ds_model)
+        for kpic_model in workflow.kpi_calculators:
+            kpic_bundle = kpic_model.bundle
+            kpi_calculator = kpic_bundle.create_kpi_calculator(
+                self.application, kpic_model)
             kpi_results.append(kpi_calculator.run(ds_results))
 
         mco_communicator.send_to_mco(kpi_results)
diff --git a/force_bdss/core_mco_driver.py b/force_bdss/core_mco_driver.py
index f84c83dda83f6eeee1f7839608060d61551c952f..4d3d52aaac9706468c5bc3049155c5bb78f04ebc 100644
--- a/force_bdss/core_mco_driver.py
+++ b/force_bdss/core_mco_driver.py
@@ -4,9 +4,11 @@ import sys
 
 from traits.api import on_trait_change
 
-from force_bdss.base_core_driver import BaseCoreDriver
-from force_bdss.workspecs.workflow import (InvalidVersionException,
-                                           InvalidFileException)
+from .base_core_driver import BaseCoreDriver
+from .io.workflow_reader import (
+    InvalidVersionException,
+    InvalidFileException
+)
 
 
 class CoreMCODriver(BaseCoreDriver):
@@ -17,14 +19,12 @@ class CoreMCODriver(BaseCoreDriver):
     @on_trait_change("application:started")
     def application_started(self):
         try:
-            workflow = self.application.workflow
+            workflow = self.workflow
         except (InvalidVersionException, InvalidFileException) as e:
             print(str(e), file=sys.stderr)
             sys.exit(1)
 
-        mco_data = workflow.multi_criteria_optimizer
-        mco_bundle = self.bundle_registry.mco_bundle_by_id(mco_data.id)
-        mco_model = mco_bundle.create_model(mco_data.model_data)
+        mco_model = workflow.multi_criteria_optimizer
+        mco_bundle = mco_model.bundle
         mco = mco_bundle.create_optimizer(self.application, mco_model)
-
         mco.run()
diff --git a/force_bdss/core_plugins/csv_extractor/csv_extractor/csv_extractor_bundle.py b/force_bdss/core_plugins/csv_extractor/csv_extractor/csv_extractor_bundle.py
index 3ef2128b81b8e7ce35fb32fa38a5e1fb29eaf07f..a44b5e14b65970a1a62d26a7706469a60d13a679 100644
--- a/force_bdss/core_plugins/csv_extractor/csv_extractor/csv_extractor_bundle.py
+++ b/force_bdss/core_plugins/csv_extractor/csv_extractor/csv_extractor_bundle.py
@@ -12,9 +12,9 @@ class CSVExtractorBundle(HasStrictTraits):
 
     def create_model(self, model_data=None):
         if model_data is None:
-            return CSVExtractorModel()
-        else:
-            return CSVExtractorModel.from_json(model_data)
+            model_data = {}
+
+        return CSVExtractorModel(self, **model_data)
 
     def create_data_source(self, application, model):
         return CSVExtractorDataSource(self, application, model)
diff --git a/force_bdss/core_plugins/csv_extractor/csv_extractor/csv_extractor_model.py b/force_bdss/core_plugins/csv_extractor/csv_extractor/csv_extractor_model.py
index 91682add3f7fb1ee9742f8a8b1bed64dd3595cbb..ad8d34bad87ce416d563346ef7d12257f3a01477 100644
--- a/force_bdss/core_plugins/csv_extractor/csv_extractor/csv_extractor_model.py
+++ b/force_bdss/core_plugins/csv_extractor/csv_extractor/csv_extractor_model.py
@@ -8,12 +8,3 @@ class CSVExtractorModel(BaseDataSourceModel):
     row = Int()
     column = Int()
     cuba_type = String()
-
-    @classmethod
-    def from_json(cls, json_data):
-        return cls(
-            filename=json_data["filename"],
-            row=json_data["row"],
-            column=json_data["column"],
-            cuba_type=json_data["cuba_type"]
-        )
diff --git a/force_bdss/core_plugins/dummy_kpi/kpi_adder/kpi_adder_bundle.py b/force_bdss/core_plugins/dummy_kpi/kpi_adder/kpi_adder_bundle.py
index a76456c24a3342b5cb15c820b99428982f2b4c5c..a06cf885d4a2d581768da6c6b71a21e7208763c9 100644
--- a/force_bdss/core_plugins/dummy_kpi/kpi_adder/kpi_adder_bundle.py
+++ b/force_bdss/core_plugins/dummy_kpi/kpi_adder/kpi_adder_bundle.py
@@ -13,9 +13,9 @@ class KPIAdderBundle(HasStrictTraits):
 
     def create_model(self, model_data=None):
         if model_data is None:
-            return KPIAdderModel()
-        else:
-            return KPIAdderModel.from_json(model_data)
+            model_data = {}
 
-    def create_data_source(self, application, model):
+        return KPIAdderModel(self, **model_data)
+
+    def create_kpi_calculator(self, application, model):
         return KPIAdderCalculator(self, application, model)
diff --git a/force_bdss/core_plugins/dummy_kpi/kpi_adder/kpi_adder_model.py b/force_bdss/core_plugins/dummy_kpi/kpi_adder/kpi_adder_model.py
index 3d57563f9444cde1c31175193b8b03ca3b275d85..099c422f80070ed3b8bb844cdd0c01bb3a6e628f 100644
--- a/force_bdss/core_plugins/dummy_kpi/kpi_adder/kpi_adder_model.py
+++ b/force_bdss/core_plugins/dummy_kpi/kpi_adder/kpi_adder_model.py
@@ -6,10 +6,3 @@ from force_bdss.api import BaseKPICalculatorModel
 class KPIAdderModel(BaseKPICalculatorModel):
     cuba_type_in = String()
     cuba_type_out = String()
-
-    @classmethod
-    def from_json(cls, json_data):
-        return cls(
-            cuba_type_in=json_data["cuba_type_in"],
-            cuba_type_out=json_data["cuba_type_out"]
-        )
diff --git a/force_bdss/core_plugins/dummy_mco/dakota/dakota_bundle.py b/force_bdss/core_plugins/dummy_mco/dakota/dakota_bundle.py
index 13880513717114a424861496583bcfb9f762a8bb..07e4dd366a2c964cbf9de4dcace24170feb3bc6c 100644
--- a/force_bdss/core_plugins/dummy_mco/dakota/dakota_bundle.py
+++ b/force_bdss/core_plugins/dummy_mco/dakota/dakota_bundle.py
@@ -12,9 +12,8 @@ class DakotaBundle(HasStrictTraits):
 
     def create_model(self, model_data=None):
         if model_data is None:
-            return DakotaModel()
-        else:
-            return DakotaModel.from_json(model_data)
+            model_data = {}
+        return DakotaModel(self, **model_data)
 
     def create_optimizer(self, application, model):
         return DakotaOptimizer(self, application, model)
diff --git a/force_bdss/core_plugins/dummy_mco/dakota/dakota_model.py b/force_bdss/core_plugins/dummy_mco/dakota/dakota_model.py
index 6b3ab63eb1c5491a7ef5c389c64bea074f3afd9c..b4c081a6b38bd5478f4287624411c5ce185cd01d 100644
--- a/force_bdss/core_plugins/dummy_mco/dakota/dakota_model.py
+++ b/force_bdss/core_plugins/dummy_mco/dakota/dakota_model.py
@@ -5,7 +5,3 @@ from force_bdss.api import BaseMCOModel
 
 class DakotaModel(BaseMCOModel):
     value_types = List(String)
-
-    @classmethod
-    def from_json(cls, model_data):
-        return cls(value_types=model_data["value_types"])
diff --git a/force_bdss/data_sources/base_data_source.py b/force_bdss/data_sources/base_data_source.py
index e8eec36d78660d9aab8eb24f2e140392d807a05e..c700ea861daeaeb670451137b70f09ad8c065966 100644
--- a/force_bdss/data_sources/base_data_source.py
+++ b/force_bdss/data_sources/base_data_source.py
@@ -3,6 +3,11 @@ import six
 
 
 class BaseDataSource(six.with_metaclass(abc.ABCMeta)):
+    """Base class for the DataSource, any computational engine/retriever
+    for data.
+
+    Inherit from this class for your specific DataSource.
+    """
     def __init__(self, bundle, application, model):
         self.bundle = bundle
         self.application = application
diff --git a/force_bdss/data_sources/base_data_source_model.py b/force_bdss/data_sources/base_data_source_model.py
index d88d3adff6f7938b163a3369a38612864a9a2132..cc3f30ea2f0c37f693fa222e5783a96d04654fa2 100644
--- a/force_bdss/data_sources/base_data_source_model.py
+++ b/force_bdss/data_sources/base_data_source_model.py
@@ -1,9 +1,20 @@
-import abc
-from traits.api import ABCHasStrictTraits
+from traits.api import ABCHasStrictTraits, Instance
+
+from .i_data_source_bundle import IDataSourceBundle
 
 
 class BaseDataSourceModel(ABCHasStrictTraits):
-    @classmethod
-    @abc.abstractmethod
-    def from_json(self, model_data):
-        pass
+    """Base class for the bundle specific DataSource models.
+    This model will also provide, through traits/traitsui magic the View
+    that will appear in the workflow manager UI.
+
+    In your bundle definition, your bundle-specific model must reimplement
+    this class.
+    """
+    #: A reference to the creating bundle, so that we can
+    #: retrieve it as the originating factory.
+    bundle = Instance(IDataSourceBundle, visible=False, transient=True)
+
+    def __init__(self, bundle, *args, **kwargs):
+        self.bundle = bundle
+        super(BaseDataSourceModel, self).__init__(*args, **kwargs)
diff --git a/force_bdss/data_sources/i_data_source_bundle.py b/force_bdss/data_sources/i_data_source_bundle.py
index 146c1e8cbaa03be765ad68d2707411b58046824c..5270245c23d05b36fda603bd723d843a6777ecd7 100644
--- a/force_bdss/data_sources/i_data_source_bundle.py
+++ b/force_bdss/data_sources/i_data_source_bundle.py
@@ -10,7 +10,12 @@ class IDataSourceBundle(Interface):
     name = String()
 
     def create_data_source(self, application, model):
+        """Factory method.
+        Must return the bundle-specific BaseDataSource instance.
+        """
         pass
 
     def create_model(self, model_data=None):
-        pass
+        """Factory method.
+        Must return the bundle-specific BaseDataSourceModel instance.
+        """
diff --git a/force_bdss/io/__init__.py b/force_bdss/io/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/force_bdss/io/tests/__init__.py b/force_bdss/io/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/force_bdss/io/tests/test_workflow_reader.py b/force_bdss/io/tests/test_workflow_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..53c73f4032d0802883d4cf8b039a311bfbccaeba
--- /dev/null
+++ b/force_bdss/io/tests/test_workflow_reader.py
@@ -0,0 +1,46 @@
+import json
+import unittest
+from six import StringIO
+
+from force_bdss.bundle_registry_plugin import BundleRegistryPlugin
+from force_bdss.io.workflow_reader import (
+    WorkflowReader,
+    InvalidVersionException, InvalidFileException)
+
+try:
+    import mock
+except ImportError:
+    from unittest import mock
+
+
+class TestWorkflowReader(unittest.TestCase):
+    def setUp(self):
+        self.mock_bundle_registry = mock.Mock(spec=BundleRegistryPlugin)
+        self.wfreader = WorkflowReader(self.mock_bundle_registry)
+
+    def test_initialization(self):
+        self.assertEqual(self.wfreader.bundle_registry,
+                         self.mock_bundle_registry)
+
+    def test_invalid_version(self):
+        data = {
+            "version": "2",
+            "workflow": {}
+        }
+
+        with self.assertRaises(InvalidVersionException):
+            self.wfreader.read(self._as_json_stringio(data))
+
+    def test_absent_version(self):
+        data = {
+        }
+
+        with self.assertRaises(InvalidFileException):
+            self.wfreader.read(self._as_json_stringio(data))
+
+    def _as_json_stringio(self, data):
+        fp = StringIO()
+        json.dump(data, fp)
+        fp.seek(0)
+
+        return fp
diff --git a/force_bdss/io/tests/test_workflow_writer.py b/force_bdss/io/tests/test_workflow_writer.py
new file mode 100644
index 0000000000000000000000000000000000000000..e6f81ec25a52ff167f25109b4dc37d3c43f42798
--- /dev/null
+++ b/force_bdss/io/tests/test_workflow_writer.py
@@ -0,0 +1,65 @@
+import unittest
+import json
+from six import StringIO
+
+from force_bdss.bundle_registry_plugin import BundleRegistryPlugin
+from force_bdss.io.workflow_reader import WorkflowReader
+
+try:
+    import mock
+except ImportError:
+    from unittest import mock
+
+from force_bdss.id_generators import bundle_id
+from force_bdss.io.workflow_writer import WorkflowWriter
+from force_bdss.mco.base_mco_model import BaseMCOModel
+from force_bdss.mco.i_multi_criteria_optimizer_bundle import \
+    IMultiCriteriaOptimizerBundle
+from force_bdss.workspecs.workflow import Workflow
+
+
+class TestWorkflowWriter(unittest.TestCase):
+    def setUp(self):
+        self.mock_registry = mock.Mock(spec=BundleRegistryPlugin)
+        mock_mco_bundle = mock.Mock(spec=IMultiCriteriaOptimizerBundle,
+                                    id=bundle_id("enthought", "mock"))
+        mock_mco_model = mock.Mock(
+            spec=BaseMCOModel,
+            bundle=mock_mco_bundle
+        )
+        mock_mco_bundle.create_model = mock.Mock(
+            return_value=mock_mco_model
+        )
+        self.mock_registry.mco_bundle_by_id = mock.Mock(
+            return_value=mock_mco_bundle)
+
+    def test_write(self):
+        wfwriter = WorkflowWriter()
+        fp = StringIO()
+        wf = self._create_mock_workflow()
+        wfwriter.write(wf, fp)
+        result = json.loads(fp.getvalue())
+        self.assertIn("version", result)
+        self.assertIn("workflow", result)
+        self.assertIn("multi_criteria_optimizer", result["workflow"])
+        self.assertIn("data_sources", result["workflow"])
+        self.assertIn("kpi_calculators", result["workflow"])
+
+    def test_write_and_read(self):
+        wfwriter = WorkflowWriter()
+        fp = StringIO()
+        wf = self._create_mock_workflow()
+        wfwriter.write(wf, fp)
+        fp.seek(0)
+        wfreader = WorkflowReader(self.mock_registry)
+        wf_result = wfreader.read(fp)
+        self.assertEqual(wf_result.multi_criteria_optimizer.bundle.id,
+                         wf.multi_criteria_optimizer.bundle.id)
+
+    def _create_mock_workflow(self):
+        wf = Workflow()
+        wf.multi_criteria_optimizer = BaseMCOModel(
+            mock.Mock(
+                spec=IMultiCriteriaOptimizerBundle,
+                id=bundle_id("enthought", "mock")))
+        return wf
diff --git a/force_bdss/io/workflow_reader.py b/force_bdss/io/workflow_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..adead5f8e13f6214622b20ece5169b02921660f5
--- /dev/null
+++ b/force_bdss/io/workflow_reader.py
@@ -0,0 +1,168 @@
+import json
+import logging
+
+from traits.api import HasStrictTraits, Instance
+
+from ..workspecs.workflow import Workflow
+from ..bundle_registry_plugin import BundleRegistryPlugin
+
+SUPPORTED_FILE_VERSIONS = ["1"]
+
+
+class InvalidFileException(Exception):
+    """Raised if the file is invalid for some reason"""
+
+
+class InvalidVersionException(InvalidFileException):
+    """Raised if the version tag does not satisfy the currently
+    supported list."""
+
+
+class WorkflowReader(HasStrictTraits):
+    """
+    Reads the workflow from a file.
+    """
+    #: The bundle registry. The reader needs it to create the
+    #: bundle-specific model objects.
+    bundle_registry = Instance(BundleRegistryPlugin)
+
+    def __init__(self, bundle_registry, *args, **kwargs):
+        """Initializes the reader.
+
+        Parameters
+        ----------
+        bundle_registry: BundleRegistryPlugin
+            The bundle registry that provides lookup services
+            for a bundle identified by a given id.
+        """
+        self.bundle_registry = bundle_registry
+
+        super(WorkflowReader, self).__init__(*args, **kwargs)
+
+    def read(self, file):
+        """Reads the file and returns a Workflow object.
+        If any problem is found, raises an InvalidFileException or a
+        derived, more specialized exception.
+
+        Parameters
+        ----------
+        file: File
+            A file object containing the data of the workflow in the
+            appropriate json format.
+
+        Returns
+        -------
+        Workflow
+            An instance of the model tree, rooted at Workflow.
+
+        Raises
+        ------
+        InvalidFileException
+            Raised if the file is corrupted or cannot be read by this reader.
+        """
+        json_data = json.load(file)
+
+        try:
+            version = json_data["version"]
+        except KeyError:
+            logging.error("File missing version information")
+            raise InvalidFileException("Corrupted input file, no version"
+                                       " specified")
+
+        if version not in SUPPORTED_FILE_VERSIONS:
+            logging.error(
+                "File contains version {} that is not in the "
+                "list of supported versions {}".format(
+                    version, SUPPORTED_FILE_VERSIONS)
+            )
+            raise InvalidVersionException(
+                "File version {} not supported".format(json_data["version"]))
+
+        wf = Workflow()
+
+        try:
+            wf_data = json_data["workflow"]
+            wf.multi_criteria_optimizer = self._extract_mco(wf_data)
+            wf.data_sources[:] = self._extract_data_sources(wf_data)
+            wf.kpi_calculators[:] = self._extract_kpi_calculators(wf_data)
+        except KeyError as e:
+            logging.exception("Could not read file")
+            raise InvalidFileException("Could not read file. "
+                                       "Unable to find key {}".format(e))
+        return wf
+
+    def _extract_mco(self, wf_data):
+        """Extracts the MCO from the workflow dictionary data.
+
+        Parameters
+        ----------
+        wf_data: dict
+            the content of the workflow key in the top level dictionary data.
+
+        Returns
+        -------
+        a BaseMCOModel instance of the bundle-specific MCO driver, or None
+        if the file does not specify an MCO (e.g. the file was saved
+        before an MCO was chosen).
+        """
+        registry = self.bundle_registry
+
+        mco_data = wf_data.get("multi_criteria_optimizer")
+        if mco_data is None:
+            # The file was saved without setting an MCO.
+            # The file is valid, we simply can't run any optimization yet.
+            return None
+
+        mco_id = mco_data["id"]
+        mco_bundle = registry.mco_bundle_by_id(mco_id)
+        return mco_bundle.create_model(
+            mco_data["model_data"])
+
+    def _extract_data_sources(self, wf_data):
+        """Extracts the data sources from the workflow dictionary data.
+
+        Parameters
+        ----------
+        wf_data: dict
+            the content of the workflow key in the top level dictionary data.
+
+        Returns
+        -------
+        list of BaseDataSourceModel instances. Each BaseDataSourceModel is an
+        instance of the bundle-specific model class. The list can be empty.
+        """
+        registry = self.bundle_registry
+
+        data_sources = []
+        for ds_entry in wf_data["data_sources"]:
+            ds_id = ds_entry["id"]
+            ds_bundle = registry.data_source_bundle_by_id(ds_id)
+            data_sources.append(ds_bundle.create_model(ds_entry["model_data"]))
+
+        return data_sources
+
+    def _extract_kpi_calculators(self, wf_data):
+        """Extracts the KPI calculators from the workflow dictionary data.
+
+        Parameters
+        ----------
+        wf_data: dict
+            the content of the workflow key in the top level dictionary data.
+
+        Returns
+        -------
+        list of BaseKPICalculatorModel instances. Each BaseKPICalculatorModel
+        is an instance of the bundle-specific model class. The list can be
+        empty.
+        """
+        registry = self.bundle_registry
+
+        kpi_calculators = []
+        for kpic_entry in wf_data["kpi_calculators"]:
+            kpic_id = kpic_entry["id"]
+            kpic_bundle = registry.kpi_calculator_bundle_by_id(kpic_id)
+
+            kpi_calculators.append(
+                kpic_bundle.create_model(kpic_entry["model_data"]))
+
+        return kpi_calculators
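
Reviewer note: for orientation, a minimal usage sketch of the new reader (not part of the patch; `registry` and the file path are placeholders for the application's `BundleRegistryPlugin` and an actual workflow file):

```python
from force_bdss.io.workflow_reader import WorkflowReader

reader = WorkflowReader(registry)      # registry: BundleRegistryPlugin (assumed)
with open("workflow.json") as f:       # hypothetical path
    # Raises InvalidFileException (or InvalidVersionException) if the
    # "version" key is missing or unsupported.
    workflow = reader.read(f)
```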
diff --git a/force_bdss/io/workflow_writer.py b/force_bdss/io/workflow_writer.py
new file mode 100644
index 0000000000000000000000000000000000000000..a83d54aab3c649a521a140d49774a89995275b8f
--- /dev/null
+++ b/force_bdss/io/workflow_writer.py
@@ -0,0 +1,48 @@
+import json
+from traits.api import HasStrictTraits
+
+
+class WorkflowWriter(HasStrictTraits):
+    """A Writer for writing the Workflow onto disk.
+    """
+    def write(self, workflow, f):
+        """Writes the workflow model object to a file f in JSON format.
+
+        Parameters
+        ----------
+        workflow: Workflow
+            The Workflow instance to write to file
+
+        f: File
+            A file object on which to write the workflow, properly serialized
+            into JSON.
+        """
+        data = {
+            "version": "1",
+            "workflow": {}
+        }
+
+        wf_data = data["workflow"]
+        wf_data["multi_criteria_optimizer"] = {
+            "id": workflow.multi_criteria_optimizer.bundle.id,
+            "model_data": workflow.multi_criteria_optimizer.__getstate__()
+        }
+        kpic_data = []
+        for kpic in workflow.kpi_calculators:
+            kpic_data.append({
+                "id": kpic.bundle.id,
+                "model_data": kpic.__getstate__()}
+            )
+
+        wf_data["kpi_calculators"] = kpic_data
+
+        ds_data = []
+        for ds in workflow.data_sources:
+            ds_data.append({
+                "id": ds.bundle.id,
+                "model_data": ds.__getstate__()
+            })
+
+        wf_data["data_sources"] = ds_data
+
+        json.dump(data, f)
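
Reviewer note: a minimal writing sketch under the assumption that `workflow` is a populated Workflow whose models carry their originating bundles (as produced by the bundles' `create_model()`); the output path is a placeholder:

```python
from force_bdss.io.workflow_writer import WorkflowWriter

with open("workflow.json", "w") as f:  # hypothetical output path
    WorkflowWriter().write(workflow, f)
# The resulting JSON has a top-level "version" key and a "workflow" key
# containing "multi_criteria_optimizer", "data_sources" and
# "kpi_calculators" entries.
```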
diff --git a/force_bdss/kpi/base_kpi_calculator_model.py b/force_bdss/kpi/base_kpi_calculator_model.py
index 9489b9b89ded2d98442b5f72052e2666673952a7..1b254a96b5198709e437b8a90187730caafe2919 100644
--- a/force_bdss/kpi/base_kpi_calculator_model.py
+++ b/force_bdss/kpi/base_kpi_calculator_model.py
@@ -1,9 +1,20 @@
-import abc
-from traits.has_traits import ABCHasStrictTraits
+from traits.api import ABCHasStrictTraits, Instance
+
+from .i_kpi_calculator_bundle import IKPICalculatorBundle
 
 
 class BaseKPICalculatorModel(ABCHasStrictTraits):
-    @classmethod
-    @abc.abstractmethod
-    def from_json(self, model_data):
-        pass
+    """Base class for the bundle specific KPI calculator models.
+    This model will also provide, through traits/traitsui magic the View
+    that will appear in the workflow manager UI.
+
+    In your bundle definition, your bundle-specific model must reimplement
+    this class.
+    """
+    #: A reference to the creating bundle, so that we can
+    #: retrieve it as the originating factory.
+    bundle = Instance(IKPICalculatorBundle, visible=False, transient=True)
+
+    def __init__(self, bundle, *args, **kwargs):
+        self.bundle = bundle
+        super(BaseKPICalculatorModel, self).__init__(*args, **kwargs)
diff --git a/force_bdss/mco/base_mco_model.py b/force_bdss/mco/base_mco_model.py
index 3bf128cfb0f7132eed980a92cd8d39770233a097..e7466353e63a32edc862d744b643752f6c2f88cb 100644
--- a/force_bdss/mco/base_mco_model.py
+++ b/force_bdss/mco/base_mco_model.py
@@ -1,9 +1,22 @@
-import abc
-from traits.api import ABCHasStrictTraits
+from traits.api import ABCHasStrictTraits, Instance
+
+from .i_multi_criteria_optimizer_bundle import IMultiCriteriaOptimizerBundle
 
 
 class BaseMCOModel(ABCHasStrictTraits):
-    @classmethod
-    @abc.abstractmethod
-    def from_json(self, model_data):
-        pass
+    """Base class for the bundle specific MCO models.
+    This model will also provide, through traits/traitsui magic the View
+    that will appear in the workflow manager UI.
+
+    In your bundle definition, your bundle-specific model must reimplement
+    this class.
+    """
+    #: A reference to the creating bundle, so that we can
+    #: retrieve it as the originating factory.
+    bundle = Instance(IMultiCriteriaOptimizerBundle,
+                      visible=False,
+                      transient=True)
+
+    def __init__(self, bundle, *args, **kwargs):
+        self.bundle = bundle
+        super(BaseMCOModel, self).__init__(*args, **kwargs)
diff --git a/force_bdss/tests/fixtures/__init__.py b/force_bdss/tests/fixtures/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ad2b16ba9831f5cd9f3b37cbe8890a6afa7b711
--- /dev/null
+++ b/force_bdss/tests/fixtures/__init__.py
@@ -0,0 +1,9 @@
+from os.path import join, dirname, abspath
+
+
+def get(filename):
+    return join(dirpath(), filename)
+
+
+def dirpath():
+    return dirname(abspath(__file__))
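
Reviewer note: a short sketch of how tests are expected to use the new fixtures helpers (mirrors the change in test_execution.py; the fixture filename is taken from this patch):

```python
from force_bdss.tests import fixtures

path = fixtures.get("test_csv.json")   # absolute path to a fixture file
directory = fixtures.dirpath()         # directory holding all fixtures
```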
diff --git a/force_bdss/cli/tests/fixtures/foo.csv b/force_bdss/tests/fixtures/foo.csv
similarity index 100%
rename from force_bdss/cli/tests/fixtures/foo.csv
rename to force_bdss/tests/fixtures/foo.csv
diff --git a/force_bdss/tests/fixtures/test_csv.json b/force_bdss/tests/fixtures/test_csv.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c15b0f07f2bdeee497b41ea655e31fb64b19dff
--- /dev/null
+++ b/force_bdss/tests/fixtures/test_csv.json
@@ -0,0 +1,40 @@
+{
+  "version": "1",
+  "workflow": {
+    "multi_criteria_optimizer": {
+      "id": "force.bdss.bundles.enthought.dakota",
+      "model_data": {
+        "value_types": ["DUMMY"]
+      }
+    },
+    "data_sources": [
+      {
+        "id": "force.bdss.bundles.enthought.csv_extractor",
+        "model_data": {
+          "filename": "foo.csv",
+          "row": 3,
+          "column": 5,
+          "cuba_type": "PRESSURE"
+        }
+      },
+      {
+        "id": "force.bdss.bundles.enthought.csv_extractor",
+        "model_data": {
+          "filename": "foo.csv",
+          "row": 3,
+          "column": 5,
+          "cuba_type": "PRESSURE"
+        }
+      }
+    ],
+    "kpi_calculators": [
+      {
+        "id": "force.bdss.bundles.enthought.kpi_adder",
+        "model_data": {
+          "cuba_type_in": "PRESSURE",
+          "cuba_type_out": "TOTAL_PRESSURE"
+        }
+      }
+    ]
+  }
+}
diff --git a/force_bdss/cli/tests/fixtures/test_csv_corrupted.json b/force_bdss/tests/fixtures/test_csv_corrupted.json
similarity index 100%
rename from force_bdss/cli/tests/fixtures/test_csv_corrupted.json
rename to force_bdss/tests/fixtures/test_csv_corrupted.json
diff --git a/force_bdss/tests/fixtures/test_csv_v2.json b/force_bdss/tests/fixtures/test_csv_v2.json
new file mode 100644
index 0000000000000000000000000000000000000000..2479fa8781ee6c6e16739c43dfa2c7737db04a5b
--- /dev/null
+++ b/force_bdss/tests/fixtures/test_csv_v2.json
@@ -0,0 +1,5 @@
+{
+  "version": "2",
+  "workflow": {
+  }
+}
diff --git a/force_bdss/workspecs/data_source.py b/force_bdss/workspecs/data_source.py
deleted file mode 100644
index bad5752c63bdf9bb9b3d76256592d9ec875c53c4..0000000000000000000000000000000000000000
--- a/force_bdss/workspecs/data_source.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from traits.api import HasStrictTraits, String, Dict
-
-
-class DataSource(HasStrictTraits):
-    id = String()
-    model_data = Dict()
-
-    @classmethod
-    def from_json(cls, json_data):
-        self = cls(
-            id=json_data["id"],
-            model_data=json_data["model_data"]
-        )
-
-        return self
diff --git a/force_bdss/workspecs/kpi_calculator.py b/force_bdss/workspecs/kpi_calculator.py
deleted file mode 100644
index b8066259538af6b8e19168d4404a432e3f2bdadc..0000000000000000000000000000000000000000
--- a/force_bdss/workspecs/kpi_calculator.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from traits.api import HasStrictTraits, String, Dict
-
-
-class KPICalculator(HasStrictTraits):
-    id = String()
-    model_data = Dict()
-
-    @classmethod
-    def from_json(cls, json_data):
-        self = cls(
-            id=json_data["id"],
-            model_data=json_data["model_data"]
-        )
-
-        return self
diff --git a/force_bdss/workspecs/multi_criteria_optimizer.py b/force_bdss/workspecs/multi_criteria_optimizer.py
deleted file mode 100644
index efa4bfd9428e7c8e6a4d093649b0fc07d79c81a8..0000000000000000000000000000000000000000
--- a/force_bdss/workspecs/multi_criteria_optimizer.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from traits.api import HasStrictTraits, String, Dict
-
-
-class MultiCriteriaOptimizer(HasStrictTraits):
-    id = String()
-    model_data = Dict()
-
-    @classmethod
-    def from_json(cls, json_data):
-        self = cls(
-            id=json_data["id"],
-            model_data=json_data["model_data"]
-        )
-
-        return self
diff --git a/force_bdss/workspecs/workflow.py b/force_bdss/workspecs/workflow.py
index edc85c80f6f1770b08a15ce3dba17aa9080d61e7..b7963a0d4622888562f05743e8d3dfdc8194b73e 100644
--- a/force_bdss/workspecs/workflow.py
+++ b/force_bdss/workspecs/workflow.py
@@ -1,48 +1,20 @@
-from traits.api import HasStrictTraits, Instance, String, List
+from traits.api import HasStrictTraits, Instance, List
 
-from force_bdss.workspecs.data_source import DataSource
-from force_bdss.workspecs.kpi_calculator import KPICalculator
-from .multi_criteria_optimizer import MultiCriteriaOptimizer
-
-SUPPORTED_FILE_VERSIONS = ["1"]
-
-
-class InvalidFileException(Exception):
-    pass
-
-
-class InvalidVersionException(InvalidFileException):
-    pass
+from ..data_sources.base_data_source_model import BaseDataSourceModel
+from ..kpi.base_kpi_calculator_model import BaseKPICalculatorModel
+from ..mco.base_mco_model import BaseMCOModel
 
 
 class Workflow(HasStrictTraits):
-    name = String()
-    multi_criteria_optimizer = Instance(MultiCriteriaOptimizer)
-    data_sources = List(DataSource)
-    kpi_calculators = List(KPICalculator)
-
-    @classmethod
-    def from_json(cls, json_data):
-        try:
-            version = json_data["version"]
-        except KeyError:
-            raise InvalidFileException("Corrupted input file, no version"
-                                       " specified")
-
-        if version not in SUPPORTED_FILE_VERSIONS:
-            raise InvalidVersionException(
-                "File version {} not supported".format(json_data["version"]))
-
-        self = cls(
-            multi_criteria_optimizer=MultiCriteriaOptimizer.from_json(
-                json_data["multi_criteria_optimizer"]
-            ),
-            data_sources=[
-                DataSource.from_json(data_source_data)
-                for data_source_data in json_data["data_sources"]],
-            kpi_calculators=[
-                KPICalculator.from_json(kpi_calculator_data)
-                for kpi_calculator_data in json_data["kpi_calculators"]]
-        )
-
-        return self
+    """Model object that represents the Workflow as a whole"""
+    #: Contains the bundle-specific MCO Model object.
+    #: Can be None if no MCO has been specified yet.
+    multi_criteria_optimizer = Instance(BaseMCOModel, allow_none=True)
+
+    #: Contains the bundle-specific DataSource Model objects.
+    #: The list can be empty.
+    data_sources = List(BaseDataSourceModel)
+
+    #: Contains the bundle-specific KPI Calculator Model objects.
+    #: The list can be empty.
+    kpi_calculators = List(BaseKPICalculatorModel)
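
Reviewer note: with this refactoring the Workflow aggregates bundle-created model objects rather than raw id/model_data pairs. A sketch of the resulting model tree, assuming `mco_bundle` and `csv_bundle` were obtained from the registry (`mco_bundle_by_id` / `data_source_bundle_by_id`):

```python
from force_bdss.workspecs.workflow import Workflow

workflow = Workflow(
    multi_criteria_optimizer=mco_bundle.create_model(
        {"value_types": ["DUMMY"]}),
    data_sources=[
        csv_bundle.create_model({"filename": "foo.csv", "row": 3,
                                 "column": 5, "cuba_type": "PRESSURE"}),
    ],
)
# Each model keeps a reference to its originating bundle, which is what
# WorkflowWriter and the core drivers now rely on.
```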