From 0a5b7a63405e0576adc595f92b58e5358821e3dc Mon Sep 17 00:00:00 2001
From: Arthur Le Bars <arthur.le-bars@sb-roscoff.fr>
Date: Tue, 19 Jan 2021 17:09:00 +0100
Subject: [PATCH] run_workflow changes (functional again), some optimisation in
 gga_init, gga_load and run_workflow, more validation steps in gga_init

---
 config => examples/config                     |   2 +-
 examples/config_demo.yaml                     |  21 ++
 .../{yml_example_input.yml => example.yml}    |   0
 examples/{demo.yaml => input_demo.yaml}       |   0
 gga_init.py                                   |  46 +--
 gga_load_data.py                              |   5 +-
 run_workflow_phaeoexplorer.py                 |  74 +++--
 ..._users.yml => authelia_users_template.yml} |   0
 templates/compose_template.bk.yml             | 302 ------------------
 ...late.yml => gspecies_compose_template.yml} |   0
 ..._nginx.conf => nginx_apollo_template.conf} |   0
 ...ginx.conf => nginx_download_template.conf} |   0
 ...aefik.yml => traefik_compose_template.yml} |   0
 utils/docker_compose_generator.py             |  73 +++++
 utils/metadata_generator.py                   |  36 +++
 workflows/phaeoexplorer_base_workflow.ga      |   1 -
 workflows/phaeoexplorer_jbrowse_workflow.ga   |   1 -
 17 files changed, 203 insertions(+), 358 deletions(-)
 rename config => examples/config (96%)
 create mode 100644 examples/config_demo.yaml
 rename examples/{yml_example_input.yml => example.yml} (100%)
 rename examples/{demo.yaml => input_demo.yaml} (100%)
 rename templates/{authelia_users.yml => authelia_users_template.yml} (100%)
 delete mode 100644 templates/compose_template.bk.yml
 rename templates/{compose_template.yml => gspecies_compose_template.yml} (100%)
 rename templates/{apollo_nginx.conf => nginx_apollo_template.conf} (100%)
 rename templates/{download_nginx.conf => nginx_download_template.conf} (100%)
 rename templates/{traefik.yml => traefik_compose_template.yml} (100%)
 create mode 100755 utils/docker_compose_generator.py
 create mode 100755 utils/metadata_generator.py
 delete mode 100644 workflows/phaeoexplorer_base_workflow.ga
 delete mode 100644 workflows/phaeoexplorer_jbrowse_workflow.ga

diff --git a/config b/examples/config
similarity index 96%
rename from config
rename to examples/config
index 0501bd7..527f310 100644
--- a/config
+++ b/examples/config
@@ -3,7 +3,7 @@
 
 # "all" section contains variables used by several services at once or the paths to import sensible files that cannot be procedurally generated/formatted using the scripts
 all:
-      custom_host: your_host  # The hosting machine name
+      custom_host: localhost  # The hosting machine name
       custom_authelia_config_path: /path/to/your/authelia/config  # The path to the authelia config yml to use, an example is available in the "examples" folder at the repo root
 # "galaxy" section contains variables used to set up the galaxy service
 galaxy:
diff --git a/examples/config_demo.yaml b/examples/config_demo.yaml
new file mode 100644
index 0000000..527f310
--- /dev/null
+++ b/examples/config_demo.yaml
@@ -0,0 +1,21 @@
+# This is the configuration file used by the gga_init.py, gga_load_data.py and run_workflow.py scripts
+# It contains (sensitive) variables to set up the different docker services
+
+# The "all" section contains variables used by several services at once, or paths to sensitive files that cannot be procedurally generated/formatted using the scripts
+all:
+      custom_host: localhost  # The hosting machine name
+      custom_authelia_config_path: /path/to/your/authelia/config  # The path to the authelia config yml to use, an example is available in the "examples" folder at the repo root
+# "galaxy" section contains variables used to set up the galaxy service
+galaxy:
+      custom_galaxy_default_admin_email: admin@galaxy.org  # The default admin email (used to connect to the instance)
+      custom_galaxy_default_admin_user: admin  # The default admin user
+      custom_galaxy_default_admin_password: password  # The default admin password (used to connect to the instance)
+      custom_galaxy_config_master_api_key: master  # The master API key (not in use at the moment so you can skip this)
+      custom_galaxy_tripal_password: galaxy_tripal_password  # Same as custom_tripal_admin_password (connection to db relies on this)
+      custom_galaxy_web_apollo_user: admin_apollo@galaxy.org  # Apollo user
+      custom_galaxy_web_apollo_password: galaxy_apollo_password  # Apollo password tied to the user above
+tripal:
+      custom_tripal_admin_password: galaxy_tripal_password  # Same as custom_galaxy_tripal_password (connection to db relies on this)
+      custom_banner: /path/to/banner  # Custom banner, TODO: defaults to a generic banner
+      custom_theme: "abims_gga"  # Set this to use another theme
+      custom_theme_git_clone: "http://gitlab.sb-roscoff.fr/abims/e-infra/tripal_gga.git"  # Set this to install a theme other than the default one
\ No newline at end of file
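For reference, a minimal sketch of how such a config can be consumed, assuming PyYAML and a hypothetical flattening of the per-service sections (gga_init.py looks up keys like "custom_banner" without a section prefix):

    import yaml

    # Read the demo config; each top-level section ("all", "galaxy", "tripal")
    # holds a dict of variables.
    with open("examples/config_demo.yaml") as f:
        sections = yaml.safe_load(f)

    # Hypothetical flattening step: merge all sections into one dict so keys
    # like "custom_host" can be looked up directly.
    config = {}
    for section_content in sections.values():
        config.update(section_content)

    print(config["custom_host"])  # -> "localhost"
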
diff --git a/examples/yml_example_input.yml b/examples/example.yml
similarity index 100%
rename from examples/yml_example_input.yml
rename to examples/example.yml
diff --git a/examples/demo.yaml b/examples/input_demo.yaml
similarity index 100%
rename from examples/demo.yaml
rename to examples/input_demo.yaml
diff --git a/gga_init.py b/gga_init.py
index 9fd72ae..393e5cc 100644
--- a/gga_init.py
+++ b/gga_init.py
@@ -49,23 +49,27 @@ class DeploySpeciesStack(speciesData.SpeciesData):
             logging.info("Updating directory tree for %s" % self.full_name)
         try:
             os.chdir(self.species_dir)
-            working_dir = os.getcwd()
         except OSError:
             logging.critical("Cannot access " + self.species_dir + ", run with higher privileges")
             sys.exit()
 
         # Copy the custom banner to the species dir (banner used in tripal pages)
-        # To change the banner, replace the "banner.png" file in the "misc" folder of the archive
-        if not os.path.isfile("%s/banner.png" % self.species_dir):
-            shutil.copy("%s/misc/banner.png" % self.script_dir, "%s/banner.png" % self.species_dir)
+        if self.config["custom_banner"] and self.config["custom_banner"] != "/path/to/banner":
+            try:
+                if os.path.isfile(os.path.abspath(self.config["custom_banner"])):
+                    shutil.copy(os.path.abspath(self.config["custom_banner"]), "%s/banner.png" % self.species_dir)
+            except FileNotFoundError:
+                logging.warning("Custom banner not found (%s), skipping" % self.config["custom_banner"])
+
+        # Copy nginx conf
         try:
             os.mkdir("./nginx/")
             os.mkdir("./nginx/conf")
             with open(os.path.abspath("./nginx/conf/default.conf"), 'w') as conf:
                 conf.write("server {\n\tlisten 80;\n\tserver_name ~.;\n\tlocation /download/ {\n\t\talias /project_data/; \n\t\tautoindex on;\n\t}\n}")  # The species nginx conf
         except FileExistsError:
-            logging.debug("NginX conf exists")
+            logging.debug("Nginx conf already exists, skipping")
 
 
         organism_annotation_dir, organism_genome_dir = None, None
@@ -116,6 +120,8 @@ class DeploySpeciesStack(speciesData.SpeciesData):
         # Return to main_dir
         os.chdir(self.main_dir)
 
+        logging.info("Directory tree generated for %s" % self.full_name)
+
 
     def make_compose_files(self):
         """
@@ -132,10 +138,10 @@ class DeploySpeciesStack(speciesData.SpeciesData):
             sys.exit(0)
 
         # Path to the templates used to generate the custom docker-compose files for an input species
-        stack_template_path = self.script_dir + "/templates/compose_template.yml"
-        traefik_template_path = self.script_dir + "/templates/traefik.yml"
+        stack_template_path = self.script_dir + "/templates/gspecies_compose_template.yml"
+        traefik_template_path = self.script_dir + "/templates/traefik_compose_template.yml"
         # authelia_config_path = self.script_dir + "/templates/authelia_config_example.yml"  # Do not copy the authelia config!
-        authelia_users_path = self.script_dir + "/templates/authelia_users.yml"
+        authelia_users_path = self.script_dir + "/templates/authelia_users_template.yml"
 
         # Set the genus_species_strain_sex var, used
         genus_species_strain_sex = "{0}_{1}".format(self.genus.lower(), self.species)
@@ -172,26 +178,21 @@ class DeploySpeciesStack(speciesData.SpeciesData):
             # Create mounts for the current docker-compose
             self.create_mounts(working_dir=self.species_dir)
 
-            # TODO: obsolete?
-            # Call create_mounts.py (replace subprocess.DEVNULL by subprocess.PIPE to get script stdout and stderr back)
-            # subprocess.call(["python3", self.script_dir + "/create_mounts.py"], cwd=self.species_dir,
-            #                 stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)  # Create mounts for the containers
-
-        # Store the traefik directory path to be able to create volumes for the traefik containers
-        traefik_dir = None
         try:
             os.chdir(os.path.abspath(self.main_dir))
             os.mkdir("./traefik")
             os.mkdir("./traefik/authelia")
             if self.config["custom_authelia_config_path"]:
-                print("Authelia configuration found in the config file, placing it in ./traefik/authelia/")
-
-            # if not os.path.isfile("../traefik/authelia/configuration.yml"):  # TODO: obsolete?
-            #     shutil.copy(authelia_config_path, "../traefik/authelia/configuration.yml")  # change variables by hand and adds the path of your authelia configuration in the config file
+                if os.path.isfile(os.path.abspath(self.config["custom_authelia_config_path"])):
+                    try:
+                        shutil.copy(os.path.abspath(self.config["custom_authelia_config_path"]), "./traefik/authelia")
+                    except FileNotFoundError:
+                        logging.critical("Cannot copy custom Authelia config file (%s)" % self.config["custom_authelia_config_path"])
+                        sys.exit()
+                else:
+                    logging.critical("Custom Authelia config file not found (%s)" % self.config["custom_authelia_config_path"])
             if not os.path.isfile("./traefik/authelia/users.yml"):
                 shutil.copy(authelia_users_path, "./traefik/authelia/users.yml")
-            # subprocess.call(["python3", self.script_dir + "/create_mounts.py"], cwd=self.species_dir,
-            #                 stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)  # Create mounts for the containers # TODO: obsolete?
         except FileExistsError:
             logging.debug("Traefik directory already exists: %s" % os.path.abspath("../traefik"))
         try:
@@ -206,7 +207,6 @@ class DeploySpeciesStack(speciesData.SpeciesData):
 
         # Create the mounts for the traefik+authelia containers
         self.create_mounts(working_dir=traefik_dir)
-        # subprocess.call(["python3", self.script_dir + "/create_mounts.py"], cwd=self.species_dir) # TODO: obsolete?
 
         os.chdir(self.main_dir)
 
@@ -384,7 +384,7 @@ if __name__ == "__main__":
 
         logging.info("Stack deployed for %s" % deploy_stack_for_current_organism.full_name)
 
-        # TODO: IF GENUS°1 == GENUS°2 AND SP°1 == SP°2 --> SKIP INIT, CONTINUE TO NEXT ITEM IN INPUT
+        # TODO: IF GENUS #1 == GENUS #2 AND SP #1 == SP #2 --> SKIP INIT, CONTINUE TO NEXT ITEM IN INPUT (DEPLOY AT THE END)
 
     # TODO: RELOAD TRAEFIK OUTSIDE LOOP
     logging.info("All stacks deployed for organisms in input file %s" % args.input)
diff --git a/gga_load_data.py b/gga_load_data.py
index cff2747..ea60431 100644
--- a/gga_load_data.py
+++ b/gga_load_data.py
@@ -628,6 +628,7 @@ if __name__ == "__main__":
         logging.basicConfig(level=logging.DEBUG)
     else:
         logging.basicConfig(level=logging.INFO)
+    logging.getLogger("urllib3").setLevel(logging.WARNING)
 
     # Parsing the config file if provided, using the default config otherwise
     if not args.config:
@@ -703,12 +704,10 @@ if __name__ == "__main__":
             logging.info("Successfully set up library in galaxy for %s" % load_data_for_current_species.full_name)
 
             # # Set or get the history for the current organism
-            # load_data_for_current_species.set_get_history()
+            load_data_for_current_species.set_get_history()
             
             # Remove H. sapiens from database if here TODO: set a dedicated history for removing H. sapiens (instead of doing it into a species history)
-            # logging.info("Removing H. sapiens from Chado database for %s" % load_data_for_current_species.full_name)
             load_data_for_current_species.remove_homo_sapiens_from_db()
-            # logging.info("Successfully removed H. sapiens from Chado database for %s" % load_data_for_current_species.full_name)
 
             # logging.info("Importing datasets into history for %s" % load_data_for_current_species.full_name)
             # load_data_for_current_species.import_datasets_into_history()
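Raising urllib3's log level matters because bioblend performs one HTTP request per Galaxy API call, so at DEBUG level every request is echoed. A minimal sketch of the per-library logging pattern used here:

    import logging

    logging.basicConfig(level=logging.DEBUG)                 # our own messages stay verbose
    logging.getLogger("urllib3").setLevel(logging.WARNING)   # silence per-request noise
    logging.getLogger("bioblend").setLevel(logging.INFO)     # keep bioblend's own info messages
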
diff --git a/run_workflow_phaeoexplorer.py b/run_workflow_phaeoexplorer.py
index 35e645f..5485c6c 100644
--- a/run_workflow_phaeoexplorer.py
+++ b/run_workflow_phaeoexplorer.py
@@ -102,7 +102,9 @@ class RunWorkflow(speciesData.SpeciesData):
 
         # Find genome folder datasets
         genome_fasta_ldda_id = genome_folder_content["folder_contents"][0]["ldda_id"]
-                
+
+        annotation_gff_ldda_id, annotation_proteins_ldda_id, annotation_transcripts_ldda_id = None, None, None
+
         # Several dicts in the annotation folder content (one dict = one file)
         for k, v in annotation_folder_content.items():
             if k == "folder_contents":
@@ -122,13 +124,11 @@ class RunWorkflow(speciesData.SpeciesData):
         return {"history_id": self.history_id, "library_id": self.library_id, "datasets": self.datasets}
 
 
-
     # def import_datasets_to_history(self):
     #     """
     #     Load the datasets into the current species history
-
+    #     OBSOLETE
     #     """
-
     #     logging.info("Uploading datasets into history %s" % self.history_id)
     #     self.instance.histories.upload_dataset_from_library(history_id=self.history_id, lib_dataset_id=self.datasets["genome_file"])
     #     self.instance.histories.upload_dataset_from_library(history_id=self.history_id, lib_dataset_id=self.datasets["gff_file"])
@@ -234,7 +234,7 @@ class RunWorkflow(speciesData.SpeciesData):
         :return:
         """
 
-        logging.info("importing workflow: " + str(workflow_path))
+        logging.info("Importing workflow: " + str(workflow_path))
         workflow_name = "demo"  # for workflow demo
         workflow_ga_file = workflow_path
 
@@ -271,12 +271,12 @@ class RunWorkflow(speciesData.SpeciesData):
             workflow_dict = json.load(ga_in_file)
 
             self.instance.workflows.import_workflow_dict(workflow_dict=workflow_dict)
-            self.workflow_name = workflow_name
-            workflow_attributes = self.instance.workflows.get_workflows(name=self.workflow_name)
+            workflow_attributes = self.instance.workflows.get_workflows(name=workflow_name)
             workflow_id = workflow_attributes[0]["id"]
             show_workflow = self.instance.workflows.show_workflow(workflow_id=workflow_id)
             logging.debug("Workflow ID: " + workflow_id)
 
+            logging.info("Running workflow: %s" % workflow_name)
             self.instance.workflows.invoke_workflow(workflow_id=workflow_id,
                                                     history_id=self.history_id,
                                                     params=workflow_parameters,
@@ -287,7 +287,7 @@ class RunWorkflow(speciesData.SpeciesData):
 
     def import_datasets_into_history(self):
         """
-        Find datasets in a library, get their ID and import thme into the current history if they are not already
+        Find datasets in a library, get their ID and import them into the current history if they are not already
 
         :return:
         """
@@ -356,6 +356,8 @@ class RunWorkflow(speciesData.SpeciesData):
         Retrieve current organism ID and OGS and genome chado analyses IDs (needed to run some tools as Tripal/Chado
         doesn't accept organism/analyses names as valid inputs
 
+        WARNING: It is mandatory to call this function before invoking a workflow
+
         :return:
         """
         # Get the ID for the current organism in chado
@@ -434,6 +436,8 @@ if __name__ == "__main__":
         logging.basicConfig(level=logging.DEBUG)
     else:
         logging.basicConfig(level=logging.INFO)
+    logging.getLogger("urllib3").setLevel(logging.INFO)
+    logging.getLogger("bioblend").setLevel(logging.INFO)
 
     # Parsing the config file if provided, using the default config otherwise
     if not args.config:
@@ -489,13 +493,14 @@ if __name__ == "__main__":
 
         # Prepare the instance+history for the current organism (add organism and analyses in Chado) --> add argument? 
         # (althought there is no risk as chado refuses to duplicate an analysis/organism)
-        run_workflow_for_current_organism.prepare_history()
+        # run_workflow_for_current_organism.prepare_history()
 
         # Get the attributes of the instance and project data files
         run_workflow_for_current_organism.get_instance_attributes()
+        run_workflow_for_current_organism.get_organism_and_analyses_ids()
 
         # Import datasets into history (needs to be done in gga_load_data??)
-        run_workflow_for_current_organism.import_datasets_into_history()
+        # run_workflow_for_current_organism.import_datasets_into_history()
 
 
         workflow_parameters = dict()
@@ -506,21 +511,18 @@ if __name__ == "__main__":
         # DEMO WORKFLOW
         PARAM_LOAD_FASTA_IN_HISTORY = "0"
         PARAM_LOAD_FASTA_IN_CHADO = "1"
-        # PARAM_SYNC_ORGANISM_INTO_TRIPAL = "2"
-        # PARAM_SYNC_GENOME_ANALYSIS_INTO_TRIPAL = "3"
-        # PARAM_SYNC_FEATURES_INTO_TRIPAL = "4"
 
-        # Mapping parameters
+        # Workflow inputs
         workflow_parameters[PARAM_LOAD_FASTA_IN_HISTORY] = {}
         workflow_parameters[PARAM_LOAD_FASTA_IN_CHADO] = {"organism": run_workflow_for_current_organism.org_id,
-                                                        "analysis_id": run_workflow_for_current_organism.genome_analysis_id,
-                                                        "do_update": "true"}
-        # workflow_parameters[PARAM_SYNC_ORGANISM_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
-        # workflow_parameters[PARAM_SYNC_GENOME_ANALYSIS_INTO_TRIPAL] = {"analysis_id": run_workflow_for_current_organism.ogs_analysis_id}
-        # workflow_parameters[PARAM_SYNC_FEATURES_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
-
+                                                          "analysis_id": run_workflow_for_current_organism.genome_analysis_id,
+                                                          "do_update": "true"}
+        # Change "do_update": "true" to "do_update": "false" to prevent appending to the fasta file in chado
+        # It is safer never to update it; instead, delete and restart the galaxy+tripal services entirely (no workaround at the moment)
         run_workflow_for_current_organism.datamap = dict()
-        run_workflow_for_current_organism.datamap[PARAM_LOAD_FASTA_IN_HISTORY] = {"src": "hda", "id": run_workflow_for_current_organism.datasets["genome_file"]}
+
+        # Datamap for input datasets - dataset source (type): ldda (LibraryDatasetDatasetAssociation)
+        run_workflow_for_current_organism.datamap[PARAM_LOAD_FASTA_IN_HISTORY] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["genome_file"]}
 
 
         """COMMENTED FOR THE DEMO"""
@@ -548,13 +550,31 @@ if __name__ == "__main__":
         # workflow_parameters[PARAM_SYNC_OGS_ANALYSIS_INTO_TRIPAL] = {"analysis_id": run_workflow_for_current_organism.genome_analysis_id}
         # workflow_parameters[PARAM_SYNC_FEATURES_INTO_TRIPAL] = {"organism_id": run_workflow_for_current_organism.org_id}
 
-        # # Loading files into history works a bit different than the others as it's not a GMOD tool but a standard Galaxy tool
-        # # It requires this additional "datamap" (conveniently named "datamap" here), requiring the source type of the file and its corresponding ID (unique)
+        # Loading files into history works a bit differently from the other steps, as it's not a GMOD tool but a standard Galaxy tool
+        # It requires an additional "datamap" (conveniently named "datamap" here) giving the source type of each file and its corresponding (unique) ID
+        # From the bioblend docs:
+        # """
+        # A mapping of workflow inputs to datasets and dataset collections.
+        # The datasets source can be a LibraryDatasetDatasetAssociation (``ldda``),
+        # LibraryDataset (``ld``), HistoryDatasetAssociation (``hda``), or
+        # HistoryDatasetCollectionAssociation (``hdca``).
+        #
+        # The map must be in the following format:
+        # ``{'<input_index>': {'id': <encoded dataset ID>, 'src': '[ldda, ld, hda, hdca]'}}``
+        # (e.g. ``{'2': {'id': '29beef4fadeed09f', 'src': 'hda'}}``)
+        #
+        # This map may also be indexed by the UUIDs of the workflow steps,
+        # as indicated by the ``uuid`` property of steps returned from the
+        # Galaxy API. Alternatively workflow steps may be addressed by
+        # the label that can be set in the workflow editor. If using
+        # uuid or label you need to also set the ``inputs_by`` parameter
+        # to ``step_uuid`` or ``name``.
+        # """
         # run_workflow_for_current_organism.datamap = dict()
-        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE1_INTO_HISTORY] = {"src": "hda", "id": run_workflow_for_current_organism.datasets["genome_file"]}
-        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE2_INTO_HISTORY] = {"src": "hda", "id": run_workflow_for_current_organism.datasets["gff_file"]}
-        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE3_INTO_HISTORY] = {"src": "hda", "id": run_workflow_for_current_organism.datasets["proteins_file"]}
-        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE4_INTO_HISTORY] = {"src": "hda", "id": run_workflow_for_current_organism.datasets["transcripts_file"]}
+        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE1_INTO_HISTORY] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["genome_file"]}
+        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE2_INTO_HISTORY] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["gff_file"]}
+        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE3_INTO_HISTORY] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["proteins_file"]}
+        # run_workflow_for_current_organism.datamap[PARAM_LOAD_FILE4_INTO_HISTORY] = {"src": "ldda", "id": run_workflow_for_current_organism.datasets["transcripts_file"]}
 
         # Run the workflow with the parameters set above
         run_workflow_for_current_organism.run_workflow(workflow_path=workflow,
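To make the datamap rules quoted above concrete, a minimal bioblend sketch of invoking a workflow with one library dataset as input; the URL, API key and IDs are placeholders:

    from bioblend import galaxy

    instance = galaxy.GalaxyInstance(url="http://localhost/sp/genus_species/galaxy", key="<api key>")

    # Datamap indexed by input step index; "src" follows the rules quoted above
    datamap = {"0": {"src": "ldda", "id": "<encoded genome dataset ID>"}}

    # Tool parameters for the "Chado load fasta" step
    params = {"1": {"organism": "<org_id>", "analysis_id": "<genome_analysis_id>", "do_update": "true"}}

    instance.workflows.invoke_workflow(workflow_id="<workflow_id>",
                                       history_id="<history_id>",
                                       inputs=datamap,
                                       params=params)
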
diff --git a/templates/authelia_users.yml b/templates/authelia_users_template.yml
similarity index 100%
rename from templates/authelia_users.yml
rename to templates/authelia_users_template.yml
diff --git a/templates/compose_template.bk.yml b/templates/compose_template.bk.yml
deleted file mode 100644
index 103757f..0000000
--- a/templates/compose_template.bk.yml
+++ /dev/null
@@ -1,302 +0,0 @@
-# ./docker_data is created and filled with persistent data that should be backuped
-
-version: '3.7'
-services:
-    proxy:
-        image: quay.io/abretaud/nginx-ldap:latest
-        volumes:
-            - ./src_data/:/project_data/
-            #- /groups/XXX/:/groups/XXX/:ro  # We do this when we have symlinks in src_data pointing to /groups/XXX/...
-            - ./nginx/conf:/etc/nginx/conf.d
-        networks:
-            - traefikbig
-            - genus_species
-        deploy:
-          labels:
-            # Download page
-            - "traefik.http.routers.genus_species-nginx.rule=(Host(`scratchgmodv1`) && PathPrefix(`/sp/genus_species/download`))"
-#            - "traefik.http.routers.genus_species-nginx.tls=true"
-#            - "traefik.http.routers.genus_species-nginx.entryPoints=webs"
-            - "traefik.http.routers.genus_species-nginx.entryPoints=web" # lg
-#            - "traefik.http.routers.genus_species-nginx.middlewares=sp-auth,sp-app-trailslash,sp-prefix"
-            - "traefik.http.routers.genus_species-nginx.middlewares=sp-app-trailslash,sp-prefix" # lg
-            - "traefik.http.services.genus_species-nginx.loadbalancer.server.port=80"
-          restart_policy:
-            condition: on-failure
-            delay: 5s
-            max_attempts: 3
-            window: 120s
-
-    tripal:
-        image: quay.io/galaxy-genome-annotation/tripal:v2.x
-        depends_on:
-            - tripal-db
-            - elasticsearch
-        volumes:
-            - ./docker_data/galaxy/:/export/:ro
-            - ./src_data/:/project_data/:ro
-            - ./src_data:/data:ro
-            #- /groups/XXX/:/groups/XXX/:ro  # We do this when we have symlinks in src_data pointing to /groups/XXX/...
-        environment:
-            DB_HOST: tripal-db.genus_species
-            BASE_URL_PATH: /sp/genus_species
-            UPLOAD_LIMIT: 20M
-            MEMORY_LIMIT: 512M
-            TRIPAL_GIT_CLONE_MODULES: "https://github.com/abretaud/tripal_rest_api.git[@c6f9021ea5d4c6d7c67c5bd363a7dd9359228bbc] https://github.com/tripal/tripal_elasticsearch.git[@dc7f276046e394a80a7dfc9404cf1a149006eb2a] https://github.com/tripal/tripal_analysis_interpro.git https://github.com/tripal/tripal_analysis_go.git https://github.com/tripal/tripal_analysis_blast.git  https://github.com/tripal/tripal_analysis_expression.git[@7240039fdeb4579afd06bbcb989cb7795bd4c342]"
-            TRIPAL_DOWNLOAD_MODULES: ""
-            TRIPAL_ENABLE_MODULES: "tripal_analysis_blast tripal_analysis_interpro tripal_analysis_go tripal_rest_api tripal_elasticsearch"
-            SITE_NAME: "Genus species"
-            ELASTICSEARCH_HOST: elasticsearch.genus_species
-            ENABLE_JBROWSE: /jbrowse/?data=data/genus_species_strain_sex
-            ENABLE_APOLLO: 0
-            ENABLE_BLAST: 1
-            ENABLE_DOWNLOAD: 1
-            ENABLE_WIKI: 0
-            ENABLE_GO: 0
-            ENABLE_ORTHOLOGY: 0
-            ENABLE_ORTHOLOGY_LINKS: 0
-            THEME: "abims"    # Use this to use another theme
-            THEME_GIT_CLONE: "http://gga:BuH1_aG5@gitlab.sb-roscoff.fr/abims/e-infra/tripal_abims"    # Use this to install another theme
-            ADMIN_PASSWORD: 23fN,Ajt  # You need to define it and update it in galaxy config below --> change in prod (set a password in script init?)
-        networks:
-            - traefikbig
-            - genus_species
-        deploy:
-          labels:
-            - "traefik.http.routers.genus_species-tripal.rule=(Host(`scratchgmodv1`) && PathPrefix(`/sp/genus_species`))"
-#            - "traefik.http.routers.genus_species-tripal.tls=true"
-#            - "traefik.http.routers.genus_species-tripal.entryPoints=webs"
-            - "traefik.http.routers.genus_species-tripal.entryPoints=web" # lg
-#            - "traefik.http.routers.genus_species-tripal.middlewares=sp-auth,sp-trailslash,sp-prefix,tripal-addprefix"
-            - "traefik.http.routers.genus_species-tripal.middlewares=sp-trailslash,sp-prefix,tripal-addprefix" # lg
-            - "traefik.http.services.genus_species-tripal.loadbalancer.server.port=80"
-          restart_policy:
-            condition: on-failure
-            delay: 5s
-            max_attempts: 3
-            window: 120s
-
-    tripal-db:
-        image: quay.io/galaxy-genome-annotation/chado:1.31-jenkins26-pg9.5
-        environment:
-            - POSTGRES_PASSWORD=postgres
-            # The default chado image would try to install the schema on first run,
-            # we just want the tools to be available.
-            - INSTALL_CHADO_SCHEMA=0
-        volumes:
-            - ./docker_data/tripal_db/:/var/lib/postgresql/data/
-        networks:
-            - genus_species
-
-    elasticsearch:
-        image: docker.elastic.co/elasticsearch/elasticsearch:6.6.1
-        #deploy:
-          #resources:
-            #limits:
-              #memory: 500M
-        volumes:
-            - ./docker_data/elastic_search_index/:/usr/share/elasticsearch/data/
-        environment:
-            bootstrap.memory_lock: "true"
-            xpack.security.enabled: "false"
-            xpack.monitoring.enabled: "false"
-            xpack.ml.enabled: "false"
-            xpack.graph.enabled: "false"
-            xpack.watcher.enabled: "false"
-            cluster.routing.allocation.disk.threshold_enabled: "false"
-            ES_JAVA_OPTS: "-Xms500m -Xmx500m"
-            TAKE_FILE_OWNERSHIP: "true"
-        networks:
-            - genus_species
-
-    galaxy:
-        image: quay.io/galaxy-genome-annotation/docker-galaxy-annotation:gmod
-        volumes:
-            - ../galaxy_data_libs_SI.py:/opt/galaxy_data_libs_SI.py
-            - ./docker_data/galaxy/:/export/
-            - ./src_data/:/project_data/:ro
-            #- /groups/XXX/:/groups/XXX/:ro  # We do this when we have symlinks in src_data pointing to /groups/XXX/...
-            - ./docker_data/jbrowse/:/jbrowse/data/
-            - ./docker_data/apollo/:/apollo-data/
-            - ../galaxy_nginx.conf:/etc/nginx/uwsgi_params
-        environment:
-            NONUSE: nodejs,proftp,reports,condor
-            GALAXY_LOGGING: full
-            GALAXY_CONFIG_BRAND: "Genus species"
-            GALAXY_CONFIG_ALLOW_LIBRARY_PATH_PASTE: "True"
-            GALAXY_CONFIG_USE_REMOTE_USER: "True"
-            GALAXY_CONFIG_REMOTE_USER_MAILDOMAIN: "sb-roscoff.fr"
-            GALAXY_DEFAULT_ADMIN_EMAIL: "gga@sb-roscoff.fr"
-            GALAXY_DEFAULT_ADMIN_USER: "gga"
-            GALAXY_DEFAULT_ADMIN_PASSWORD: "password"
-            GALAXY_CONFIG_ADMIN_USERS: "admin@galaxy.org, gga@sb-roscoff.fr, lgueguen@sb-roscoff.fr, alebars@sb-roscoff.fr"   # admin@galaxy.org is the default (leave it), gogepp@bipaa is a shared ldap user we use to connect
-            GALAXY_CONFIG_MASTER_API_KEY: "master"
-            ENABLE_FIX_PERMS: 0
-            PROXY_PREFIX: /sp/genus_species/galaxy
-            GALAXY_TRIPAL_URL: http://tripal.genus_species/tripal/
-            GALAXY_TRIPAL_PASSWORD: 23fN,Ajt  # See tripal config above
-            GALAXY_WEBAPOLLO_URL: http://one-of-the-swarm-node:8888/apollo/
-            GALAXY_WEBAPOLLO_USER: "admin_apollo@sb-roscoff.fr"
-            GALAXY_WEBAPOLLO_PASSWORD: "Q65:dA,t"  # See tripal config below
-            GALAXY_WEBAPOLLO_EXT_URL: /apollo/
-            GALAXY_CHADO_DBHOST: tripal-db.genus_species
-            GALAXY_CHADO_DBSCHEMA: chado
-            GALAXY_AUTO_UPDATE_DB: 1
-            GALAXY_AUTO_UPDATE_CONDA: 1
-            GALAXY_AUTO_UPDATE_TOOLS: "/galaxy-central/tools_1.yaml"
-            GALAXY_SHARED_DIR: ""
-            BLAT_ENABLED: 1
-        networks:
-            - traefikbig
-            - genus_species
-        deploy:
-          labels:
-            - "traefik.http.routers.genus_species-galaxy.rule=(Host(`scratchgmodv1`) && PathPrefix(`/sp/genus_species/galaxy`))"
-#            - "traefik.http.routers.genus_species-galaxy.tls=true"
-#            - "traefik.http.routers.genus_species-galaxy.entryPoints=webs"
-            - "traefik.http.routers.genus_species-galaxy.entryPoints=web" #lg
-#            - "traefik.http.routers.genus_species-galaxy.middlewares=sp-auth,sp-app-trailslash,sp-app-prefix"
-            - "traefik.http.routers.genus_species-galaxy.middlewares=sp-app-trailslash,sp-app-prefix" #lg
-            - "traefik.http.services.genus_species-galaxy.loadbalancer.server.port=80"
-          restart_policy:
-            condition: on-failure
-            delay: 5s
-            max_attempts: 3
-            window: 120s
-
-    jbrowse:
-        image: quay.io/galaxy-genome-annotation/jbrowse:v1.16.8
-        volumes:
-            - ./docker_data/galaxy/:/export/:ro
-            - ./src_data/:/project_data/:ro
-            #- /groups/XXX/:/groups/XXX/:ro  # We do this when we have symlinks in src_data pointing to /groups/XXX/...
-            - ./docker_data/jbrowse/:/jbrowse/data/:ro
-        networks:
-            - traefikbig
-            - genus_species
-        deploy:
-          labels:
-            - "traefik.http.routers.genus_species-jbrowse.rule=(Host(`scratchgmodv1`) && PathPrefix(`/sp/genus_species/jbrowse`))"
-#            - "traefik.http.routers.genus_species-jbrowse.tls=true"
-#            - "traefik.http.routers.genus_species-jbrowse.entryPoints=webs"
-            - "traefik.http.routers.genus_species-jbrowse.entryPoints=web" # lg
-#            - "traefik.http.routers.genus_species-jbrowse.middlewares=sp-auth,sp-app-trailslash,sp-app-prefix"
-            - "traefik.http.routers.genus_species-jbrowse.middlewares=sp-app-trailslash,sp-app-prefix" #lg
-            - "traefik.http.services.genus_species-jbrowse.loadbalancer.server.port=80"
-          restart_policy:
-            condition: on-failure
-            delay: 5s
-            max_attempts: 3
-            window: 120s
-
-    blast:
-        image: quay.io/abretaud/sf-blast:latest
-        depends_on:
-            - blast-db
-        environment:
-            DB_HOST: blast-db.genus_species
-            UPLOAD_LIMIT: 20M
-            MEMORY_LIMIT: 128M
-            DB_NAME: 'postgres'
-            ADMIN_EMAIL: 'g.ga@sb-roscoff.fr'  # email sender
-            ADMIN_NAME: 'gga'  # email sender name
-            JOBS_METHOD: 'local'   # Can be local (= no sge jobs, but run inside the container) or drmaa (= to submit to a cluster)
-            JOBS_WORK_DIR: '/tmp/blast_jobs/'  # disk accessible both from compute nodes and mounted in this docker (at the same path)
-            CDD_DELTA_PATH: '/db/cdd_delta/current/flat/cdd_delta'
-            BLAST_TITLE: 'Genus species blast server'
-            JOBS_SCHED_NAME: 'blast_gspecies'    # job names
-            #PRE_CMD: '. /local/env/envblast-2.6.0.sh; . /local/env/envpython-3.7.1.sh;'    # executed at the beginning of each job
-            #APACHE_RUN_USER: 'bipaaweb'   # username known by sge
-            #APACHE_RUN_GROUP: 'bipaa'   # group known by sge
-            BASE_URL_PATH: '/sp/genus_species/blast/'
-            UID: 55914  # username known by sge (for drmaa mode only)
-            GID: 40259  # group known by sge (for drmaa mode only)
-            #JOBS_DRMAA_NATIVE: '-p web' # This line and following for slurm
-            #DRMAA_METHOD: 'slurm' # This line and following for slurm
-        volumes:
-            - ../blast-themes/abims/:/var/www/blast/app/Resources/:ro # You can theme the app
-            - /usr/local/genome2/:/usr/local/genome2/:ro # path for blast executables
-            - /db/:/db/:ro # for access to indexed blast databases
-            #- /data1/sge/:/usr/local/sge/:ro # an sge install
-            #- /xxxx/blast_jobs/:/xxxx/blast_jobs/ # (for drmaa mode only)
-            - ./blast/banks.yml:/var/www/blast/app/config/banks.yml:ro
-            - ./blast/links.yml:/etc/blast_links/links.yml:ro
-            #- /data1/slurm/slurm.conf:/etc/slurm-llnl/slurm.conf:ro # This line and following for slurm
-            #- /data1/slurm/gres.conf:/etc/slurm-llnl/gres.conf:ro
-            #- /data1/slurm/cgroup.conf:/etc/slurm-llnl/cgroup.conf:ro
-            #- /data1/slurm/slurmdbd.conf:/etc/slurm-llnl/slurmdbd.conf:ro
-            #- /data1/slurm/drmaa/:/etc/slurm-llnl/drmaa/:ro
-            #- /etc/munge/:/etc/munge/:ro
-        networks:
-            - traefikbig
-            - genus_species
-        deploy:
-          labels:
-            - "traefik.http.routers.genus_species-blast.rule=(Host(`scratchgmodv1`) && PathPrefix(`/sp/genus_species/blast`))"
-#            - "traefik.http.routers.genus_species-blast.tls=true"
-#            - "traefik.http.routers.genus_species-blast.entryPoints=webs"
-            - "traefik.http.routers.genus_species-blast.entryPoints=web" # lg
-#            - "traefik.http.routers.genus_species-blast.middlewares=sp-big-req,sp-auth,sp-app-trailslash,sp-app-prefix"
-            - "traefik.http.routers.genus_species-blast.middlewares=sp-big-req,sp-app-trailslash,sp-app-prefix" # lg
-            - "traefik.http.services.genus_species-blast.loadbalancer.server.port=80"
-          restart_policy:
-            condition: on-failure
-            delay: 5s
-            max_attempts: 3
-            window: 120s
-
-    blast-db:
-#        image: postgres:9.6-alpine
-        image: postgres:9.5
-        environment:
-            - POSTGRES_PASSWORD=postgres
-            - PGDATA=/var/lib/postgresql/data/
-        volumes:
-            - ./docker_data/blast_db/:/var/lib/postgresql/data/
-        networks:
-            - genus_species
-
-#    wiki:
-#        image: quay.io/abretaud/mediawiki
-#        environment:
-#            MEDIAWIKI_SERVER: http://localhost
-#            MEDIAWIKI_PROXY_PREFIX: /sp/genus_species/wiki
-#            MEDIAWIKI_SITENAME: Genus species
-#            MEDIAWIKI_SECRET_KEY: XXXXXXXXXX
-#            MEDIAWIKI_DB_HOST: wiki-db.genus_species
-#            MEDIAWIKI_DB_PASSWORD: password
-#            MEDIAWIKI_ADMIN_USER: abretaud   # ldap user
-#        depends_on:
-#            - wiki-db
-#        volumes:
-#            - ./docker_data/wiki_uploads:/images
-#            #- ../bipaa_wiki.png:/var/www/mediawiki/resources/assets/wiki.png:ro # To change the logo at the top left
-#        networks:
-#            - traefikbig
-#            - genus_species
-#        deploy:
-#          labels:
-#            - "traefik.http.routers.genus_species-blast.rule=(Host(`localhost`) && PathPrefix(`/sp/genus_species/blast`))"
-#            - "traefik.http.routers.genus_species-blast.tls=true"
-#            - "traefik.http.routers.genus_species-blast.entryPoints=webs"
-#            - "traefik.http.routers.genus_species-blast.middlewares=sp-big-req,sp-auth,sp-app-trailslash,sp-app-prefix"
-#            - "traefik.http.services.genus_species-blast.loadbalancer.server.port=80"
-#          restart_policy:
-#            condition: on-failure
-#            delay: 5s
-#            max_attempts: 3
-#            window: 120s
-
-#    wiki-db:
-#        image: postgres:9.6-alpine
-#        volumes:
-#            - ./docker_data/wiki_db/:/var/lib/postgresql/data/
-#        networks:
-#            - genus_species
-
-networks:
-    traefikbig:
-        external: true
-    genus_species:
-        driver: overlay
-        name: genus_species
diff --git a/templates/compose_template.yml b/templates/gspecies_compose_template.yml
similarity index 100%
rename from templates/compose_template.yml
rename to templates/gspecies_compose_template.yml
diff --git a/templates/apollo_nginx.conf b/templates/nginx_apollo_template.conf
similarity index 100%
rename from templates/apollo_nginx.conf
rename to templates/nginx_apollo_template.conf
diff --git a/templates/download_nginx.conf b/templates/nginx_download_template.conf
similarity index 100%
rename from templates/download_nginx.conf
rename to templates/nginx_download_template.conf
diff --git a/templates/traefik.yml b/templates/traefik_compose_template.yml
similarity index 100%
rename from templates/traefik.yml
rename to templates/traefik_compose_template.yml
diff --git a/utils/docker_compose_generator.py b/utils/docker_compose_generator.py
new file mode 100755
index 0000000..30747b1
--- /dev/null
+++ b/utils/docker_compose_generator.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import os
+import argparse
+import logging
+# import yaml
+# import ruamel.yaml
+# import json
+
+"""
+docker-compose_generator.py
+
+This method will write a formatted docker-compose.yml for the specified organism (only requires genus and species)
+"""
+
+
+class DockerComposeGenerator:
+
+    def __init__(self):
+        self.mode = None
+        self.genus = None
+        self.species = None
+        self.organism_template = None
+        self.traefik_template = None
+        self.organism_dir = None
+        self.main_dir = None
+
+    def generate(self):
+        if self.organism_template is None:
+            # Fall back to the organism template shipped with the repo
+            self.organism_template = str(os.getcwd() + "/templates/gspecies_compose_template.yml")
+        with open(self.organism_template, 'r') as infile:
+            organism_content = list()
+            for line in infile:
+                # Replace placeholders by the genus and species
+                organism_content.append(line.replace("genus_species", str(self.genus.lower() + "_" + self.species)).replace("Genus species", str(self.genus + " " + self.species)).replace("Genus/species", str(self.genus + "/" + self.species)).replace("gspecies", str(self.genus.lower()[0] + self.species)))
+            self.write_yml(content=organism_content, outdir=self.organism_dir)
+
+        if self.traefik_template is None:
+            # Fall back to the traefik template shipped with the repo
+            self.traefik_template = str(os.getcwd() + "/templates/traefik_compose_template.yml")
+        with open(self.traefik_template, 'r') as infile:
+            traefik_content = list()
+            for line in infile:
+                # Replace placeholders by the genus and species
+                traefik_content.append(line.replace("genus_species", str(self.genus.lower() + "_" + self.species)).replace("Genus species", str(self.genus + " " + self.species)).replace("Genus/species", str(self.genus + "/" + self.species)).replace("gspecies", str(self.genus.lower()[0] + self.species)))
+            self.write_yml(content=traefik_content, outdir=self.main_dir)
+
+    def write_yml(self, content, outdir):
+        with open(outdir + "/docker-compose.yml", 'w') as outfile:
+            for line in content:
+                outfile.write(line)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Generator of docker-compose.yml for GGA automated integration "
+                                                 "following the templates available @ "
+                                                 "https://gitlab.inria.fr/abretaud/genodock_demo/")
+    parser.add_argument("-g", "--genus", type=str, help="input genus")
+    parser.add_argument("-s", "--species", type=str, help="input species")
+    parser.add_argument("-o", "--organism-template", type=str, help="input organism template docker-compose.yml (compose or stack), optional")
+    parser.add_argument("-t", "--traefik-template", type=str, help="input organism template docker-compose.yml (compose or stack), optional")
+    parser.add_argument("-m", "--main-dir", type=str, help="where to write the output traefik docker-compose.yml (defaults to cd, autoload places it in main directory)")
+    parser.add_argument("-d", "--organism-dir", type=str, help="where to write the output organism docker-compose.yml (defaults to cd, autoload places it in organism directory)")
+    args = parser.parse_args()
+
+    dc_generator = DockerComposeGenerator()
+    dc_generator.genus = args.genus
+    dc_generator.species = args.species
+    if args.organism_template:
+        dc_generator.organism_template = args.organism_template
+    if args.traefik_template:
+        dc_generator.traefik_template = args.traefik_template
+    dc_generator.organism_dir = args.organism_dir if args.organism_dir else os.getcwd()
+    dc_generator.main_dir = args.main_dir if args.main_dir else os.getcwd()
+    dc_generator.generate()
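A usage sketch for the generator above, run from the repository root; the genus, species and output directories are illustrative:

    from utils.docker_compose_generator import DockerComposeGenerator

    dc_generator = DockerComposeGenerator()
    dc_generator.genus = "Undaria"
    dc_generator.species = "pinnatifida"
    dc_generator.organism_dir = "./undaria_pinnatifida"  # receives the organism docker-compose.yml
    dc_generator.main_dir = "."                          # receives the traefik docker-compose.yml
    dc_generator.generate()  # templates default to the ones under ./templates/
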
diff --git a/utils/metadata_generator.py b/utils/metadata_generator.py
new file mode 100755
index 0000000..c03ff0c
--- /dev/null
+++ b/utils/metadata_generator.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import os
+import logging
+import yaml
+
+"""
+Metadata generator for gga_auto_load
+
+Creates a file that summarizes actions taken by the autoload script (e.g what was done in the dedicated galaxy instance)
+This organism metadata file is located in the metadata directory of the organism directory (i.e /genus_species/metadata)
+By default, will also create/update a general metadata file (located in the parent directory i.e where all the organisms
+directories are located)
+
+TODO: move inside autoload
+
+Metadata format: .yml
+"""
+
+
+class MetadataGenerator:
+
+    def __init__(self, maindir):
+        self.maindir = maindir
+        self.genus = None
+        self.species = None
+        self.metadata = None
+        self.do_update = False
+        self.date = "01/01/2020"
+
+
+    # def read_metadata(self):
+    #     for label, content in metadata.items():
+    #         print("FOO")
+
+    def write_metadata(self):
+        with open(self.maindir + "/main_metadata.yml", "a") as metadata:
+            metadata.write("\n\nAdded " + self.genus + " " + self.species)
\ No newline at end of file
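A usage sketch for the class above; the directory and organism are illustrative:

    from utils.metadata_generator import MetadataGenerator

    metadata_generator = MetadataGenerator(maindir="/path/to/organisms")
    metadata_generator.genus = "Undaria"
    metadata_generator.species = "pinnatifida"
    metadata_generator.write_metadata()  # appends to /path/to/organisms/main_metadata.yml
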
diff --git a/workflows/phaeoexplorer_base_workflow.ga b/workflows/phaeoexplorer_base_workflow.ga
deleted file mode 100644
index 5af63fe..0000000
--- a/workflows/phaeoexplorer_base_workflow.ga
+++ /dev/null
@@ -1 +0,0 @@
-{"uuid": "7ebc1035-728c-4bca-a1c3-abd1c01bc064", "tags": [], "format-version": "0.1", "name": "preset_workflow", "version": 1, "steps": {"0": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "6956ef7f-7fec-402b-a8ea-f054a819f351", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 0, "uuid": "74f22d9b-e764-45e4-b0eb-579c9b647ea0", "errors": null, "name": "Input dataset", "label": null, "inputs": [], "position": {"top": 343.433349609375, "left": 201.33331298828125}, "annotation": "", "content_id": null, "type": "data_input"}, "1": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "efc230d5-5570-4446-b56b-c0213bef9ef0", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 1, "uuid": "6c1a20fa-828a-404c-b107-76fb8ddf3954", "errors": null, "name": "Input dataset", "label": null, "inputs": [], "position": {"top": 340.41668701171875, "left": 334.816650390625}, "annotation": "", "content_id": null, "type": "data_input"}, "2": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "90864336-6fc2-49fa-8f16-ccf11c64dc9a", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 2, "uuid": "1d25f54c-7575-4c8d-be55-73dd7e58613f", "errors": null, "name": "Input dataset", "label": null, "inputs": [], "position": {"top": 340.41668701171875, "left": 467.6333312988281}, "annotation": "", "content_id": null, "type": "data_input"}, "3": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "9e3d04a8-20f6-4f20-bfac-5a8b7df54557", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 3, "uuid": "89e7487e-004d-4db1-b5eb-1676b98aebde", "errors": null, "name": "Input dataset", "label": null, "inputs": [], "position": {"top": 337.6166687011719, "left": 600.4166717529297}, "annotation": "", "content_id": null, "type": "data_input"}, "4": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.2", "tool_version": "2.3.2", "outputs": [{"type": "json", "name": "results"}], "workflow_outputs": [], "input_connections": {"fasta": {"output_name": "output", "id": 0}, "wait_for": {"output_name": "output", "id": 0}}, "tool_state": "{\"do_update\": \"\\\"false\\\"\", \"relationships\": \"{\\\"__current_case__\\\": 0, \\\"rel_type\\\": \\\"none\\\"}\", \"ext_db\": \"{\\\"db\\\": \\\"\\\", \\\"re_db_accession\\\": \\\"\\\"}\", \"analysis_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"re_uniquename\": \"\\\"\\\"\", \"match_on_name\": \"\\\"false\\\"\", \"__page__\": null, \"__rerun_remap_job_id__\": null, \"psql_target\": \"{\\\"__current_case__\\\": 0, \\\"method\\\": \\\"remote\\\"}\", \"re_name\": \"\\\"\\\"\", \"fasta\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"organism\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"sequence_type\": \"\\\"contig\\\"\"}", "id": 4, "tool_shed_repository": {"owner": "gga", "changeset_revision": "1421dbc33a92", "name": "chado_feature_load_fasta", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "f3655d26-08b8-408e-bfef-6e8a4aaab355", "errors": null, "name": "Chado load fasta", "post_job_actions": {}, "label": null, "inputs": [{"name": "analysis_id", "description": "runtime parameter for tool Chado load fasta"}, {"name": "organism", "description": "runtime parameter for tool Chado load 
fasta"}], "position": {"top": 303.58331298828125, "left": 745.2333374023438}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_fasta/feature_load_fasta/2.3.2", "type": "tool"}, "5": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.2", "tool_version": "2.3.2", "outputs": [{"type": "txt", "name": "results"}], "workflow_outputs": [], "input_connections": {"fasta": {"output_name": "output", "id": 1}, "wait_for": {"output_name": "results", "id": 4}, "gff": {"output_name": "output", "id": 2}}, "tool_state": "{\"prot_naming\": \"{\\\"__current_case__\\\": 1, \\\"method\\\": \\\"regex\\\", \\\"re_protein\\\": \\\"protein\\\\\\\\1\\\", \\\"re_protein_capture\\\": \\\"^mRNA(\\\\\\\\..+)$\\\"}\", \"analysis_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"__page__\": null, \"gff\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"__rerun_remap_job_id__\": null, \"no_seq_compute\": \w"\\\"false\\\"\", \"psql_target\": \"{\\\"__current_case__\\\": 0, \\\"method\\\": \\\"remote\\\"}\", \"add_only\": \"\\\"false\\\"\", \"fasta\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"organism\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"landmark_type\": \"\\\"contig\\\"\"}", "id": 5, "tool_shed_repository": {"owner": "gga", "changeset_revision": "fb0651ee6d33", "name": "chado_feature_load_gff", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "236254d3-121e-4910-bcba-146d208a59a5", "errors": null, "name": "Chado load gff", "post_job_actions": {}, "label": null, "inputs": [{"name": "analysis_id", "description": "runtime parameter for tool Chado load gff"}, {"name": "organism", "description": "runtime parameter for tool Chado load gff"}], "position": {"top": 285.20001220703125, "left": 957.2333374023438}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/chado_feature_load_gff/feature_load_gff/2.3.2", "type": "tool"}, "6": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0", "tool_version": "3.2.1.0", "outputs": [{"type": "txt", "name": "results"}], "workflow_outputs": [], "input_connections": {"wait_for": {"output_name": "results", "id": 5}}, "tool_state": "{\"__page__\": null, \"__rerun_remap_job_id__\": null, \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"organism_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\"}", "id": 6, "tool_shed_repository": {"owner": "gga", "changeset_revision": "afd5d92745fb", "name": "tripal_organism_sync", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "05314408-41fa-4a2f-8aae-3988e2d899f6", "errors": null, "name": "Synchronize an organism", "post_job_actions": {}, "label": null, "inputs": [{"name": "organism_id", "description": "runtime parameter for tool Synchronize an organism"}], "position": {"top": 322, "left": 1168}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_organism_sync/organism_sync/3.2.1.0", "type": "tool"}, "7": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0", "tool_version": "3.2.1.0", "outputs": [{"type": "json", "name": "results"}], "workflow_outputs": [], "input_connections": {"wait_for": {"output_name": "results", "id": 6}}, "tool_state": "{\"__page__\": null, \"__rerun_remap_job_id__\": null, \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"analysis_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\"}", "id": 7, "tool_shed_repository": {"owner": 
"gga", "changeset_revision": "f487ff676088", "name": "tripal_analysis_sync", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "44c7cc7c-0848-47a7-872c-351f057803c1", "errors": null, "name": "Synchronize an analysis", "post_job_actions": {}, "label": null, "inputs": [{"name": "analysis_id", "description": "runtime parameter for tool Synchronize an analysis"}], "position": {"top": 323.58331298828125, "left": 1375.63330078125}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0", "type": "tool"}, "8": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0", "tool_version": "3.2.1.0", "outputs": [{"type": "json", "name": "results"}], "workflow_outputs": [], "input_connections": {"wait_for": {"output_name": "results", "id": 7}}, "tool_state": "{\"__page__\": null, \"__rerun_remap_job_id__\": null, \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\", \"analysis_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\"}", "id": 8, "tool_shed_repository": {"owner": "gga", "changeset_revision": "f487ff676088", "name": "tripal_analysis_sync", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "8ce8c990-39ce-4725-892b-4216a75f487d", "errors": null, "name": "Synchronize an analysis", "post_job_actions": {}, "label": null, "inputs": [{"name": "analysis_id", "description": "runtime parameter for tool Synchronize an analysis"}], "position": {"top": 321.20001220703125, "left": 1583.63330078125}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_analysis_sync/analysis_sync/3.2.1.0", "type": "tool"}, "9": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_feature_sync/feature_sync/3.2.1.0", "tool_version": "3.2.1.0", "outputs": [{"type": "txt", "name": "results"}], "workflow_outputs": [], "input_connections": {"wait_for": {"output_name": "results", "id": 8}}, "tool_state": "{\"__page__\": null, \"repeat_types\": \"[{\\\"__index__\\\": 0, \\\"types\\\": \\\"mRNA\\\"}, {\\\"__index__\\\": 1, \\\"types\\\": \\\"popylpeptide\\\"}]\", \"__rerun_remap_job_id__\": null, \"organism_id\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"repeat_ids\": \"[]\", \"wait_for\": \"{\\\"__class__\\\": \\\"ConnectedValue\\\"}\"}", "id": 9, "tool_shed_repository": {"owner": "gga", "changeset_revision": "64e36c3f0dd6", "name": "tripal_feature_sync", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "04600903-dd16-4db1-b562-552aeb003e6c", "errors": null, "name": "Synchronize features", "post_job_actions": {}, "label": null, "inputs": [{"name": "organism_id", "description": "runtime parameter for tool Synchronize features"}], "position": {"top": 321.20001220703125, "left": 1794.0333251953125}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/tripal_feature_sync/feature_sync/3.2.1.0", "type": "tool"}}, "annotation": "", "a_galaxy_workflow": "true"}
\ No newline at end of file
diff --git a/workflows/phaeoexplorer_jbrowse_workflow.ga b/workflows/phaeoexplorer_jbrowse_workflow.ga
deleted file mode 100644
index 787bce9..0000000
--- a/workflows/phaeoexplorer_jbrowse_workflow.ga
+++ /dev/null
@@ -1 +0,0 @@
-{"uuid": "e904df94-5e7a-47c1-a4d6-712822694860", "tags": [], "format-version": "0.1", "name": "jbrowse", "version": 24, "steps": {"0": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "b5801dd3-b710-420d-96ca-bd7201f03ff4", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 0, "uuid": "e7c12b60-558b-490f-b581-701d38dc1bf1", "errors": null, "name": "Input dataset", "label": null, "inputs": [], "position": {"top": 204.5, "left": 277.5}, "annotation": "", "content_id": null, "type": "data_input"}, "1": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "68f656ab-1b09-4642-b836-7d27ed7e0233", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 1, "uuid": "e0908a52-62a2-4d1f-ae99-eb2906550cf1", "errors": null, "name": "Input dataset", "label": null, "inputs": [], "position": {"top": 271.5, "left": 277.5}, "annotation": "", "content_id": null, "type": "data_input"}, "2": {"tool_id": "toolshed.g2.bx.psu.edu/repos/iuc/jbrowse/jbrowse/1.16.5+galaxy7", "tool_version": "1.16.5+galaxy7", "outputs": [{"type": "html", "name": "output"}], "workflow_outputs": [{"output_name": "output", "uuid": "14a463ef-f88b-42f2-bef3-d0e1fc0c9cd3", "label": null}], "input_connections": {"track_groups_0|data_tracks_0|data_format|annotation": {"output_name": "output", "id": 1}, "reference_genome|genome": {"output_name": "output", "id": 0}}, "tool_state": "{\"__page__\": null, \"standalone\": \"\\\"true\\\"\", \"__rerun_remap_job_id__\": null, \"reference_genome\": \"{\\\"__current_case__\\\": 1, \\\"genome\\\": {\\\"__class__\\\": \\\"ConnectedValue\\\"}, \\\"genome_type_select\\\": \\\"history\\\"}\", \"track_groups\": \"[{\\\"__index__\\\": 0, \\\"category\\\": \\\"Annotation\\\", \\\"data_tracks\\\": [{\\\"__index__\\\": 0, \\\"data_format\\\": {\\\"__current_case__\\\": 2, \\\"annotation\\\": {\\\"__class__\\\": \\\"ConnectedValue\\\"}, \\\"data_format_select\\\": \\\"gene_calls\\\", \\\"index\\\": \\\"false\\\", \\\"jb_custom_config\\\": {\\\"option\\\": []}, \\\"jbcolor_scale\\\": {\\\"color_score\\\": {\\\"__current_case__\\\": 0, \\\"color\\\": {\\\"__current_case__\\\": 0, \\\"color_select\\\": \\\"automatic\\\"}, \\\"color_score_select\\\": \\\"none\\\"}}, \\\"jbmenu\\\": {\\\"track_menu\\\": [{\\\"__index__\\\": 0, \\\"menu_action\\\": \\\"iframeDialog\\\", \\\"menu_icon\\\": \\\"dijitIconBookmark\\\", \\\"menu_label\\\": \\\"View transcript report\\\", \\\"menu_title\\\": \\\"Transcript {id}\\\", \\\"menu_url\\\": \\\"http://localhost/sp/undaria_pinnatifida/feature/Undaria/pinnatifida/mRNA/{id}\\\"}]}, \\\"jbstyle\\\": {\\\"max_height\\\": \\\"600\\\", \\\"style_classname\\\": \\\"transcript\\\", \\\"style_description\\\": \\\"note,description\\\", \\\"style_height\\\": \\\"10px\\\", \\\"style_label\\\": \\\"product,name,id\\\"}, \\\"match_part\\\": {\\\"__current_case__\\\": 1, \\\"match_part_select\\\": \\\"false\\\"}, \\\"override_apollo_drag\\\": \\\"False\\\", \\\"override_apollo_plugins\\\": \\\"False\\\", \\\"track_config\\\": {\\\"__current_case__\\\": 3, \\\"html_options\\\": {\\\"topLevelFeatures\\\": \\\"\\\"}, \\\"track_class\\\": \\\"NeatHTMLFeatures/View/Track/NeatFeatures\\\"}, \\\"track_visibility\\\": \\\"default_off\\\"}}]}]\", \"plugins\": \"{\\\"BlastView\\\": \\\"true\\\", \\\"ComboTrackSelector\\\": \\\"false\\\", \\\"GCContent\\\": \\\"false\\\"}\", \"action\": \"{\\\"__current_case__\\\": 0, \\\"action_select\\\": \\\"create\\\"}\", 
\"gencode\": \"\\\"1\\\"\", \"jbgen\": \"{\\\"aboutDescription\\\": \\\"\\\", \\\"defaultLocation\\\": \\\"\\\", \\\"hideGenomeOptions\\\": \\\"false\\\", \\\"shareLink\\\": \\\"true\\\", \\\"show_menu\\\": \\\"true\\\", \\\"show_nav\\\": \\\"true\\\", \\\"show_overview\\\": \\\"true\\\", \\\"show_tracklist\\\": \\\"true\\\", \\\"trackPadding\\\": \\\"20\\\"}\", \"uglyTestingHack\": \"\\\"\\\"\"}", "id": 2, "tool_shed_repository": {"owner": "iuc", "changeset_revision": "edb534491f92", "name": "jbrowse", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "2bcc764a-5827-4fbe-a51e-9f0a49d3319b", "errors": null, "name": "JBrowse", "post_job_actions": {}, "label": null, "inputs": [], "position": {"top": 189.5, "left": 540.5}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/iuc/jbrowse/jbrowse/1.16.5+galaxy7", "type": "tool"}, "3": {"tool_id": "toolshed.g2.bx.psu.edu/repos/gga/jbrowse_to_container/jbrowse_to_container/0.5.1", "tool_version": "0.5.1", "outputs": [{"type": "html", "name": "output"}], "workflow_outputs": [{"output_name": "output", "uuid": "ce9cf789-8543-49db-a68c-44000f3fa671", "label": null}], "input_connections": {"organisms_0|jbrowse": {"output_name": "output", "id": 2}}, "tool_state": "{\"__page__\": null, \"__rerun_remap_job_id__\": null, \"organisms\": \"[{\\\"__index__\\\": 0, \\\"advanced\\\": {\\\"unique_id\\\": \\\"UNIQUEID\\\"}, \\\"jbrowse\\\": {\\\"__class__\\\": \\\"RuntimeValue\\\"}, \\\"name\\\": \\\"NAME\\\"}]\"}", "id": 3, "tool_shed_repository": {"owner": "gga", "changeset_revision": "11033bdad2ca", "name": "jbrowse_to_container", "tool_shed": "toolshed.g2.bx.psu.edu"}, "uuid": "7029b830-6e02-4a9b-8fa9-68e77c46cbe7", "errors": null, "name": "Add organisms to JBrowse container", "post_job_actions": {}, "label": null, "inputs": [], "position": {"top": 207.5, "left": 874}, "annotation": "", "content_id": "toolshed.g2.bx.psu.edu/repos/gga/jbrowse_to_container/jbrowse_to_container/0.5.1", "type": "tool"}}, "annotation": "", "a_galaxy_workflow": "true"}
\ No newline at end of file
-- 
GitLab