diff --git a/canto_weekly.service b/canto_weekly.service
index b8541b70ea1fcdd4b3b85465f59d43055bc924a0..834435bfbd88d8fe85e2ff7108b5f32f402c399b 100644
--- a/canto_weekly.service
+++ b/canto_weekly.service
@@ -4,8 +4,8 @@ After=network-online.target
 Wants=network-online.target
 
 [Service]
-WorkingDirectory=/data/export/canto/canto
-ExecStart=/data/export/canto/canto/jwrn3_test.sh
+WorkingDirectory=/data/export/canto-space/canto
+ExecStart=/data/export/canto-space/canto/weekly_routine.sh
 
 [Install]
 WantedBy=default.target
\ No newline at end of file
diff --git a/weekly_routine.sh b/weekly_routine.sh
index 16b40fec0cf06fe0c85fa8561a4110e2c8bea3ee..e6c625e4f3d53ee29c9d04aed79e4c4868416191 100644
--- a/weekly_routine.sh
+++ b/weekly_routine.sh
@@ -1,8 +1,14 @@
-#!/bin/sh
-#weekly routine on Sunday pm /Monday am. This script should be run from /canto-space/
-# JWRN
-# /canto-space/ == /data/export/canto/canto
-# vt comments2: /canto-space/ == /data/export/canto-space
+#!/bin/bash
+
+# Weekly routine, run on Sunday PM / Monday AM. This script should be run from /data/export/canto-space
+
+# REMOVE LINES 5-8
+## JWRN
+## /canto-space/ == /data/export/canto/canto
+## vt comments2: /canto-space/ == /data/export/canto-space
+
+# Define canto-space root
+CANTOSPACE="/data/export/canto-space"
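+
+# The relative paths below (./FBbt-GO_routine, ./canto/script, ...) assume we are
+# already in ${CANTOSPACE}; the systemd unit sets WorkingDirectory, but a manual
+# run might not, so a guard could be added here (a sketch, not enabled):
+# cd "${CANTOSPACE}" || { echo "Cannot cd to ${CANTOSPACE}" >&2; exit 1; }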
 
 # Define log file
 LOGFILE="/var/log/canto_weekly.log"
@@ -12,25 +18,25 @@ LOGFILE="/var/log/canto_weekly.log"
 # generates something like
 # 2020-05-20 10:24:37: Canto script completed successfully
 
-function log (){
+function log () {
     DATESTAMP="$(date +"%Y-%m-%d %H:%M:%S")"
     /bin/echo "${DATESTAMP}: ${1}" >> "${LOGFILE}"
 }
 
 # copy/update of ontologies from /data/export/curfiles/ontologies/trunk/ into 'canto-space/import_export'
-## gm comment: I don't really know how rsync works so I didn't know which options to choose (so didn't try to!) and the syntax may not be correct, but the first path should be OK if Canto is installed on the current vm.
-## vt comment: the second path should be to /canto-space/import_export/
+# GM comment: I don't really know how rsync works so I didn't know which options to choose (so didn't try to!) and the syntax may not be correct, but the first path should be OK if Canto is installed on the current vm.
+# VT comment: the second path should be to /canto-space/import_export/
+# JWRN comment: Done
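+# Possible options for the rsync call in update_obo_file below (a sketch, not
+# verified on this vm): -t preserves the source file's timestamp and -c compares
+# checksums so files whose content has not changed are not re-copied, e.g.
+#   /usr/bin/rsync -tc "/data/export/curfiles/ontologies/trunk/${FILE_NAME}" "${CANTOSPACE}/import_export/${FILE_NAME}"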
 
 function update_obo_file()
 {
 	FILE_NAME=${1}
 
-# first, check that the FILE exists and svn update if so
-
-	if [ -e "/data/export/curfiles/ontologies/trunk/${FILE_NAME}" ] ; then
+	# first, check that the FILE exists and svn update if so
+	if [[ -e "/data/export/curfiles/ontologies/trunk/${FILE_NAME}" ]] ; then
 
-		log "Updating ${FILE_NAME} ..."
-		/usr/bin/rsync /data/export/curfiles/ontologies/trunk/${FILE_NAME} ./import_export/${FILE_NAME}
+		log "Updating ${FILE_NAME}..."
+		/usr/bin/rsync "/data/export/curfiles/ontologies/trunk/${FILE_NAME}" "${CANTOSPACE}/import_export/${FILE_NAME}"
 
 	else
 
@@ -47,25 +53,31 @@ done
 
 
 #replace merged ontology and reload all ontologies to Canto
-##vt comment: Ideally, add the following 'if' routine . If hard to implement, remove the 'if' routine and make the three commands run by default
-##vt comments2: the 'if' loop here may save significant time - takes 20min on my local vm
-## JWRN comment: how do we know an ontology has been changed? Is there a piece of information we can write out and read back in?
-##vt comments2: response to JWRN's question above: as these files will not be necessarily updated every week, the simpler way would be to check the timestamp. If no changes in the last, say, 24h, then you can assume no change has been made
-## JWRN comment: may be easier to just update whatever and improve in time
-
-if <any ontology has been changed>, then
+# VT comment: Ideally, add the following 'if' routine. If hard to implement, remove the 'if' routine and make the three commands run by default
+# VT comments2: the 'if' loop here may save significant time - takes 20min on my local vm
+# JWRN comment: how do we know an ontology has been changed? Is there a piece of information we can write out and read back in?
+# VT comments2: response to JWRN's question above: as these files will not necessarily be updated every week, the simpler way would be to check the timestamp. If no changes in the last, say, 24h, then you can assume no change has been made
+# JWRN comment: may be easier to just update whatever and improve in time.
+# JWRN comment: which file(s) need datestamp checking?
+# JWRN comment: test routine example
+#if [[ $(find "${FILENAME}" -mtime +1 -print) ]]; then
+#    echo "${FILENAME} not changed"
+#else
+#    echo "Modified time less than 24h."
+#    # Run commands below here
+#fi
+# JWRN comment: commenting out test for the moment.
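+# A minimal sketch of the timestamp idea discussed above, assuming the files to
+# watch are the .obo sources under /data/export/curfiles/ontologies/trunk and
+# that "changed" means modified within the last 24h (-mmin -1440). Kept
+# commented out alongside the test above:
+# CHANGED="$(find /data/export/curfiles/ontologies/trunk -name '*.obo' -mmin -1440 -print 2>/dev/null)"
+# if [[ -n "${CHANGED}" ]]; then
+#     log "Ontology source files changed in the last 24h, rebuilding and reloading"
+#     # run the three commands below here
+# else
+#     log "No ontology changes in the last 24h, reload could be skipped"
+# fi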
 
 # redo/replace merged FBbt-GO.obo ontology
-		sh ./FBbt-GO_routine/FBbt-GO_routine.sh
+sh ./FBbt-GO_routine/FBbt-GO_routine.sh
+
 # replace extension_config.tsv
-		sh ./extension_config-Rscript/list-to-extension_config.sh
+sh ./extension_config-Rscript/list-to-extension_config.sh
+
 # reload the ontologies and extension configuration
-		sudo ./canto/script/canto_docker ./script/canto_load.pl --process-extension-config --ontology  /import_export/FBbt-GO_test2.obo --ontology    /import_export/fly_development.obo --ontology   /import_export/flybase_controlled_vocabulary.obo
+# JWRN comment: I suspect the sudo here is superfluous as the script is running as root so removed
+./canto/script/canto_docker ./script/canto_load.pl --process-extension-config --ontology /import_export/FBbt-GO_test2.obo --ontology /import_export/fly_development.obo --ontology /import_export/flybase_controlled_vocabulary.obo
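+
+# The sudo removal above assumes the unit runs this script as root. A minimal
+# guard for that assumption could be (a sketch, not enabled):
+# [[ ${EUID} -eq 0 ]] || { log "Not running as root, cannot continue"; exit 1; }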
 	
-fi
-
-# JWRN additions
-
 # Function to retry command until successful with max number of attempts
 function retry {
 
@@ -76,7 +88,8 @@ function retry {
     local max=5
 
     # Sleep period in seconds until retry
-    local delay=2
+    # 600 seconds = 10 minutes
+    local delay=600
 
     # Loop 
     while true; do
@@ -85,16 +98,16 @@ function retry {
 		"${@}" && break || {
 
 			# Else loop while attempt no is less than max
-			if [[ "${n}" -lt "${max}" ]]; then
+			if [ "${n}" -lt "${max}" ]; then
 
 				# Increment attempt counter
 				((n++))
 
 				# log status
-				log "Command failed. Attempt ${n}/${max}"
+				log "File retrieval failed. Attempt ${n}/${max}"
 
 				# Sleep for period
-				sleep ${delay}
+				sleep "${delay}"
 			else
 				# Hit max attempts and still failed so giving up
 				log "${@} failed after ${n} attempts."
@@ -107,7 +120,6 @@ function retry {
 MARKERFILE="./canto_done"
 
 # Use retry function to pull marker file from deneb
-# JWRN comment: requires ssh keys be setup for root to fbadmin
 retry /usr/bin/scp fbadmin@deneb.pdb.cam.ac.uk:instance/canto_done "${MARKERFILE}"
 
 # Get DBNAME from downloaded file
@@ -119,7 +131,7 @@ else
 fi
 
 # Check DBNAME is not blank
-if [[ -z ${BDNAME }]]; then
+if [[ -z "${DBNAME}" ]]; then
     log "${DBNAME} is blank, cannot continue"
     exit 1
 fi
@@ -134,36 +146,39 @@ else
 	exit 1
 fi
 
-
-#data import (using Gillian's scripts in the vm - see point 7.d in https://docs.google.com/document/d/19C-J8sJmZb_OSluxyzBWJxUkdR_N4sIpgjHI7u5pp0I/edit)
-### gm comment: the following 'if' command should work
-### run the script to generate new information into canto ONLY if the fbrf_input_list.tsv file exists
+# data import (using Gillian's scripts in the vm - see point 7.d in https://docs.google.com/document/d/19C-J8sJmZb_OSluxyzBWJxUkdR_N4sIpgjHI7u5pp0I/edit)
+# GM comment: the following 'if' command should work
+# run the script to generate new information into canto ONLY if the fbrf_input_list.tsv file exists
 
 # make fbrf_input_list.tsv (list of newly thin-curated papers)
 # if there are no new papers to add in a particular week, the output file,
 # fbrf_input_list.tsv, will be empty
-/usr/bin/perl /data/export/support_scripts/get_fbrfs_to_add_to_canto.pl /data/export/support_scripts/modules_server.cfg > fbrf_input_list.tsv
-
 
-if [ -e "./fbrf_input_list.tsv" ] ; then # test for not empty
+CURATEDPAPERLISTFILE="fbrf_input_list.tsv"
+/usr/bin/perl /data/export/support_scripts/get_fbrfs_to_add_to_canto.pl /data/export/support_scripts/modules_server.cfg > "${CURATEDPAPERLISTFILE}"
 
-# make the json input file
-	/usr/bin/perl /data/export/support_scripts/canto_json_input_maker.pl /data/export/support_scripts/modules_server.cfg ./fbrf_input_list.tsv > ./import-fbrfs.json
+# Test fbrf_input_list.tsv exists and isn't empty
+if [[ -s "${CURATEDPAPERLISTFILE}" ]] ; then
 
-##vt comments2: this next step make take some time, depending on the amount of new data
-# load the json file into canto
-	sudo ./canto/script/canto_docker ./script/canto_add.pl --sessions-from-json ./import-fbrfs.json vmt25@cam.ac.uk 7227
+	# make the json input file
+	/usr/bin/perl /data/export/support_scripts/canto_json_input_maker.pl /data/export/support_scripts/modules_server.cfg "${CURATEDPAPERLISTFILE}" > ./import-fbrfs.json
 
-### remove the fbrf_input_list.tsv file once done, so that the script doesn't try to add the same information again next time its run
-	/bin/rm ./fbrf_input_list.tsv
+	# load the json file into canto
+	# VT comment: this next step may take some time, depending on the amount of new data
+	# JWRN comment: again I suspect the sudo is superfluous so removed
+	./canto/script/canto_docker ./script/canto_add.pl --sessions-from-json ./import-fbrfs.json vmt25@cam.ac.uk 7227
 
+	# remove the fbrf_input_list.tsv file once done, so that the script doesn't try to add the same information again next time it's run
+	/bin/rm "${CURATEDPAPERLISTFILE}"
+else
+	log "Not loading import-fbrfs.json into Canto. Either ${CURATEDPAPERLISTFILE} does not exist or is empty..."
 fi
 
 
-#reset cache (restart memcached)
+# Reset cache (restart memcached)
 /etc/init.d/memcached restart
 
-#canto restart
+# Canto restart
 /etc/init.d/canto restart