From 7259d53cc417c1eeaa68c43da859837d7e1eda73 Mon Sep 17 00:00:00 2001
From: Nikolaus Krismer <nikolaus.krismer@uibk.ac.at>
Date: Sat, 19 Dec 2015 09:56:00 +0100
Subject: [PATCH] allowing multiple target_srids now

---
 CHANGELOG.md                    |  1 +
 bootstrap/config.sh             |  6 ++++-
 bootstrap/importData.sh         | 48 +++++++++++++++++++--------------
 bootstrap/prepareEnvironment.sh |  2 +-
 4 files changed, 35 insertions(+), 22 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index ca4ae5e..64f76c4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,6 @@
 Upcoming version:
 -----------------
+  - allowing multiple target_srids now (Nikolaus Krismer)
   - added information about srid 82344 (Nikolaus Krismer)
   - updated geoserver and geoserver-shell (Nikolaus Krismer)
   - gradle version upgrade (Nikolaus Krismer)
diff --git a/bootstrap/config.sh b/bootstrap/config.sh
index de7ac82..0ea1d2e 100755
--- a/bootstrap/config.sh
+++ b/bootstrap/config.sh
@@ -7,7 +7,11 @@ IMPORT_DATA_NEO4J_SPATIAL=true
 IMPORT_DATA_POSTGIS=true
 IMPORT_DATA_SPATIALITE=true
 
-TARGET_SRID=3857
+# Comma separated list of srids for which datasets will be generated
+# This is useful when creating multiple datasets (when creating/updating data from scratch)
+# Note that only the last SRID in the list will be kept in the database; all the others will be overwritten
+#TARGET_SRID="4326,3857"
+TARGET_SRID="3857"
 UPDATE_DATA=false
 
 CACHE_DIR_LOCAL="/tmp/vagrant-cache"
diff --git a/bootstrap/importData.sh b/bootstrap/importData.sh
index 927928f..81bbe5a 100755
--- a/bootstrap/importData.sh
+++ b/bootstrap/importData.sh
@@ -67,11 +67,12 @@ function fn_import_dataset() {
 	local OSM_FILE="$2"
 	local OSM_FILE_FILTERED="$3"
 	local BOUNDING="$4"
-	local SQL_EXPORT_FILE="${NAME,,}_export_${TARGET_SRID}.sql.gz"
+	local SRID="$5"
+	local SQL_EXPORT_FILE="${NAME,,}_export_${SRID}.sql.gz"
 
-	echo "Importing data for region of $NAME"
+	echo "Importing data for region of $NAME (EPSG:${SRID})"
 	echo "  - importing into postgis"
-	fn_import_dataset_postgis "$NAME" "$SQL_EXPORT_FILE"
+	fn_import_dataset_postgis "$NAME" "$SRID" "$SQL_EXPORT_FILE"
 
 	if $IMPORT_DATA_SPATIALITE; then
 		echo "  - importing into neo4j"
@@ -92,7 +93,8 @@ function fn_import_dataset_neo4j_spatial() {
 
 function fn_import_dataset_postgis() {
 	local NAME="$1"
-	local SQL_EXPORT_FILE="$2"
+	local SRID="$2"
+	local SQL_EXPORT_FILE="$3"
 
 	local CITY=${NAME// /}
 	CITY=${CITY,,}
@@ -122,7 +124,7 @@ function fn_import_dataset_postgis() {
 	if $AVAILABLE_ISOCHRONE_DATAMODEL && [ ! -f "$DATA_DIR/$SQL_EXPORT_FILE" ]; then
 		echo "  - creating datamodel using isochrone-datamodel"
 		# Create datamodel using isochrone-datamodel project
-		DB_USERNAME="$PG_DB_USER" DB_PASSWORD="$PG_DB_PASSWORD" "$DEPLOY_DIR/isochrone-datamodel/builder.sh" -d -s -b -l -t${TARGET_SRID} -c${CITY} >> "$WORKING_DIR/create_datamodel_$CITY.log" 2>&1
+		DB_USERNAME="$PG_DB_USER" DB_PASSWORD="$PG_DB_PASSWORD" "$DEPLOY_DIR/isochrone-datamodel/builder.sh" -d -s -b -l -t${SRID} -c${CITY} >> "$WORKING_DIR/create_datamodel_$CITY.log" 2>&1
 
 		echo "  - copying tables to isochrone database"
 		PGPASSWORD="spatial" pg_dump -U spatial -h localhost -p 5432 -x -O -d spatial -t "transformed.${CITY}_*"  | sed -e "s/transformed/public/g" | PGPASSWORD="$PG_DB_PASSWORD" psql -h localhost  -U "$PG_DB_USER" -p 5432 -d isochrone >> "$WORKING_DIR/create_datamodel_$CITY.log" 2>&1
@@ -180,7 +182,9 @@ function fn_init_geoserver() {
 }
 
 function fn_import_spatialite() {
-	SPATIALITE_FILENAME="isochrone_${TARGET_SRID}.spatialite"
+	local SRID="$1"
+
+	SPATIALITE_FILENAME="isochrone_${SRID}.spatialite"
 	if ! $UPDATE_DATA; then
 		echo "Importing spatialite database"
 		fn_download_newer $DOWNLOAD_DIR/$SPATIALITE_FILENAME $CACHE_DIR_REMOTE_DATA/$SPATIALITE_FILENAME
@@ -220,28 +224,32 @@ mkdir -p $DOWNLOAD_DIR
 mkdir -p $WORKING_DIR
 cd "$WORKING_DIR"
 
-# Import real world datasets
-fn_import_dataset "Bolzano" "$OSM_FILE_ITALY" "$OSM_FILE_BOLZANO_FILTERED" "$OSM_BOUNDING_BOLZANO"
-fn_import_dataset "Innsbruck" "$OSM_FILE_AUSTRIA" "$OSM_FILE_INNSBRUCK_FILTERED" "$OSM_BOUNDING_INNSBRUCK"
-fn_import_dataset "Salzburg" "$OSM_FILE_AUSTRIA" "$OSM_FILE_SALZBURG_FILTERED" "$OSM_BOUNDING_SALZBURG"
-fn_import_dataset "SanFrancisco" "$OSM_FILE_CALIFORNIA" "$OSM_FILE_SANFRANCISCO_FILTERED" "$OSM_BOUNDING_SANFRANCISCO"
-#if ! $IS_LOCAL_TEST_DEPLOY; then
-#	fn_import_dataset "Italy" "$OSM_FILE_ITALY" "$OSM_FILE_ITALY_FILTERED" "$OSM_BOUNDING_ITALY"
-#	fn_import_dataset "TrentoAltoAdige" "$OSM_FILE_ITALY" "$OSM_FILE_TRENTOALTOADIGE_FILTERED" "$OSM_BOUNDING_TRENTOALTOADIGE"
-#fi
-
 # Generate synthetic datasets (and densities for them) -> they are always created from scratch (never cached)
 if $AVAILABLE_POSTGIS && $AVAILABLE_ISOCHRONE_TOOLS; then
+	echo "Importing data for synthetic networks"
 	java -cp $DOWNLOAD_DIR/isochrone-tools.jar at.uibk.dbis.isochrone.generator.GridNetworkGenerator -d 100 -l 60 > "$WORKING_DIR/generate_gridNetwork.log" 2>&1
 	java -cp $DOWNLOAD_DIR/isochrone-tools.jar at.uibk.dbis.isochrone.generator.SpiderNetworkGenerator -d 6 -lvl 1000 -l 60 > "$WORKING_DIR/generate_spiderNetwork.log" 2>&1
 	java -cp $DOWNLOAD_DIR/isochrone-tools.jar at.uibk.dbis.isochrone.generator.density.DensityGenerator -t grid_s100 -d 60,120,180,240,300 >> "$WORKING_DIR/generate_gridNetwork.log" 2>&1
 	java -cp $DOWNLOAD_DIR/isochrone-tools.jar at.uibk.dbis.isochrone.generator.density.DensityGenerator -t spider_l1000 -d 60,120,180,240,300 >> "$WORKING_DIR/generate_spiderNetwork.log" 2>&1
 fi
 
-# Not importing datasets one-by-one into spatialite -> we copy data from postgis (after postgis data import)
-if $IMPORT_DATA_SPATIALITE; then
-	fn_import_spatialite
-fi
+SRID_ARR=(${TARGET_SRID//,/ })
+for CURRENT_SRID in "${SRID_ARR[@]}"; do
+	# Import real world datasets
+	fn_import_dataset "Bolzano" "$OSM_FILE_ITALY" "$OSM_FILE_BOLZANO_FILTERED" "$OSM_BOUNDING_BOLZANO" "$CURRENT_SRID"
+	fn_import_dataset "Innsbruck" "$OSM_FILE_AUSTRIA" "$OSM_FILE_INNSBRUCK_FILTERED" "$OSM_BOUNDING_INNSBRUCK" "$CURRENT_SRID"
+	fn_import_dataset "Salzburg" "$OSM_FILE_AUSTRIA" "$OSM_FILE_SALZBURG_FILTERED" "$OSM_BOUNDING_SALZBURG" "$CURRENT_SRID"
+	fn_import_dataset "SanFrancisco" "$OSM_FILE_CALIFORNIA" "$OSM_FILE_SANFRANCISCO_FILTERED" "$OSM_BOUNDING_SANFRANCISCO" "$CURRENT_SRID"
+#	if ! $IS_LOCAL_TEST_DEPLOY; then
+#		fn_import_dataset "Italy" "$OSM_FILE_ITALY" "$OSM_FILE_ITALY_FILTERED" "$OSM_BOUNDING_ITALY" "$CURRENT_SRID"
+#		fn_import_dataset "TrentoAltoAdige" "$OSM_FILE_ITALY" "$OSM_FILE_TRENTOALTOADIGE_FILTERED" "$OSM_BOUNDING_TRENTOALTOADIGE" "$CURRENT_SRID"
+#	fi
+
+	# Not importing datasets one-by-one into spatialite -> we copy data from postgis (after postgis data import)
+	if $IMPORT_DATA_SPATIALITE; then
+		fn_import_spatialite "$CURRENT_SRID"
+	fi
+done
 
 # After data import we have to initialize geoserver layers (if geoserver is used)
 if $AVAILABLE_GEOSERVER; then
diff --git a/bootstrap/prepareEnvironment.sh b/bootstrap/prepareEnvironment.sh
index 1039bdf..6cb2ef5 100755
--- a/bootstrap/prepareEnvironment.sh
+++ b/bootstrap/prepareEnvironment.sh
@@ -620,7 +620,7 @@ if $INSTALL_NEO4J_SPATIAL; then
 	cd $DEPLOY_DIR/neo4j-spatial
 	git checkout -q $NEO4J_SPATIAL_VERSION >> /dev/null 2>&1
 
-	echo "  - compilinh neo4j-spatial (from source)"
+	echo "  - compiling neo4j-spatial (from source)"
 	mvn clean install -DskipTests >> $WORKING_DIR/install_neo4j_spatial.log 2>&1
 
 	if $INSTALL_NEO4J; then
-- 
GitLab