Diffstat (limited to 'metadata/scripts')
-rwxr-xr-x  metadata/scripts/aggregate.sh                 |  31
-rwxr-xr-x  metadata/scripts/cert-retrieve.sh             |   8
-rwxr-xr-x  metadata/scripts/create_mxml.sh               |  81
-rwxr-xr-x  metadata/scripts/expiration_date.pl           |  11
-rwxr-xr-x  metadata/scripts/get-metadata.sh              | 111
-rw-r--r--  metadata/scripts/now_date.pl                  |   7
-rwxr-xr-x  metadata/scripts/pull-and-verify.sh           |  50
-rwxr-xr-x  metadata/scripts/remove_entity.sh             |  65
-rwxr-xr-x  metadata/scripts/rename-xml.sh                |  44
-rw-r--r--  metadata/scripts/unique_id.pl                 |   7
-rwxr-xr-x  metadata/scripts/update-and-validate.sh       |  38
-rwxr-xr-x  metadata/scripts/update-cron.sh               |  27
-rwxr-xr-x  metadata/scripts/update.sh                    |  28
-rw-r--r--  metadata/scripts/urls2hostport.py             |   6
-rwxr-xr-x  metadata/scripts/verify-uiinfo-urls.sh        |  16
-rwxr-xr-x  metadata/scripts/weekly-routines-metadata.sh  |   8
16 files changed, 538 insertions, 0 deletions
diff --git a/metadata/scripts/aggregate.sh b/metadata/scripts/aggregate.sh
new file mode 100755
index 00000000..35ac1a7b
--- /dev/null
+++ b/metadata/scripts/aggregate.sh
@@ -0,0 +1,31 @@
+#!/bin/sh
+
+aggregate_interval_min=60
+
+DIR=`pwd`
+ODIR=$1
+
+last_aggregate_ts=$ODIR/last_aggregate.ts
+
+if find $last_aggregate_ts -mmin -$aggregate_interval_min 2>/dev/null | grep -q . ; then
+ exit 0
+fi
+
+rm -f $ODIR/*.xml
+
+grep -v -e '^#' $ODIR/metadata.lst | grep . | (while read url cert; do
+ cfile=""
+ if [ "x$cert" != "x" ]; then
+ cfile=$DIR/certs/$cert
+ fi
+ cmd="$DIR/scripts/pull-and-verify.sh $url $ODIR $cfile"
+ $cmd
+ ret=$?
+ if [ "$ret" != "0" ] ; then
+ echo "$cmd returned $ret"
+ exit $ret
+ fi
+done) || exit $?   # $ret is not visible outside the subshell; propagate its exit status via $?
+
+touch $last_aggregate_ts
+
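aggregate.sh expects <output-dir>/metadata.lst to list one upstream feed per line as "<url> [certificate]", with the optional certificate name resolved against certs/ in the current directory. The list itself is not part of this patch; a hypothetical example and invocation, run from the repository root:

    # swamid-edugain/metadata.lst (hypothetical contents):
    #   https://mds.example.org/edugain-aggregate.xml edugain-signer.pem
    ./scripts/aggregate.sh swamid-edugain   # no-op if the last pull is less than 60 minutes old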
diff --git a/metadata/scripts/cert-retrieve.sh b/metadata/scripts/cert-retrieve.sh
new file mode 100755
index 00000000..5440b0f8
--- /dev/null
+++ b/metadata/scripts/cert-retrieve.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+#
+# usage: cert-retrieve.sh remote.host.name [port]
+#
+REMHOST=$1
+REMPORT=${2:-443}
+
+echo | openssl s_client -connect ${REMHOST}:${REMPORT} 2>&1 | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p'
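A possible companion step for populating certs/; the hostname and output file name are illustrative only:

    ./scripts/cert-retrieve.sh mds.example.org 443 > certs/mds.example.org.pem
    openssl x509 -in certs/mds.example.org.pem -noout -subject -dates   # inspect before trusting it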
diff --git a/metadata/scripts/create_mxml.sh b/metadata/scripts/create_mxml.sh
new file mode 100755
index 00000000..bb7855d5
--- /dev/null
+++ b/metadata/scripts/create_mxml.sh
@@ -0,0 +1,81 @@
+#!/bin/bash
+# Creates MXML files to be used in signing of Metadata
+if ! test -d swamid-2.0 ; then
+ echo "Script should be run from base directory of swamid-metadata"
+ exit 1
+fi
+
+ls swamid-2.0/ swamid-testing/ swamid-edugain/ | egrep -v "/:|^$" | sort | uniq -c | awk '$1 > 1 {print $2, "exists in more than one feed"}'
+
+#
+# eduGAIN
+#
+cat << EOF > swamid-edugain-idp-1.0.mxml
+<?xml version="1.0" encoding="UTF-8"?>
+<md:EntitiesDescriptor xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata"
+ xmlns:xi="http://www.w3.org/2001/XInclude"
+ Name="http://md.swamid.se/md/swamid-idp-2.0.xml">
+EOF
+grep -l IDPSSO swamid-edugain/*.xml | LC_ALL=C sort | while read file; do
+ echo " <xi:include href=\"$file\"/>" >> swamid-edugain-idp-1.0.mxml
+done
+echo "</md:EntitiesDescriptor>" >> swamid-edugain-idp-1.0.mxml
+
+cat << EOF > swamid-edugain-sp-1.0.mxml
+<?xml version="1.0" encoding="UTF-8"?>
+<md:EntitiesDescriptor xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata" xmlns:shibmeta="urn:mace:shibboleth:metadata:1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ds="http://www.w3.org/2000/09/xmldsig#" xmlns:xi="http://www.w3.org/2001/XInclude" xmlns:shibmd="urn:mace:shibboleth:metadata:1.0">
+EOF
+grep -l SPSSO swamid-edugain/*.xml | grep -v "`grep -l IDPSSO swamid-edugain/*.xml`" | LC_ALL=C sort | while read file; do
+ echo " <xi:include href=\"$file\"/>" >> swamid-edugain-sp-1.0.mxml
+done
+echo "</md:EntitiesDescriptor>" >> swamid-edugain-sp-1.0.mxml
+
+#
+# Swamid-2.0
+#
+cat << EOF > swamid-idp-2.0.mxml
+<?xml version="1.0" encoding="UTF-8"?>
+<md:EntitiesDescriptor xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata"
+ xmlns:xi="http://www.w3.org/2001/XInclude"
+ Name="http://md.swamid.se/md/swamid-idp-2.0.xml">
+ <xi:include href="swamid-edugain-idp-1.0.mxml" xpointer="xpointer(//*[@entityID])"/>
+
+ <!-- Opt-out from eduGAIN IDP:s -->
+EOF
+grep -l IDPSSO swamid-2.0/*.xml | LC_ALL=C sort | while read file; do
+ echo " <xi:include href=\"$file\"/>" >> swamid-idp-2.0.mxml
+done
+echo "</md:EntitiesDescriptor>" >> swamid-idp-2.0.mxml
+
+cat << EOF > swamid-sp-2.0.mxml
+<?xml version="1.0" encoding="UTF-8"?>
+<md:EntitiesDescriptor xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata" xmlns:shibmeta="urn:mace:shibboleth:metadata:1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ds="http://www.w3.org/2000/09/xmldsig#" xmlns:xi="http://www.w3.org/2001/XInclude" xmlns:shibmd="urn:mace:shibboleth:metadata:1.0">
+ <xi:include href="swamid-edugain-sp-1.0.mxml" xpointer="xpointer(//*[@entityID])"/>
+
+ <!-- Opt-out from eduGAIN SP:s -->
+EOF
+grep -l SPSSO swamid-2.0/*.xml | grep -v "`grep -l IDPSSO swamid-2.0/*.xml`" | LC_ALL=C sort | while read file; do
+ echo " <xi:include href=\"$file\"/>" >> swamid-sp-2.0.mxml
+done
+echo "</md:EntitiesDescriptor>" >> swamid-sp-2.0.mxml
+
+#
+# Swamid-testing
+#
+cat << EOF > swamid-testing-idp-1.0.mxml
+<?xml version="1.0"?>
+<EntitiesDescriptor xmlns="urn:oasis:names:tc:SAML:2.0:metadata" xmlns:xi="http://www.w3.org/2001/XInclude" Name="http://md.swamid.se/md/swamid-testing-idp-1.0.xml">
+EOF
+egrep -l "AttributeAuthorityDescriptor|IDPSSODescriptor" swamid-testing/*.xml | LC_ALL=C sort | while read file; do
+ echo " <xi:include href=\"$file\"/>" >> swamid-testing-idp-1.0.mxml
+done
+echo "</EntitiesDescriptor>" >> swamid-testing-idp-1.0.mxml
+
+cat << EOF > swamid-testing-sp-1.0.mxml
+<?xml version="1.0"?>
+<EntitiesDescriptor xmlns="urn:oasis:names:tc:SAML:2.0:metadata" xmlns:xi="http://www.w3.org/2001/XInclude" Name="http://md.swamid.se/md/swamid-testing-sp-1.0.xml">
+EOF
+grep -l SPSSO swamid-testing/*.xml | grep -v "`egrep -l "AttributeAuthorityDescriptor|IDPSSODescriptor" swamid-testing/*.xml`" | LC_ALL=C sort | while read file; do
+ echo " <xi:include href=\"$file\"/>" >> swamid-testing-sp-1.0.mxml
+done
+echo "</EntitiesDescriptor>" >> swamid-testing-sp-1.0.mxml
diff --git a/metadata/scripts/expiration_date.pl b/metadata/scripts/expiration_date.pl
new file mode 100755
index 00000000..cf297c5f
--- /dev/null
+++ b/metadata/scripts/expiration_date.pl
@@ -0,0 +1,11 @@
+#!/usr/bin/env perl
+
+use Date::Calc qw/Today_and_Now Add_Delta_Days/;
+my $delta = $ARGV[0];
+$delta = 30 unless $delta;
+
+($year,$month,$day,$hour,$min,$sec) = Today_and_Now();
+($year,$month,$day) = Add_Delta_Days($year,$month,$day,$delta);
+my $date = sprintf "%s-%02d-%02dT%02d:%02d:%02dZ",$year,$month,$day,$hour,$min,$sec;
+print $date;
+
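Prints a validUntil-style timestamp <days> days from now, defaulting to 30:

    perl scripts/expiration_date.pl 7   # -> e.g. 2025-07-01T12:00:00Z (format only; value depends on the clock)

Note that Date::Calc's Today_and_Now() returns local time unless passed a true "gmt" argument, so the trailing Z is only strictly accurate on hosts running in UTC.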
diff --git a/metadata/scripts/get-metadata.sh b/metadata/scripts/get-metadata.sh
new file mode 100755
index 00000000..16379681
--- /dev/null
+++ b/metadata/scripts/get-metadata.sh
@@ -0,0 +1,111 @@
+#!/bin/bash
+#
+# Fetch entity metadata (SP or IdP) and save it under its entityID-derived filename
+#
+
+error()
+{
+ echo "Error: $*" 1>&2
+ exit 1
+}
+
+metadataurl=$1
+if [ -z "$metadataurl" ] ; then
+ cat <<EOF
+Usage: `basename $0` <metadataurl>
+Ex: `basename $0` https://shibsp.mysite.com/Shibboleth.sso/Metadata
+ `basename $0` https://shibidp.mysite.com/idp/profile/Metadata/SAML
+ `basename $0` some-downloaded-metadata.xml
+ `basename $0` reep:<entityid>
+ `basename $0` <hostname> (tries to pull from standard locations)
+EOF
+ exit 1
+fi
+
+xmldir=swamid-2.0
+update_xml=true
+if echo "$metadataurl" | grep -qE '^http://|^https://' ; then
+ metadata=`curl -L -s -k -f "$metadataurl"`
+elif echo "$metadataurl" | grep -qE '^reep:' ; then
+ id=`echo -n "$metadataurl" | sed 's/^reep://' | sha1sum | awk '{print $1}'`
+ metadataurl="http://md.reep.refeds.org/entities/%7Bsha1%7D$id"
+ metadata=`curl -L -s -k -f "$metadataurl"`
+else
+ if [ -s "${metadataurl}" ]; then
+ metadata=`cat "$metadataurl"`
+ else
+ case $metadataurl in
+ [0-9][0-9]*)
+ xmldir=$(curl -L -m 5 -s -k -f "https://metadata.swamid.se/?show=feed&id=${metadataurl}")
+ urls="https://metadata.swamid.se/?rawXML=${metadataurl}"
+ update_xml=false
+ ;;
+ http*)
+ urls="$metadataurl"
+ ;;
+ *)
+ urls="https://${metadataurl}/idp/shibboleth https://${metadataurl}/Shibboleth.sso/Metadata https://${metadataurl}/saml/index/sp-metadata https://${metadataurl}/saml/metadata https://${metadataurl}/federationmetadata/2007-06/federationmetadata.xml"
+ ;;
+ esac
+ for i in ${urls}; do
+ metadata=`curl -L -m 5 -s -k -f "${i}"`
+ [ -n "${metadata}" ] && break
+ done
+ fi
+fi
+[ -n "$metadata" ] || error "Failed to fetch metadata from $metadataurl"
+
+script_cwd=`dirname "$0"`
+if test -d $xmldir ; then
+ echo "Moving into $xmldir/"
+ cd $xmldir
+ echo "$script_cwd" | grep -q ^/ || script_cwd=../$script_cwd
+fi
+
+entityid=`echo "$metadata" | sed -n 's/.*entityID=['\''"]\([^"]*\)['\''"].*/\1/p'`
+[ -n "$entityid" ] || error "Failed to find entityID in metadata"
+[ `echo "$entityid" | wc -l` = 1 ] || error "Multiple entityIDs found: `echo $entityid`"
+
+entityidfn=`echo "$entityid" | sed 's;.*://;;' | sed 's/[^a-zA-Z0-9_.-]/-/g' | sed 's/$/.xml/'`
+[ -n "$entityidfn" ] || error "Failed to generate filename from entityid $entityid"
+OLDFILE=$(find ../swamid-testing ../swamid-2.0 ../swamid-edugain -name $entityidfn | grep -v $xmldir)
+if [ -n "$OLDFILE" ]; then
+ echo "Moving $OLDFILE into $xmldir"
+ git mv $OLDFILE $entityidfn
+fi
+[ -r "$entityidfn" ] && new=false || new=true
+if $new ; then
+ echo -n "Save metadata into $entityidfn [Y/n]? "
+else
+ regdate=$(sed -n 's;.*RegistrationInfo.*registrationInstant="\([^"]*\)".*;\1;p' < "$entityidfn" | head -n 1)
+ echo -n "Replace $entityidfn with metadata [Y/n]? "
+fi
+[ -n "$regdate" ] || regdate=$(perl $script_cwd/../scripts/now_date.pl)
+
+read x
+case $x in
+ Y|y|"")
+ echo "$metadata" > $entityidfn
+ tmp=`mktemp`
+ if $update_xml ; then
+ xsltproc --stringparam regDate "$regdate" $script_cwd/../xslt/add-rpi.xsl ${entityidfn} > ${tmp} && mv ${tmp} ${entityidfn}
+ fi
+ xsltproc $script_cwd/../xslt/clean-entitydescriptor.xsl ${entityidfn} > ${tmp} && mv ${tmp} ${entityidfn}
+
+ if $new ; then
+ echo -n "Add ${xmldir}/$entityidfn to git [Y/n]? "
+ read x
+ case $x in
+ Y|y|"")
+ git add $entityidfn
+ ;;
+ *)
+ echo "Not added"
+ ;;
+ esac
+ fi
+ ;;
+ *)
+ echo "Nothing done"
+ ;;
+esac
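Typical invocations, run from the repository root (hostname is illustrative):

    # probe the well-known metadata locations for a host and save the result under its
    # entityID-derived filename, e.g. swamid-2.0/idp.example.se-idp-shibboleth.xml
    ./scripts/get-metadata.sh idp.example.se
    # or import a file that was delivered out of band
    ./scripts/get-metadata.sh some-downloaded-metadata.xml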
diff --git a/metadata/scripts/now_date.pl b/metadata/scripts/now_date.pl
new file mode 100644
index 00000000..1a5e6890
--- /dev/null
+++ b/metadata/scripts/now_date.pl
@@ -0,0 +1,7 @@
+#!/usr/bin/env perl
+
+use Date::Calc qw/Today_and_Now/;
+
+($year,$month,$day,$hour,$min,$sec) = Today_and_Now();
+my $date = sprintf "%s-%02d-%02dT%02d:%02d:%02dZ",$year,$month,$day,$hour,$min,$sec;
+print $date;
diff --git a/metadata/scripts/pull-and-verify.sh b/metadata/scripts/pull-and-verify.sh
new file mode 100755
index 00000000..8994fab1
--- /dev/null
+++ b/metadata/scripts/pull-and-verify.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+
+URL=$1
+DIR=$2
+CERT=$3
+
+if echo "$DIR" | grep -q "mds.swamid.se" ; then
+ publish_name=mds.swamid.se
+else
+ publish_name=md.swamid.se
+fi
+
+TMPF=`mktemp`
+curl -s -m 120 -k -L $URL > $TMPF; ret=$?
+if [ $ret -ne 0 ]; then
+ echo "Unable to download $URL: curl exit code $ret"
+ exit 1
+fi
+if [ "x$CERT" != "x" ]; then
+ xmlsec1 --verify --pubkey-cert-pem $CERT --id-attr:ID urn:oasis:names:tc:SAML:2.0:metadata:EntitiesDescriptor $TMPF; ret=$?
+ #samlsign -c $CERT -f $TMPF
+ if [ $ret -ne 0 ]; then
+ echo "Unable to verify $URL with $CERT: xmlsec1 exit code $ret"
+ exit 1
+ fi
+fi
+TMPD=`mktemp -d`
+xsltproc --stringparam output $TMPD xslt/import-metadata.xsl $TMPF; ret=$?
+if [ $ret -ne 0 ]; then
+ echo "Unable to import metadata from $URL: xsltproc exit code $ret"
+ exit 1
+fi
+rsync -avz $TMPD/ $DIR
+(
+echo '<?xml version="1.0"?>'
+echo "<EntitiesDescriptor xmlns=\"urn:oasis:names:tc:SAML:2.0:metadata\" xmlns:xi=\"http://www.w3.org/2001/XInclude\" Name=\"http://$publish_name/md/$DIR.xml\">"
+T=`mktemp`
+for md in $DIR/*.xml; do
+ xsltproc xslt/clean-entitydescriptor.xsl $md > $T && mv $T $md
+ test=`echo $md | cut -d/ -f2-`
+ if [ ! -f "swamid-2.0/$test" -a ! -f "swamid-edugain/$test" ]; then
+ echo "<xi:include href=\"$md\"/>"
+ fi
+done
+rm -f $T
+echo "</EntitiesDescriptor>"
+) > $DIR.mxml
+#git add $DIR.mxml $DIR
+#git commit -m "$URL into $DIR" $DIR.mxml $DIR
+rm -rf $TMPF $TMPD
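The same steps can be run by hand for a single feed; the URL, target directory and certificate below are placeholders:

    # download, verify the XML signature against a pinned certificate, then import
    # the entities into swamid-edugain/ via xslt/import-metadata.xsl
    ./scripts/pull-and-verify.sh https://mds.example.org/feed.xml swamid-edugain certs/feed-signer.pem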
diff --git a/metadata/scripts/remove_entity.sh b/metadata/scripts/remove_entity.sh
new file mode 100755
index 00000000..60645a7e
--- /dev/null
+++ b/metadata/scripts/remove_entity.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+error()
+{
+ echo "Error: $*"
+ exit 1
+}
+
+if [ -z "$1" ]; then
+ echo "Usage: $0 <metadata filename or entity id to remove>"
+ exit 1
+fi
+
+metadataurl=$1
+
+if [ -s "${metadataurl}" ]; then
+ entityidfn=$(basename "$metadataurl")
+else
+ case $metadataurl in
+ [0-9][0-9]*)
+ url="https://metadata.swamid.se/?rawXML=${metadataurl}"
+ ;;
+ https://metadata.swamid.se/?showEntity=*)
+ #url=$(echo "$metadataurl" | sed 's;showEntity;rawXML;')
+ url=${metadataurl//showEntity/rawXML}
+ ;;
+ esac
+ metadata=$(curl -L -m 5 -s -k -f "${url}")
+
+ [ -n "$metadata" ] || error "Failed to fetch metadata from $metadataurl"
+
+ entityid=$(echo "$metadata" | sed -n 's/.*entityID=['\''"]\([^"]*\)['\''"].*/\1/p')
+ [ -n "$entityid" ] || error "Failed to find entityID in metadata"
+ [ "$(echo "$entityid" | wc -l | sed 's/ //g')" = "1" ] || error "Multiple entityid:s found: $entityid"
+
+ entityidfn=$(echo "$entityid" | sed 's;.*://;;' | sed 's/[^a-zwA-ZW0-9_.-]/-/g' | sed 's/$/.xml/')
+ [ -n "$entityidfn" ] || error "Failed to generate filename from entityid $entityid"
+fi
+file=$(find swamid-testing swamid-2.0 swamid-edugain -name "$entityidfn" | head -1)
+
+if [ -r "$file" ]; then
+ echo "---------------------"
+ grep "$file" ./*.mxml
+ echo "---------------------"
+ echo -n "OK to remove ? [Y/n]"
+ read -r x
+ case $x in
+ "Y"|"y"|"") ;;
+ *) exit 1 ;;
+ esac
+
+ echo
+ git rm "$file"
+
+ grep "$file" ./*.mxml | sed 's/\(.*\.mxml\): <xi:include href=".*"\/>/\1/' | while read -r mxmlfile ; do
+ sed -i.bak "/\/$entityidfn/d" "$mxmlfile"
+ if [ -f "$mxmlfile".bak ]; then
+ rm "$mxmlfile".bak
+ fi
+ done
+ git diff
+ git status
+else
+ error "File not found: $file"
+fi
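The argument may be a checked-in file, a numeric metadata.swamid.se id, or a showEntity URL; for example (filename illustrative):

    # drop the entity from git and from every .mxml that includes it
    ./scripts/remove_entity.sh swamid-2.0/sp.example.se-shibboleth-sso-metadata.xml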
diff --git a/metadata/scripts/rename-xml.sh b/metadata/scripts/rename-xml.sh
new file mode 100755
index 00000000..4326579f
--- /dev/null
+++ b/metadata/scripts/rename-xml.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+#
+# Rename an existing metadata file to its entityID-derived filename
+#
+
+error()
+{
+ echo "Error: $*" 1>&2
+ exit 1
+}
+
+metadata=$1
+if [ -z "$metadata" ] ; then
+ cat <<EOF
+Usage: `basename $0` <file to check>
+Ex: `basename $0` account.eciu.eu.xml
+EOF
+ exit 1
+fi
+
+entityid=`cat "$metadata" | sed -n 's/.*entityID=['\''"]\([^"]*\)['\''"].*/\1/p'`
+[ -n "$entityid" ] || error "Failed to find entityID in metadata"
+[ `echo "$entityid" | wc -l` = 1 ] || error "Multiple entityIDs found: `echo $entityid`"
+
+entityidfn=`echo "$entityid" | sed 's;https*://;;' | sed 's/[^a-zA-Z0-9_.-]/-/g' | sed 's/$/.xml/'`
+[ -n "$entityidfn" ] || error "Failed to generate filename from entityid $entityid"
+
+[ -r "$entityidfn" ] && new=false || new=true
+if $new ; then
+ echo -n "Move $metadata into $entityidfn [Y/n]? "
+else
+ echo "$entityidfn and $metadata are the same"
+ exit
+fi
+read x
+case $x in
+ Y|y|"")
+ git mv $metadata $entityidfn
+
+ ;;
+ *)
+ echo "Nothing done"
+ ;;
+esac
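Example run from inside one of the feed directories, reusing the file name from the usage text:

    cd swamid-2.0
    ../scripts/rename-xml.sh account.eciu.eu.xml   # offers a git mv if the name does not match the entityID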
diff --git a/metadata/scripts/unique_id.pl b/metadata/scripts/unique_id.pl
new file mode 100644
index 00000000..18ec9e13
--- /dev/null
+++ b/metadata/scripts/unique_id.pl
@@ -0,0 +1,7 @@
+#!/usr/bin/env perl
+
+use Date::Calc qw/Today_and_Now/;
+
+($year,$month,$day,$hour,$min,$sec) = Today_and_Now();
+print sprintf "swamid%s%02d%02dT%02d%02d%02dZ",$year,$month,$day,$hour,$min,$sec;
+
diff --git a/metadata/scripts/update-and-validate.sh b/metadata/scripts/update-and-validate.sh
new file mode 100755
index 00000000..a4a1bddb
--- /dev/null
+++ b/metadata/scripts/update-and-validate.sh
@@ -0,0 +1,38 @@
+#!/bin/sh
+# Used on metadata.swamid.se and metadata.lab.swamid.se
+
+update()
+{
+# echo "git pull"
+ git pull -q || return $?
+
+ # Remove old entities from DB
+ ls swamid-edugain/*.xml swamid-2.0/*.xml swamid-testing/*.xml | sed 's@swamid-.*/\(.*\).xml@.time/\1.validate@' | sort > /tmp/$$.xml
+ ls .time/*.validate > /tmp/$$.time
+ comm -13 /tmp/$$.xml /tmp/$$.time | while read file; do
+ docker exec swamid-metadata-sp /var/www/scripts/removeEntity.bash /opt/metadata/$file
+ done
+ rm /tmp/$$.xml /tmp/$$.time
+
+ # remove some flag-files to get DB refreshed
+ if [ $(find .time/ -ctime +15 -type f | wc -l) -ne 0 ]; then
+ rm `find .time/ -ctime +15 -type f | tail -10`
+ fi
+
+ # Update/Import "new" xml-files
+ make -f Makefile.validate -s
+
+ # Check URLs
+ docker exec swamid-metadata-sp php /var/www/scripts/checkURLs.php
+
+ # update TestResults from release-check -> DB
+ docker exec swamid-metadata-sp php /var/www/scripts/updateTestResults.php
+
+ # Cleanup Pending-queue
+ docker exec swamid-metadata-sp php /var/www/scripts/cleanupPending.php
+}
+
+cd /opt/metadata || exit 1
+update
+# Fetch the published aggregate so that non-SWAMID entities can also be shown on the web page
+wget -qO swamid-2.0.xml https://mds.swamid.se/md/swamid-2.0.xml
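A sketch of how this could be scheduled on the metadata hosts; the interval and the /etc/cron.d placement are assumptions, not part of this patch:

    # /etc/cron.d/swamid-metadata (hypothetical): run every 10 minutes as root
    */10 * * * * root /opt/metadata/scripts/update-and-validate.sh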
diff --git a/metadata/scripts/update-cron.sh b/metadata/scripts/update-cron.sh
new file mode 100755
index 00000000..f00daa03
--- /dev/null
+++ b/metadata/scripts/update-cron.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+update()
+{
+ echo "git pull"
+ git pull || return $?
+ echo
+
+ echo "make aggregate"
+ make aggregate || return $?
+ echo
+
+ echo "make (2007) web site for backwards compat"
+ env SWAMID_MK_CONFIG=swamid2007.mk make web || return $?
+ echo
+
+ echo "make (2016)"
+ env SWAMID_MK_CONFIG=swamid2016.mk make || return $?
+ echo
+
+ #echo "make (2016 - test)"
+ #env SWAMID_MK_CONFIG=swamid2016-test.mk make # Allow this to fail
+ #echo
+}
+
+cd /opt/swamid-metadata || exit 1
+update
diff --git a/metadata/scripts/update.sh b/metadata/scripts/update.sh
new file mode 100755
index 00000000..5d1a9a2f
--- /dev/null
+++ b/metadata/scripts/update.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+update()
+{
+ echo "git pull"
+ git pull || return $?
+ echo
+
+ echo "make aggregate"
+ make aggregate || return $?
+ echo
+
+ echo "make test"
+ make test || return $?
+ echo
+
+ echo "make"
+ make || return $?
+ echo
+}
+
+cd /opt/swamid-metadata || exit 1
+output=$(update 2>&1)
+ret=$?
+if [ "$ret" != "0" ] ; then
+ echo "$output"
+ exit $ret
+fi
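Because the function's output is captured and only echoed when a step fails, the script stays silent on success and is cron-friendly. A manual run, assuming the checkout at /opt/swamid-metadata keeps the scripts/ layout used above:

    cd /opt/swamid-metadata && ./scripts/update.sh && echo "all steps succeeded"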
diff --git a/metadata/scripts/urls2hostport.py b/metadata/scripts/urls2hostport.py
new file mode 100644
index 00000000..c4ff33bf
--- /dev/null
+++ b/metadata/scripts/urls2hostport.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+# Read URLs on stdin and print their host[:port] part, one per line
+import sys
+from urllib.parse import urlparse
+for url in sys.stdin:
+    print(urlparse(url.strip()).netloc)
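A small stdin filter; for example (URL illustrative):

    echo "https://idp.example.org:8443/idp/shibboleth" | python3 scripts/urls2hostport.py
    # -> idp.example.org:8443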
diff --git a/metadata/scripts/verify-uiinfo-urls.sh b/metadata/scripts/verify-uiinfo-urls.sh
new file mode 100755
index 00000000..b7e1f40a
--- /dev/null
+++ b/metadata/scripts/verify-uiinfo-urls.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+x=$1
+if [ ! -f "$x" ]; then
+ exit
+fi
+
+for i in `cat $x | sed 's;\(</*\)[a-z0-9]*:;\1;g' | sed 's/xmlns="[^"]*"//' | xmllint --xpath /EntityDescriptor//Extensions/UIInfo - 2>/dev/null |grep -E "https?://" | awk -F'[><]' '{print $3}' | sort -u`; do
+ code=`curl -L --connect-timeout 5 --retry 2 -o /dev/null --silent --head --write-out '%{http_code}\n' $i`
+ if [ $code -eq 405 ]; then
+ code=`curl -L --connect-timeout 5 --retry 2 -o /dev/null --silent --write-out '%{http_code}\n' $i`
+ fi
+ if [ $code -ne 200 ]; then
+ echo " Code $code: $x: $i" | sed 's/.*/&/'
+ fi
+done
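Checks every http(s) URL found in an entity's mdui:UIInfo block and reports those that do not eventually answer 200. A possible sweep over a whole feed:

    for f in swamid-2.0/*.xml; do ./scripts/verify-uiinfo-urls.sh "$f"; done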
diff --git a/metadata/scripts/weekly-routines-metadata.sh b/metadata/scripts/weekly-routines-metadata.sh
new file mode 100755
index 00000000..e6178165
--- /dev/null
+++ b/metadata/scripts/weekly-routines-metadata.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+# Used on metadata.swamid.se and metadata.lab.swamid.se
+
+# Store statistics
+docker exec swamid-metadata-sp php /var/www/scripts/saveWeekStats.php
+
+#Remove old URLs
+docker exec swamid-metadata-sp php /var/www/scripts/checkOldURLs.php