author     Anthony G. Basile <blueness@gentoo.org>   2011-05-01 12:24:52 -0400
committer  Anthony G. Basile <blueness@gentoo.org>   2011-05-01 12:24:52 -0400
commit     6b060c7792c98bbde56eccbf3077580b35d80218 (patch)
tree       9d760ba4e4f22231902820aefc8e9bb288c6676e /scripts
parent     Update Grsec/PaX (diff)
download   hardened-patchset-6b060c7792c98bbde56eccbf3077580b35d80218.tar.gz
           hardened-patchset-6b060c7792c98bbde56eccbf3077580b35d80218.tar.bz2
           hardened-patchset-6b060c7792c98bbde56eccbf3077580b35d80218.zip
Added script to automatically retrieve patches
Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/ap.patch.sh                32
-rwxr-xr-x  scripts/fetch_grsecurity_test.pl  209
-rwxr-xr-x  scripts/just_fetch.pl             107
-rwxr-xr-x  scripts/mk.patch.sh                32
-rwxr-xr-x  scripts/re.patch.sh                31
5 files changed, 411 insertions, 0 deletions
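
A rough usage sketch for the new scripts (the kernel version and patch level below are placeholders, not values taken from this commit; ap.patch.sh and re.patch.sh read from the hard-coded /root/hardened-patchset/<kernel_major> directory and run patch -p1 against the current working directory):

	# fetch newly announced grsecurity test patches into the local storage directory
	scripts/fetch_grsecurity_test.pl

	# from inside an unpacked kernel tree, apply or reverse the patchset for a kernel major version
	scripts/ap.patch.sh 2.6.38
	scripts/re.patch.sh 2.6.38

	# tar up and sign a patchset directory as hardened-patches-<version>-<patchlevel>.extras.tar.bz2
	scripts/mk.patch.sh 2.6.38 1
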
diff --git a/scripts/ap.patch.sh b/scripts/ap.patch.sh
new file mode 100755
index 0000000..9ad2c24
--- /dev/null
+++ b/scripts/ap.patch.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+SCRIPT="${0}"
+KMAJOR="${1}"
+PATCHSET="/root/hardened-patchset/${KMAJOR}"
+
+if [[ -z "${KMAJOR}" ]] ; then
+	echo "Usage: ${SCRIPT} <kernel_major>"
+	exit 1
+fi
+
+if [[ ! -d "${PATCHSET}" ]] ; then
+	echo "Patchset ${KMAJOR} doesn't exist"
+	exit 1
+fi
+
+for p in $(ls "${PATCHSET}") ; do
+	if [[ ${p#0000} == $p ]] ; then	# skip anything beginning with 0000
+		echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+		echo
+		echo "$p"
+		echo
+		patch -p 1 --dry-run < "${PATCHSET}/${p}"
+		echo "Cont?"
+		read -r n
+		echo
+		patch -p 1 < "${PATCHSET}/${p}"
+		echo
+		echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+	fi
+done
+
diff --git a/scripts/fetch_grsecurity_test.pl b/scripts/fetch_grsecurity_test.pl
new file mode 100755
index 0000000..2211b63
--- /dev/null
+++ b/scripts/fetch_grsecurity_test.pl
@@ -0,0 +1,209 @@
+#!/usr/bin/perl -w
+
+# Fetches newly released patches, tarballs, etc. that have been
+# announced on a web page and stores them locally.
+#
+# Copyright (C) 2010, Anthony G. Basile <blueness@gentoo.org>
+# Released under the GPLv2
+
+use strict ;
+use LWP::Simple ;
+use HTML::LinkExtor ;
+
+############################################################
+### Edit these to suit your needs ##########################
+############################################################
+
+my $storage_dir = "/home/basile/storage/grsecurity-test" ;
+my $upstream_url = "http://grsecurity.net/test.php" ;
+my @allowed_suffixes = ( ".patch", ".patch.sig", ".tar.gz", ".tar.gz.sig", ".asc" ) ;
+
+############################################################
+
+my $send_email = 1 ; # do you want to send email alerts
+
+my $sendmail = "/usr/sbin/sendmail -t" ;
+
+my $from = "From: " . "root\@opensource.dyc.edu\n" ;
+my $subject = "Subject: " . "New release from $upstream_url\n" ;
+my $reply_to = "Reply-to: " . "devnull\@localhost.invalid\n" ;
+my $send_to = "To: " . "basile\@opensource.dyc.edu\n" ;
+
+############################################################
+
+my %already_retrieved = () ;	# set of already retrieved files
+my %currently_available = () ;	# set of currently available files
+
+
+sub sane
+{
+ my ( $name ) = @_ ;
+
+ return 0 if $name eq "" ; # no empty names
+ return 0 if $name =~ / / ; # no blanks in names
+
+ my $got_suffix = 0 ; # file must have legitimate suffix
+ foreach my $suffix ( @allowed_suffixes )
+ {
+		$got_suffix = 1 if $name =~ /\Q$suffix\E$/ ;	# quote the suffix so its dots match literally
+ }
+
+ return $got_suffix ;
+}
+
+
+sub get_already_retrieved
+{
+ if ( -d $storage_dir ) # check if storage_dir exists
+ {
+ my @file_names = `ls $storage_dir` ; # and get list of files
+ foreach my $file_name ( @file_names )
+ {
+ chomp( $file_name ) ;
+ $already_retrieved{ $file_name } = 1 if sane( $file_name ) ;
+ }
+ }
+ else # else create a new storage_dir
+ {
+		mkdir $storage_dir or die "Sorry I can't make $storage_dir\n" ;	# low-precedence "or" so a failed mkdir is fatal
+ print "\n\nCreated storage dir: $storage_dir\n\n" ;
+ }
+
+}
+
+
+sub print_already_retrieved
+{
+ print "\n\nAlready retrieved files from upstream:\n\n" ;
+ foreach my $file_name ( sort keys %already_retrieved ) # go through hash of already_retrieved files
+ {
+ print "\t$file_name\n" ; # and print
+ }
+ print "\n\n" ;
+}
+
+
+sub get_currently_available
+{
+ my $parser ;
+ my @links ;
+
+ $parser = HTML::LinkExtor->new( undef, $upstream_url ) ; # grab upstream web page
+ $parser->parse( get( $upstream_url ) )->eof ;
+
+ @links = $parser->links ; # grab the links out of it
+
+ foreach my $ref ( @links )
+ {
+ my $file_url = ${$ref}[2] ; # get just the url part
+ my $file_name = $file_url ;
+ $file_name =~ s/^.*\/(.*)$/$1/ ; # parse out the file name from the url
+
+ next unless sane( $file_name ) ; # if it fits the sane file names
+
+ $currently_available{ $file_name } = $file_url ; # insert it and its url as key=>value in currently_available
+ }
+}
+
+
+sub print_currently_available
+{
+ print "\n\nCurrently available files from upstream:\n\n" ;
+ foreach my $file_name ( sort keys %currently_available ) # go through hash of currently_available files
+ {
+ my $file_url = $currently_available{$file_name} ;
+ print "\t$file_name\n" ; # and print
+ #print "\t$file_name @ $file_url\n" ;
+ }
+ print "\n\n" ;
+}
+
+
+sub download_newly_available
+{
+ my $downloads = "" ;
+
+	chdir( $storage_dir ) or die "Cannot chdir to $storage_dir: $!\n" ;
+	foreach my $file_name ( sort keys %currently_available )	# go through each of the currently_available files
+	{
+		next if $already_retrieved{ $file_name } ;	# and skip it if it has already been retrieved
+		print "\tDownloading $file_name ... " ;
+		my $file_url = $currently_available{ $file_name } ;
+		if ( is_success( getstore( $file_url, $file_name ) ) )	# download it; getstore() returns an HTTP status code
+ {
+ print "OK\n" ;
+ $downloads .= "\t$file_name\n" ;
+ }
+ else
+ {
+ print "FAIL\n" ;
+ }
+ }
+
+ return $downloads ;
+}
+
+
+sub print_successful_downloads
+{
+ my ( $downloads ) = @_ ;
+
+ if( $downloads ne "" )
+ {
+ print "\n\nSuccessfully downloaded files from upstream:\n\n" ;
+ print $downloads ;
+ print "\n\n" ;
+ }
+ else
+ {
+ print "\n\nNo files downloaded from upstream --- nothing to report.\n\n" ;
+ print "\n\n" ;
+ }
+}
+
+
+sub email_successful_downloads
+{
+ my ( $downloads ) = @_ ;
+
+ if( $send_email == 1 && $downloads ne "" )
+ {
+ print "\n\nEmailing notification of successfully downloaded files $send_to.\n\n" ;
+
+ my $content = "\n\nSuccessfully downloaded files from upstream:\n\n" ;
+ $content .= $downloads ;
+ $content .= "\n\n" ;
+
+ open (SENDMAIL, "|$sendmail") or die "Cannot open $sendmail: $!";
+ print SENDMAIL $from ;
+ print SENDMAIL $subject ;
+ print SENDMAIL $reply_to ;
+ print SENDMAIL $send_to;
+ print SENDMAIL "Content-type: text/plain\n\n";
+ print SENDMAIL $content;
+ close(SENDMAIL);
+ }
+ else
+ {
+ print "\n\nNo files downloaded from upstream --- nothing to email.\n\n" ;
+ print "\n\n" ;
+ }
+}
+
+
+sub main
+{
+ get_already_retrieved() ;
+ print_already_retrieved() ;
+
+ get_currently_available() ;
+ print_currently_available() ;
+
+ my $downloads = download_newly_available() ;
+
+ print_successful_downloads( $downloads ) ;
+ email_successful_downloads( $downloads ) ;
+}
+
+main() ;
+
diff --git a/scripts/just_fetch.pl b/scripts/just_fetch.pl
new file mode 100755
index 0000000..bc9f921
--- /dev/null
+++ b/scripts/just_fetch.pl
@@ -0,0 +1,107 @@
+#!/usr/bin/perl -w
+
+use strict ;
+use LWP::Simple ;
+use HTML::LinkExtor ;
+
+my $upstream_url = "http://grsecurity.net/test.php" ;
+
+my $file_pattern = "grsecurity-2.2.2-";
+my @allowed_suffixes = ( ".patch", ".patch.sig" ) ;
+
+my %currently_available = () ;
+
+
+sub sane
+{
+ my ( $name ) = @_ ;
+
+ return 0 if $name eq "" ;
+ return 0 if $name =~ / / ;
+
+ my $got_suffix = 0 ;
+ foreach my $suffix ( @allowed_suffixes )
+ {
+		$got_suffix = 1 if $name =~ /\Q$suffix\E$/ ;	# quote the suffix so its dots match literally
+ }
+
+ return $got_suffix ;
+}
+
+
+sub get_currently_available
+{
+ my $parser ;
+ my @links ;
+
+ $parser = HTML::LinkExtor->new( undef, $upstream_url ) ;
+ $parser->parse( get( $upstream_url ) )->eof ;
+
+ @links = $parser->links ;
+
+ foreach my $ref ( @links )
+ {
+ my $file_url = ${$ref}[2] ;
+ my $file_name = $file_url ;
+ $file_name =~ s/^.*\/(.*)$/$1/ ;
+
+ next unless sane( $file_name ) ;
+
+ $currently_available{ $file_name } = $file_url ;
+ }
+}
+
+
+sub download_newly_available
+{
+ my $downloads = "" ;
+
+ foreach my $file_name ( sort keys %currently_available )
+ {
+		next unless $file_name =~ /\Q$file_pattern\E/ ;	# match the version prefix literally
+		print "\tDownloading $file_name ... " ;
+		my $file_url = $currently_available{ $file_name } ;
+		if ( is_success( getstore( $file_url, $file_name ) ) )	# getstore() returns an HTTP status code
+ {
+ print "OK\n" ;
+ $downloads .= "\t$file_name\n" ;
+ }
+ else
+ {
+ print "FAIL\n" ;
+ }
+ }
+
+ return $downloads ;
+}
+
+
+sub print_successful_downloads
+{
+ my ( $downloads ) = @_ ;
+
+ if( $downloads ne "" )
+ {
+ print "\n\nSuccessfully downloaded files from upstream:\n\n" ;
+ print $downloads ;
+ print "\n\n" ;
+ }
+ else
+ {
+ print "\n\nNo files downloaded from upstream --- nothing to report.\n\n" ;
+ print "\n\n" ;
+ }
+}
+
+
+sub main
+{
+ get_currently_available() ;
+ my $downloads = download_newly_available() ;
+
+ print_successful_downloads( $downloads ) ;
+}
+
+main() ;
+
+
diff --git a/scripts/mk.patch.sh b/scripts/mk.patch.sh
new file mode 100755
index 0000000..0086237
--- /dev/null
+++ b/scripts/mk.patch.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+SCRIPT_NAME="${0}"
+KERNEL_VER="${1%/}"
+HGPV_PATCH="${2}"
+TAR="/bin/tar"
+GPG="/usr/bin/gpg"
+SIGNAS="basile"
+
+################################################################################
+
+die() {
+ echo "$1"
+	exit 1
+}
+
+sanity() {
+ [[ -d "${KERNEL_VER}" ]] || die "Can't find ${KERNEL_VER} directory"
+	[[ -e "${TAR}" && -e "${GPG}" ]] || die "Can't find tar or gpg"
+ [[ -n "${HGPV_PATCH}" ]] || die "No HGPV patchlevel given"
+}
+
+################################################################################
+
+sanity
+
+HGPV="${KERNEL_VER}-${HGPV_PATCH}"
+HGPV_TARBALL="hardened-patches-${HGPV}.extras.tar.bz2"
+
+$TAR jcvf "${HGPV_TARBALL}" "${KERNEL_VER}"
+
+$GPG -a -s -b --default-key "${SIGNAS}" "${HGPV_TARBALL}"
diff --git a/scripts/re.patch.sh b/scripts/re.patch.sh
new file mode 100755
index 0000000..d1ac153
--- /dev/null
+++ b/scripts/re.patch.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+SCRIPT="${0}"
+KMAJOR="${1}"
+PATCHSET="/root/hardened-patchset/${KMAJOR}"
+
+if [[ -z "${KMAJOR}" ]] ; then
+	echo "Usage: ${SCRIPT} <kernel_major>"
+	exit 1
+fi
+
+if [[ ! -d "${PATCHSET}" ]] ; then
+	echo "Patchset ${KMAJOR} doesn't exist"
+	exit 1
+fi
+
+for p in $(ls "${PATCHSET}" | sort -r) ; do	# reverse order: back patches out last-applied-first
+	if [[ ${p#0000} == $p ]] ; then	# skip anything beginning with 0000
+		echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+		echo
+		echo "$p"
+		echo
+		echo "Cont?"
+		read -r n
+		echo
+		patch -p 1 -R < "${PATCHSET}/${p}"
+		echo
+		echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+	fi
+done
+
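
The fetcher looks intended for unattended runs, since it can email alerts when new files appear; a hedged example of scheduling it, assuming an install path of /root/hardened-patchset/scripts, cron, and a working sendmail (none of this is part of the commit itself):

	# /etc/crontab entry: poll upstream for new test patches once a day at 03:15
	15 3 * * *	root	/root/hardened-patchset/scripts/fetch_grsecurity_test.pl >> /var/log/fetch_grsecurity.log 2>&1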