Gentoo Archives: gentoo-commits

From: "Anthony G. Basile" <blueness@g.o>
To: gentoo-commits@l.g.o
Subject: [gentoo-commits] proj/hardened-patchset:master commit in: scripts/, /
Date: Sun, 01 May 2011 16:25:46
Message-Id: 6b060c7792c98bbde56eccbf3077580b35d80218.blueness@gentoo
commit:     6b060c7792c98bbde56eccbf3077580b35d80218
Author:     Anthony G. Basile <blueness <AT> gentoo <DOT> org>
AuthorDate: Sun May 1 16:24:52 2011 +0000
Commit:     Anthony G. Basile <blueness <AT> gentoo <DOT> org>
CommitDate: Sun May 1 16:24:52 2011 +0000
URL:        http://git.overlays.gentoo.org/gitweb/?p=proj/hardened-patchset.git;a=commit;h=6b060c77

Added script to automatically retrieve patches

---
 ap.patch.sh => scripts/ap.patch.sh |   0
 scripts/fetch_grsecurity_test.pl   | 209 ++++++++++++++++++++++++++++++++++++
 scripts/just_fetch.pl              | 107 ++++++++++++++++++
 mk.patch.sh => scripts/mk.patch.sh |   0
 re.patch.sh => scripts/re.patch.sh |   0
 5 files changed, 316 insertions(+), 0 deletions(-)

diff --git a/ap.patch.sh b/scripts/ap.patch.sh
similarity index 100%
rename from ap.patch.sh
rename to scripts/ap.patch.sh

diff --git a/scripts/fetch_grsecurity_test.pl b/scripts/fetch_grsecurity_test.pl
new file mode 100755
index 0000000..2211b63
--- /dev/null
+++ b/scripts/fetch_grsecurity_test.pl
@@ -0,0 +1,209 @@
+#!/usr/bin/perl -w
+
+# Fetches newly released patches, tarballs, etc. that have been
+# announced on a web page and stores them locally.
+#
+# Copyright (C) 2010, Anthony G. Basile <blueness@g.o>
+# Released under the GPLv2
+
+use strict ;
+use LWP::Simple ;
+use HTML::LinkExtor ;
+
+############################################################
+### Edit these to suit your needs ##########################
+############################################################
+
+my $storage_dir = "/home/basile/storage/grsecurity-test" ;
+my $upstream_url = "http://grsecurity.net/test.php" ;
+my @allowed_suffixes = ( ".patch", ".patch.sig", ".tar.gz", ".tar.gz.sig", ".asc" ) ;
+
+############################################################
+
+my $send_email = 1 ; # do you want to send email alerts
+
+my $sendmail = "/usr/sbin/sendmail -t" ;
+
+my $from = "From: " . "root\@opensource.dyc.edu\n" ;
+my $subject = "Subject: " . "New release from $upstream_url\n" ;
+my $reply_to = "Reply-to: " . "devnull\@localhost.invalid\n" ;
+my $send_to = "To: " . "basile\@opensource.dyc.edu\n" ;
+
+############################################################
+
+my %already_retrieved = () ; # set of already retrieved files
+my %currently_available = () ; # set of currently available files
+
+
+sub sane
+{
+    my ( $name ) = @_ ;
+
+    return 0 if $name eq "" ; # no empty names
+    return 0 if $name =~ / / ; # no blanks in names
+
+    my $got_suffix = 0 ; # file must have legitimate suffix
+    foreach my $suffix ( @allowed_suffixes )
+    {
+        $got_suffix = 1 if $name =~ /$suffix$/ ;
+    }
+
+    return $got_suffix ;
+}
+
+
+sub get_already_retrieved
+{
+    if ( -d $storage_dir ) # check if storage_dir exists
+    {
+        my @file_names = `ls $storage_dir` ; # and get list of files
+        foreach my $file_name ( @file_names )
+        {
+            chomp( $file_name ) ;
+            $already_retrieved{ $file_name } = 1 if sane( $file_name ) ;
+        }
+    }
+    else # else create a new storage_dir
+    {
+        mkdir $storage_dir or die "Sorry I can't make $storage_dir\n" ;
+        print "\n\nCreated storage dir: $storage_dir\n\n" ;
+    }
+
+}
+
+
+sub print_already_retrieved
+{
+    print "\n\nAlready retrieved files from upstream:\n\n" ;
+    foreach my $file_name ( sort keys %already_retrieved ) # go through hash of already_retrieved files
+    {
+        print "\t$file_name\n" ; # and print
+    }
+    print "\n\n" ;
+}
+
+
+sub get_currently_available
+{
+    my $parser ;
+    my @links ;
+
+    $parser = HTML::LinkExtor->new( undef, $upstream_url ) ; # grab upstream web page
+    $parser->parse( get( $upstream_url ) )->eof ;
+
+    @links = $parser->links ; # grab the links out of it
+
+    foreach my $ref ( @links )
+    {
+        my $file_url = ${$ref}[2] ; # get just the url part
+        my $file_name = $file_url ;
+        $file_name =~ s/^.*\/(.*)$/$1/ ; # parse out the file name from the url
+
+        next unless sane( $file_name ) ; # skip it unless the file name is sane
+
+        $currently_available{ $file_name } = $file_url ; # insert it and its url as key=>value in currently_available
+    }
+}
+
+
+sub print_currently_available
+{
+    print "\n\nCurrently available files from upstream:\n\n" ;
+    foreach my $file_name ( sort keys %currently_available ) # go through hash of currently_available files
+    {
+        my $file_url = $currently_available{$file_name} ;
+        print "\t$file_name\n" ; # and print
+        #print "\t$file_name @ $file_url\n" ;
+    }
+    print "\n\n" ;
+}
+
+
+sub download_newly_available
+{
+    my $downloads = "" ;
+
+    chdir( $storage_dir ) ;
+    foreach my $file_name ( sort keys %currently_available ) # go through each of the currently_available files
+    {
+        next if $already_retrieved{ $file_name } ; # skip it if it has already been retrieved
+        print "\tDownloading $file_name ... " ;
+        my $file_url = $currently_available{ $file_name } ;
+        if ( is_success( getstore( $file_url, $file_name ) ) ) # download it and report success/failure
+        {
+            print "OK\n" ;
+            $downloads .= "\t$file_name\n" ;
+        }
+        else
+        {
+            print "FAIL\n" ;
+        }
+    }
+
+    return $downloads ;
+}
+
+
+sub print_successful_downloads
+{
+    my ( $downloads ) = @_ ;
+
+    if( $downloads ne "" )
+    {
+        print "\n\nSuccessfully downloaded files from upstream:\n\n" ;
+        print $downloads ;
+        print "\n\n" ;
+    }
+    else
+    {
+        print "\n\nNo files downloaded from upstream --- nothing to report.\n\n" ;
+        print "\n\n" ;
+    }
+}
+
+
+sub email_successful_downloads
+{
+    my ( $downloads ) = @_ ;
+
+    if( $send_email == 1 && $downloads ne "" )
+    {
+        print "\n\nEmailing notification of successfully downloaded files $send_to.\n\n" ;
+
+        my $content = "\n\nSuccessfully downloaded files from upstream:\n\n" ;
+        $content .= $downloads ;
+        $content .= "\n\n" ;
+
+        open (SENDMAIL, "|$sendmail") or die "Cannot open $sendmail: $!";
+        print SENDMAIL $from ;
+        print SENDMAIL $subject ;
+        print SENDMAIL $reply_to ;
+        print SENDMAIL $send_to;
+        print SENDMAIL "Content-type: text/plain\n\n";
+        print SENDMAIL $content;
+        close(SENDMAIL);
+    }
+    else
+    {
+        print "\n\nNo files downloaded from upstream --- nothing to email.\n\n" ;
+        print "\n\n" ;
+    }
+}
+
+
+sub main
+{
+    get_already_retrieved() ;
+    print_already_retrieved() ;
+
+    get_currently_available() ;
+    print_currently_available() ;
+
+    my $downloads = download_newly_available() ;
+
+    print_successful_downloads( $downloads ) ;
+    email_successful_downloads( $downloads ) ;
+}
+
+main() ;
+

diff --git a/scripts/just_fetch.pl b/scripts/just_fetch.pl
new file mode 100755
index 0000000..bc9f921
--- /dev/null
+++ b/scripts/just_fetch.pl
@@ -0,0 +1,107 @@
+#!/usr/bin/perl
+
+use strict ;
+use LWP::Simple ;
+use HTML::LinkExtor ;
+
+my $upstream_url = "http://grsecurity.net/test.php" ;
+
+my $file_pattern = "grsecurity-2.2.2-";
+my @allowed_suffixes = ( ".patch", ".patch.sig" ) ;
+
+my %currently_available = () ;
+
+
+sub sane
+{
+    my ( $name ) = @_ ;
+
+    return 0 if $name eq "" ;
+    return 0 if $name =~ / / ;
+
+    my $got_suffix = 0 ;
+    foreach my $suffix ( @allowed_suffixes )
+    {
+        $got_suffix = 1 if $name =~ /$suffix$/ ;
+    }
+
+    return $got_suffix ;
+}
+
+
+sub get_currently_available
+{
+    my $parser ;
+    my @links ;
+
+    $parser = HTML::LinkExtor->new( undef, $upstream_url ) ;
+    $parser->parse( get( $upstream_url ) )->eof ;
+
+    @links = $parser->links ;
+
+    foreach my $ref ( @links )
+    {
+        my $file_url = ${$ref}[2] ;
+        my $file_name = $file_url ;
+        $file_name =~ s/^.*\/(.*)$/$1/ ;
+
+        next unless sane( $file_name ) ;
+
+        $currently_available{ $file_name } = $file_url ;
+    }
+}
+
+
+sub download_newly_available
+{
+    my $downloads = "" ;
+
+    foreach my $file_name ( sort keys %currently_available )
+    {
+        next unless $file_name =~ /$file_pattern/ ;
+        print "\tDownloading $file_name ... " ;
+        my $file_url = $currently_available{ $file_name } ;
+        if ( is_success( getstore( $file_url, $file_name ) ) )
+        {
+            print "OK\n" ;
+            $downloads .= "\t$file_name\n" ;
+        }
+        else
+        {
+            print "FAIL\n" ;
+        }
+    }
+
+    return $downloads ;
+}
+
+
+sub print_successful_downloads
+{
+    my ( $downloads ) = @_ ;
+
+    if( $downloads ne "" )
+    {
+        print "\n\nSuccessfully downloaded files from upstream:\n\n" ;
+        print $downloads ;
+        print "\n\n" ;
+    }
+    else
+    {
+        print "\n\nNo files downloaded from upstream --- nothing to report.\n\n" ;
+        print "\n\n" ;
+    }
+}
+
+
+sub main
+{
+    get_currently_available() ;
+    my $downloads = download_newly_available() ;
+
+    print_successful_downloads( $downloads ) ;
+}
+
+main() ;
+
+

diff --git a/mk.patch.sh b/scripts/mk.patch.sh
similarity index 100%
rename from mk.patch.sh
rename to scripts/mk.patch.sh

diff --git a/re.patch.sh b/scripts/re.patch.sh
similarity index 100%
rename from re.patch.sh
rename to scripts/re.patch.sh
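
Both Perl scripts added above follow the same pattern: fetch the upstream page with
LWP::Simple, pull its links out with HTML::LinkExtor, keep only file names that end
in an allowed suffix, and download each one with getstore(). The condensed sketch
below shows that flow on its own. The URL and suffix list are taken from
just_fetch.pl; the -e test and the is_success() check are simplifications that stand
in for the scripts' own already-retrieved bookkeeping and status handling, not part
of the committed code.

#!/usr/bin/perl
# Minimal sketch of the shared fetch flow: list the links on the upstream
# page, keep the ones that look like patches, and download anything that is
# not already present in the current directory.
use strict ;
use warnings ;
use LWP::Simple ;      # get(), getstore(), is_success()
use HTML::LinkExtor ;

my $upstream_url     = "http://grsecurity.net/test.php" ;
my @allowed_suffixes = ( ".patch", ".patch.sig" ) ;

my $page = get( $upstream_url ) ;
die "Could not fetch $upstream_url\n" unless defined $page ;

# Passing the base URL makes HTML::LinkExtor return absolute links.
my $parser = HTML::LinkExtor->new( undef, $upstream_url ) ;
$parser->parse( $page )->eof ;

foreach my $ref ( $parser->links )
{
    my $file_url = ${ $ref }[2] ;            # url of the link's first attribute
    next unless defined $file_url ;
    my $file_name = "$file_url" ;
    $file_name =~ s/^.*\/// ;                # keep only the file name
    next unless grep { $file_name =~ /\Q$_\E$/ } @allowed_suffixes ;
    next if -e $file_name ;                  # skip files already on disk
    print "Downloading $file_name ... " ;
    print is_success( getstore( $file_url, $file_name ) ) ? "OK\n" : "FAIL\n" ;
}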