| author    | Michael Meeks <michael.meeks@suse.com>   | 2013-05-23 16:34:23 +0100 |
|-----------|-------------------------------------------|---------------------------|
| committer | Michael Meeks <michael.meeks@suse.com>   | 2013-05-24 10:24:55 +0100 |
| commit    | 75ed8ae7e5b1eb1e6b53515ef5c8c5e6798986b6  |                           |
| tree      | d46b74aa7e84080e36795db8145b218f0df1adcc  |                           |
| parent    | 2ac4beabe30e52d019d23143221dbbfe06830f31  |                           |
combine and layer links.txt files into the images*.zip archives.
Change-Id: I0dd6730ac3bef95843de25d6813678273cf09832
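
The links.txt format consumed by the read_links() routine added below is a plain two-column mapping, one entry per line. A purely illustrative example (the icon names here are invented, not taken from the tree):

```
# '#' starts a comment; blank lines are ignored
# missing-image            image-to-load-instead
cmd/sc_exampleicon.png     cmd/lc_exampleicon.png
res/another_example.png    cmd/sc_exampleicon.png
```

Because read_links() is called on the global path first and then on each custom path, an entry found in a later (custom) layer overwrites the global one in the consolidated %links hash before it is written into the archive as links.txt.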
Diffstat (limited to 'solenv/bin/packimages.pl')
-rw-r--r-- | solenv/bin/packimages.pl | 68 |
1 files changed, 67 insertions, 1 deletions
```diff
diff --git a/solenv/bin/packimages.pl b/solenv/bin/packimages.pl
index 11fc9cf6ad5a..5c84dc647a45 100644
--- a/solenv/bin/packimages.pl
+++ b/solenv/bin/packimages.pl
@@ -27,6 +27,8 @@ use strict;
 use Getopt::Long;
 use File::Find;
 use File::Basename;
+require File::Temp;
+use File::Temp ();
 use Archive::Zip qw(:ERROR_CODES :CONSTANTS);
 
 #### globals ####
@@ -64,10 +66,18 @@ $do_rebuild = is_file_newer(\%image_lists_hash) if $do_rebuild == 0;
 my ($global_hash_ref, $module_hash_ref, $custom_hash_ref) = iterate_image_lists($image_lists_ref);
 # custom_hash filled from filesystem lookup
 find_custom($custom_hash_ref);
+
+# build a consolidated set of links
+my %links;
+read_links(\%links, $global_path);
+for my $path (@custom_path) {
+    read_links(\%links, $path);
+}
+
 my $zip_hash_ref = create_zip_list($global_hash_ref, $module_hash_ref, $custom_hash_ref);
 $do_rebuild = is_file_newer($zip_hash_ref) if $do_rebuild == 0;
 if ( $do_rebuild == 1 ) {
-    create_zip_archive($zip_hash_ref);
+    create_zip_archive($zip_hash_ref, \%links);
     replace_file($tmp_out_file, $out_file);
     print_message("packing $out_file finished.") if $verbose;
 } else {
@@ -333,10 +343,19 @@ sub optimize_zip_layout($)
 sub create_zip_archive
 {
     my $zip_hash_ref = shift;
+    my $links_hash_ref = shift;
 
     print_message("creating image archive ...") if $verbose;
     my $zip = Archive::Zip->new();
+    if (keys %{$links_hash_ref}) {
+        my $linktmp = write_links($links_hash_ref);
+        my $member = $zip->addFile($linktmp->filename, "links.txt", COMPRESSION_DEFLATED);
+        if (!$member) {
+            print_error("failed to add links file: $!", 5);
+        }
+    }
+
     # FIXME: test - $member = addfile ... $member->desiredCompressionMethod( COMPRESSION_STORED ); # any measurable performance win/loss ?
     foreach ( optimize_zip_layout($zip_hash_ref) ) {
@@ -425,3 +444,50 @@ sub print_error
     }
     return;
 }
+
+sub read_links($$)
+{
+    my $links = shift;
+    my $path = shift;
+
+    my $fname = "$path/links.txt";
+    if (!-f "$fname") {
+        print STDERR "no links in $fname\n";
+        return;
+    }
+
+    my $fh;
+    open ($fh, $fname) || die "Can't open: $fname: $!";
+    # Syntax of links file:
+    #   # comment
+    #   missing-image image-to-load-instead
+    while (<$fh>) {
+        my $line = $_;
+        $line =~ s/\r//g;            # DOS line-feeds
+        $line =~ s/\#.*$//;          # kill comments
+        $line =~ m/^\s*$/ && next;   # blank lines
+        if ($line =~ m/^([^\s]+)\s+(.*)$/) {
+            my ($missing, $replace) = ($1, $2);
+            # enter into hash, and overwrite previous layer if necessary
+            $links->{$1} = $2;
+        } else {
+            die "Malformed links line: '$line'\n";
+        }
+    }
+    close ($fh);
+}
+
+# write out the links to a tmp file
+sub write_links($)
+{
+    my $links = shift;
+    my $tmp = File::Temp->new( TEMPLATE => "linksXXXXXXX",
+                               UNLINK => 0 );
+    $tmp || die "can't create tmp: $!";
+    for my $missing (sort keys %{$links}) {
+        my $line = $missing . " " . $links->{$missing} . "\n";
+        print $tmp $line;
+    }
+    binmode $tmp; # force flush
+    return $tmp;
+}
```
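
For reference, a minimal consumer-side sketch of how the links.txt member packed by this change could be read back out of an images*.zip. This is not part of the commit; the archive name and the parsing loop are assumptions that simply mirror read_links() above:

```perl
#!/usr/bin/perl -w
# Hypothetical example: extract and parse links.txt from an images*.zip
# produced by packimages.pl (the archive name 'images.zip' is an assumption).
use strict;
use Archive::Zip qw(:ERROR_CODES);

my $zipname = shift @ARGV || 'images.zip';
my $zip = Archive::Zip->new();
$zip->read($zipname) == AZ_OK || die "can't read $zipname";

my %links;
my $data = $zip->contents('links.txt');    # undef if there is no links.txt member
if (defined $data) {
    for my $line (split /\n/, $data) {
        $line =~ s/\r//g;                  # DOS line-feeds
        $line =~ s/\#.*$//;                # kill comments
        next if $line =~ m/^\s*$/;         # blank lines
        $line =~ m/^(\S+)\s+(.*)$/ or die "Malformed links line: '$line'\n";
        $links{$1} = $2;
    }
}
print "links.txt maps ", scalar(keys %links), " missing image(s)\n";
```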