
Adding upstream version 2.25.15.

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Daniel Baumann 2025-06-21 11:04:07 +02:00
parent 10737b110a
commit b543f2e88d
Signed by: daniel.baumann
GPG key ID: BCC918A2ABD66424
485 changed files with 191459 additions and 0 deletions


@@ -0,0 +1,27 @@
# dummy subclass used to store all the redirections for later use
package Devscripts::Uscan::CatchRedirections;
use parent qw(LWP::UserAgent);
my @uscan_redirections;
sub redirect_ok {
my $self = shift;
my ($request) = @_;
if ($self->SUPER::redirect_ok(@_)) {
push @uscan_redirections, $request->uri;
return 1;
}
return 0;
}
sub get_redirections {
return \@uscan_redirections;
}
sub clear_redirections {
undef @uscan_redirections;
return;
}
1;
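# Minimal usage sketch (illustrative only; the URL is a placeholder):
#
#   use Devscripts::Uscan::CatchRedirections;
#   my $ua = Devscripts::Uscan::CatchRedirections->new(env_proxy => 1);
#   $ua->get('https://example.org/foo/foo-1.0.tar.gz');
#   print "$_\n" for @{ $ua->get_redirections };
#   $ua->clear_redirections;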


@@ -0,0 +1,394 @@
=head1 NAME
Devscripts::Uscan::Config - uscan configuration object
=head1 SYNOPSIS
use Devscripts::Uscan::Config;
my $config = Devscripts::Uscan::Config->new->parse;
=head1 DESCRIPTION
Uscan configuration object. It can scan configuration files
(B</etc/devscripts.conf> and B<~/.devscripts>) and command line arguments.
=cut
package Devscripts::Uscan::Config;
use strict;
use Devscripts::Uscan::Output;
use Exporter 'import';
use Moo;
extends 'Devscripts::Config';
our $CURRENT_WATCHFILE_VERSION = 4;
use constant default_user_agent => "Debian uscan"
. ($main::uscan_version ? " $main::uscan_version" : '');
our @EXPORT = (qw($CURRENT_WATCHFILE_VERSION));
# I - ACCESSORS
# Options + default values
has bare => (is => 'rw');
has check_dirname_level => (is => 'rw');
has check_dirname_regex => (is => 'rw');
has compression => (is => 'rw');
has copyright_file => (is => 'rw');
has destdir => (is => 'rw');
has download => (is => 'rw');
has download_current_version => (is => 'rw');
has download_debversion => (is => 'rw');
has download_version => (is => 'rw');
has exclusion => (is => 'rw');
has log => (is => 'rw');
has orig => (is => 'rw');
has package => (is => 'rw');
has pasv => (is => 'rw');
has http_header => (is => 'rw', default => sub { {} });
# repack to .tar.$zsuffix if 1
has repack => (is => 'rw');
has safe => (is => 'rw');
has signature => (is => 'rw');
has symlink => (is => 'rw');
has timeout => (is => 'rw');
has user_agent => (is => 'rw');
has uversion => (is => 'rw');
has vcs_export_uncompressed => (is => 'rw');
has watchfile => (is => 'rw');
# II - Options
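# Each entry below is a Devscripts::Config option specification, roughly:
#   [ Getopt::Long spec, devscripts.conf variable,
#     check (regexp, 'bool' or code ref), default value ]
# Trailing elements may be omitted.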
use constant keys => [
# 2.1 - Simple parameters that can be set in ~/.devscripts and command line
[
'check-dirname-level=s', 'DEVSCRIPTS_CHECK_DIRNAME_LEVEL',
qr/^[012]$/, 1
],
[
'check-dirname-regex=s', 'DEVSCRIPTS_CHECK_DIRNAME_REGEX',
undef, 'PACKAGE(-.+)?'
],
['dehs!', 'USCAN_DEHS_OUTPUT', sub { $dehs = $_[1]; 1 }],
[
'destdir=s',
'USCAN_DESTDIR',
sub {
if (-d $_[1]) {
$_[0]->destdir($_[1]);
return 1;
}
return (0,
"The directory to store downloaded files (\$destdir) does not exist: $_[1]");
},
'..'
],
['exclusion!', 'USCAN_EXCLUSION', 'bool', 1],
['timeout=i', 'USCAN_TIMEOUT', qr/^\d+$/, 20],
[
'user-agent|useragent=s',
'USCAN_USER_AGENT',
qr/\w/,
sub {
default_user_agent;
}
],
['repack', 'USCAN_REPACK', 'bool'],
# 2.2 - Simple command line args
['bare', undef, 'bool', 0],
['compression=s'],
['copyright-file=s'],
['download-current-version', undef, 'bool'],
['download-version=s'],
['download-debversion|dversion=s'],
['log', undef, 'bool'],
['package=s'],
['uversion|upstream-version=s'],
['vcs-export-uncompressed', 'USCAN_VCS_EXPORT_UNCOMPRESSED', 'bool'],
['watchfile=s'],
# 2.3 - More complex options
# http headers (#955268)
['http-header=s', 'USCAN_HTTP_HEADER', undef, sub { {} }],
# "download" and its aliases
[
undef,
'USCAN_DOWNLOAD',
sub {
return (1, 'Bad USCAN_DOWNLOAD value, skipping')
unless ($_[1] =~ /^(?:yes|(no))$/i);
$_[0]->download(0) if $1;
return 1;
}
],
[
'download|d+',
undef,
sub {
$_[1] =~ s/^yes$/1/i;
$_[1] =~ s/^no$/0/i;
return (0, "Wrong number of -d")
unless ($_[1] =~ /^[0123]$/);
$_[0]->download($_[1]);
return 1;
},
1
],
[
'force-download',
undef,
sub {
$_[0]->download(2);
}
],
['no-download', undef, sub { $_[0]->download(0); return 1; }],
['overwrite-download', undef, sub { $_[0]->download(3) }],
# "pasv"
[
'pasv|passive',
'USCAN_PASV',
sub {
return $_[0]->pasv('default')
unless ($_[1] =~ /^(yes|0|1|no)$/);
$_[0]->pasv({
yes => 1,
1 => 1,
no => 0,
0 => 0,
}->{$1});
return 1;
},
0
],
# "safe" and "symlink" and their aliases
['safe|report', 'USCAN_SAFE', 'bool', 0],
[
'report-status',
undef,
sub {
$_[0]->safe(1);
$verbose ||= 1;
}
],
['copy', undef, sub { $_[0]->symlink('copy') }],
['rename', undef, sub { $_[0]->symlink('rename') if ($_[1]); 1; }],
[
'symlink!',
'USCAN_SYMLINK',
sub {
$_[0]->symlink(
$_[1] =~ /^(no|0|rename)$/ ? $1
: $_[1] =~ /^(yes|1|symlink)$/ ? 'symlink'
: 'no'
);
return 1;
},
'symlink'
],
# "signature" and its aliases
['signature!', undef, 'bool', 1],
['skipsignature|skip-signature', undef, sub { $_[0]->signature(-1) }],
# "verbose" and its aliases
['debug', undef, sub { $verbose = 2 }],
['extra-debug', undef, sub { $verbose = 3 }],
['no-verbose', undef, sub { $verbose = 0; return 1; }],
[
'verbose|v+',
'USCAN_VERBOSE',
sub {
$verbose = ($_[1] =~ /^yes$/i ? 1 : $_[1] =~ /^(\d)$/ ? $1 : 0);
return 1;
}
],
# Display version
[
'version',
undef,
sub {
if ($_[1]) { $_[0]->version; exit 0 }
}
]];
use constant rules => [
sub {
my $self = shift;
if ($self->package) {
$self->download(0)
unless ($self->download > 1); # compatibility
return (0,
"The --package option requires to set the --watchfile option, too."
) unless defined $self->watchfile;
}
$self->download(0) if ($self->safe == 1 and $self->download == 1);
return 1;
},
# $signature: -1 = no downloading signature and no verifying signature,
# 0 = no downloading signature but verifying signature,
# 1 = downloading signature and verifying signature
sub {
my $self = shift;
$self->signature(-1)
if $self->download == 0; # Change default 1 -> -1
return 1;
},
sub {
if (defined $_[0]->watchfile and @ARGV) {
return (0, "Can't have directory arguments if using --watchfile");
}
return 1;
},
];
# help methods
sub usage {
my ($self) = @_;
print <<"EOF";
Usage: $progname [options] [dir ...]
Process watch files in all .../debian/ subdirs of those listed (or the
current directory if none listed) to check for upstream releases.
Options:
--no-conf, --noconf
Don\'t read devscripts config files;
must be the first option given
--no-verbose Don\'t report verbose information.
--verbose, -v Report verbose information.
--debug, -vv Report verbose information including the downloaded
web pages as processed to STDERR for debugging.
--extra-debug, -vvv Report also remote content during "search" step
--dehs Send DEHS style output (XML-type) to STDOUT, while
sending all other uscan output to STDERR.
--no-dehs Use only traditional uscan output format (default)
--download, -d
Download the new upstream release (default)
--force-download, -dd
Download the new upstream release, even if up-to-date
(may not overwrite the local file)
--overwrite-download, -ddd
Download the new upstream release, even if up-to-date
(may overwrite the local file)
--no-download, --nodownload
Don\'t download; just report information.
Previously downloaded tarballs may be used.
Change default to --skip-signature.
--signature Download signature and verify (default)
--no-signature Don\'t download signature but verify if already downloaded.
--skip-signature
Don\'t bother downloading the signature or verifying it.
--safe, --report
Avoid running unsafe scripts by skipping both the repacking
of the downloaded package and the updating of the new
source tree. Change default to --no-download and
--skip-signature.
--report-status (= --safe --verbose)
--download-version VERSION
Specify the version which the upstream release must
match in order to be considered, rather than using the
release with the highest version
--download-debversion VERSION
Specify the Debian package version to download the
corresponding upstream release version. The
dversionmangle and uversionmangle rules are
considered.
--download-current-version
Download the currently packaged version
--check-dirname-level N
Check parent directory name?
N=0 never check parent directory name
N=1 only when $progname changes directory (default)
N=2 always check parent directory name
--check-dirname-regex REGEX
What constitutes a matching directory name; REGEX is
a Perl regular expression; the string \`PACKAGE\' will
be replaced by the package name; see manpage for details
(default: 'PACKAGE(-.+)?')
--destdir Path of directory to which to download.
--package PACKAGE
Specify the package name rather than examining
debian/changelog; must use --upstream-version and
--watchfile with this option, no directory traversing
will be performed, no actions (even downloading) will be
carried out
--upstream-version VERSION
Specify the current upstream version in use rather than
parsing debian/changelog to determine this
--watchfile FILE
Specify the watch file rather than using debian/watch;
no directory traversing will be done in this case
--bare Disable all site-specific special-case code that performs URL
redirections and page content alterations.
--no-exclusion Disable automatic exclusion of files mentioned in
debian/copyright field Files-Excluded and Files-Excluded-*
--pasv Use PASV mode for FTP connections
--no-pasv Don\'t use PASV mode for FTP connections (default)
--no-symlink Don\'t rename nor repack upstream tarball
--timeout N Specifies how much time, in seconds, we give remote
servers to respond (default 20 seconds)
--user-agent, --useragent
Override the default user agent string
--log Record md5sum changes of repackaging
--help Show this message
--version Show version information
Options passed on to mk-origtargz:
--symlink Create a correctly named symlink to downloaded file (default)
--rename Rename instead of symlinking
--copy Copy instead of symlinking
--repack Repack downloaded archives to change compression
--compression [ gzip | bzip2 | lzma | xz ]
When the upstream sources are repacked, use compression COMP
for the resulting tarball (default: gzip)
--copyright-file FILE
Remove files matching the patterns found in FILE
Default settings modified by devscripts configuration files:
$self->{modified_conf_msg}
EOF
}
sub version {
print <<"EOF";
This is $progname, from the Debian devscripts package, version $main::uscan_version
This code is copyright 1999-2006 by Julian Gilbey and 2018 by Xavier Guimard,
all rights reserved.
Original code by Christoph Lameter.
This program comes with ABSOLUTELY NO WARRANTY.
You are free to redistribute this code under the terms of the
GNU General Public License, version 2 or later.
EOF
}
1;
__END__
=head1 SEE ALSO
L<uscan>, L<Devscripts::Config>
=head1 AUTHOR
B<uscan> was originally written by Christoph Lameter
E<lt>clameter@debian.orgE<gt> (I believe), modified by Julian Gilbey
E<lt>jdg@debian.orgE<gt>. HTTP support was added by Piotr Roszatycki
E<lt>dexter@debian.orgE<gt>. B<uscan> was rewritten in Perl by Julian Gilbey.
Xavier Guimard E<lt>yadd@debian.orgE<gt> rewrote uscan in object
oriented Perl.
=head1 COPYRIGHT AND LICENSE
Copyright 2002-2006 by Julian Gilbey <jdg@debian.org>,
2018 by Xavier Guimard <yadd@debian.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
=cut


@@ -0,0 +1,36 @@
package Devscripts::Uscan::Ctype::nodejs;
use strict;
use Moo;
use JSON;
use Devscripts::Uscan::Output;
has dir => (is => 'ro');
has pkg => (
is => 'rw',
lazy => 1,
default => sub {
$_[0]->{dir} . '/package.json';
});
sub version {
my ($self) = @_;
return unless $self->dir and -d $self->dir;
unless (-r $self->pkg) {
uscan_warn "Unable to read $self->{pkg}, skipping current version";
return;
}
my ($version, $content);
{
local $/ = undef;
open my $f, '<', $self->pkg or return;
$content = <$f>;
close $f;
}
eval { $version = decode_json($content)->{version}; };
uscan_warn $@ if $@;
return $version;
}
1;
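# Minimal usage sketch (illustrative only):
#
#   use Devscripts::Uscan::Ctype::nodejs;
#   my $v = Devscripts::Uscan::Ctype::nodejs->new({ dir => '.' })->version;
#   print "version from package.json: $v\n" if defined $v;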


@@ -0,0 +1,36 @@
package Devscripts::Uscan::Ctype::perl;
use strict;
use Moo;
use JSON;
use Devscripts::Uscan::Output;
has dir => (is => 'ro');
has pkg => (
is => 'rw',
lazy => 1,
default => sub {
$_[0]->{dir} . '/META.json';
});
sub version {
my ($self) = @_;
return unless $self->dir and -d $self->dir;
unless (-r $self->pkg) {
uscan_warn "Unable to read $self->{pkg}, skipping current version";
return;
}
my ($version, $content);
{
local $/ = undef;
open my $f, '<', $self->pkg or return;
$content = <$f>;
close $f;
}
eval { $version = decode_json($content)->{version}; };
uscan_warn $@ if $@;
return $version;
}
1;


@@ -0,0 +1,346 @@
package Devscripts::Uscan::Downloader;
use strict;
use Cwd qw/cwd abs_path/;
use Devscripts::Uscan::CatchRedirections;
use Devscripts::Uscan::Output;
use Devscripts::Uscan::Utils;
use Dpkg::IPC;
use File::Basename;
use File::DirList;
use File::Find;
use File::Temp qw/tempdir/;
use File::Touch;
use Moo;
use URI;
our $haveSSL;
has git_upstream => (is => 'rw');
BEGIN {
eval { require LWP::UserAgent; };
if ($@) {
my $progname = basename($0);
if ($@ =~ /^Can\'t locate LWP\/UserAgent\.pm/) {
die "$progname: you must have the libwww-perl package installed\n"
. "to use this script";
} else {
die "$progname: problem loading the LWP::UserAgent module:\n $@\n"
. "Have you installed the libwww-perl package?";
}
}
eval { require LWP::Protocol::https; };
$haveSSL = $@ ? 0 : 1;
}
has agent =>
(is => 'rw', default => sub { "Debian uscan $main::uscan_version" });
has timeout => (is => 'rw');
has pasv => (
is => 'rw',
default => 'default',
trigger => sub {
my ($self, $nv) = @_;
if ($nv) {
uscan_verbose "Set passive mode: $self->{pasv}";
$ENV{'FTP_PASSIVE'} = $self->pasv;
} elsif ($ENV{'FTP_PASSIVE'}) {
uscan_verbose "Unset passive mode";
delete $ENV{'FTP_PASSIVE'};
}
});
has destdir => (is => 'rw');
# 0: no repo, 1: shallow clone, 2: full clone
has gitrepo_state => (
is => 'rw',
default => sub { 0 });
has git_export_all => (
is => 'rw',
default => sub { 0 });
has user_agent => (
is => 'rw',
lazy => 1,
default => sub {
my ($self) = @_;
my $user_agent
= Devscripts::Uscan::CatchRedirections->new(env_proxy => 1);
$user_agent->timeout($self->timeout);
$user_agent->agent($self->agent);
# Strip Referer header for Sourceforge to avoid SF sending back a
# "200 OK" with a <meta refresh=...> redirect
$user_agent->add_handler(
'request_prepare' => sub {
my ($request, $ua, $h) = @_;
$request->remove_header('Referer');
},
m_hostname => 'sourceforge.net',
);
$self->{user_agent} = $user_agent;
});
has ssl => (is => 'rw', default => sub { $haveSSL });
has headers => (
is => 'ro',
default => sub { {} });
sub download ($$$$$$$$) {
my (
$self, $url, $fname, $optref, $base,
$pkg_dir, $pkg, $mode, $gitrepo_dir
) = @_;
my ($request, $response);
$mode ||= $optref->mode;
if ($mode eq 'http') {
if ($url =~ /^https/ and !$self->ssl) {
uscan_die "$progname: you must have the "
. "liblwp-protocol-https-perl package installed\n"
. "to use https URLs";
}
# substitute HTML entities
# Is anything else than "&amp;" required? I doubt it.
uscan_verbose "Requesting URL:\n $url";
my $headers = HTTP::Headers->new;
$headers->header('Accept' => '*/*');
$headers->header('Referer' => $base);
my $uri_o = URI->new($url);
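# Custom headers (--http-header/USCAN_HTTP_HEADER) are keyed as
# "<base-url>@<header-name>"; each header is only sent when $url is
# located under its <base-url>.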
foreach my $k (keys %{ $self->headers }) {
if ($k =~ /^(.*?)@(.*)$/) {
my $baseUrl = $1;
my $hdr = $2;
if ($url =~ m#^\Q$baseUrl\E(?:/.*)?$#) {
$headers->header($hdr => $self->headers->{$k});
uscan_verbose "Set per-host custom header $hdr for $url";
} else {
uscan_debug "$url does not start with $1";
}
} else {
uscan_warn "Malformed http-header: $k";
}
}
$request = HTTP::Request->new('GET', $url, $headers);
$response = $self->user_agent->request($request, $fname);
if (!$response->is_success) {
uscan_warn((defined $pkg_dir ? "In directory $pkg_dir, d" : "D")
. "ownloading\n $url failed: "
. $response->status_line);
return 0;
}
} elsif ($mode eq 'ftp') {
uscan_verbose "Requesting URL:\n $url";
$request = HTTP::Request->new('GET', "$url");
$response = $self->user_agent->request($request, $fname);
if (!$response->is_success) {
uscan_warn(
(defined $pkg_dir ? "In directory $pkg_dir, d" : "D")
. "ownloading\n $url failed: "
. $response->status_line);
return 0;
}
} else {    # git or svn mode
my $destdir = $self->destdir;
my $curdir = cwd();
$fname =~ m%(.*)/$pkg-([^_/]*)\.tar(?:\.(gz|xz|bz2|lzma|zstd?))?%;
my $dst = $1;
my $abs_dst = abs_path($dst);
my $ver = $2;
my $suffix = $3;
my ($gitrepo, $gitref) = split /[[:space:]]+/, $url, 2;
my $clean = sub {
uscan_exec_no_fail('rm', '-fr', $gitrepo_dir);
};
my $clean_and_die = sub {
$clean->();
uscan_die @_;
};
if ($mode eq 'svn') {
my $tempdir = tempdir(CLEANUP => 1);
my $old_umask = umask(oct('022'));
uscan_exec('svn', 'export', $url, "$tempdir/$pkg-$ver");
umask($old_umask);
find({
wanted => sub {
return if !-d $File::Find::name;
my ($newest) = grep { $_ ne '.' && $_ ne '..' }
map { $_->[13] } @{ File::DirList::list($_, 'M') };
return if !$newest;
my $touch
= File::Touch->new(reference => $_ . '/' . $newest);
$touch->touch($_);
},
bydepth => 1,
no_chdir => 1,
},
"$tempdir/$pkg-$ver"
);
uscan_exec(
'tar', '-C',
$tempdir, '--sort=name',
'--owner=root', '--group=root',
'-cvf', "$abs_dst/$pkg-$ver.tar",
"$pkg-$ver"
);
} elsif ($self->git_upstream) {
my ($infodir, $attr_file, $attr_bkp);
if ($self->git_export_all) {
# override any export-subst and export-ignore attributes
spawn(
exec => [qw|git rev-parse --git-path info/|],
to_string => \$infodir,
);
chomp $infodir;
mkdir $infodir unless -e $infodir;
spawn(
exec => [qw|git rev-parse --git-path info/attributes|],
to_string => \$attr_file,
);
chomp $attr_file;
spawn(
exec =>
[qw|git rev-parse --git-path info/attributes-uscan|],
to_string => \$attr_bkp,
);
chomp $attr_bkp;
rename $attr_file, $attr_bkp if -e $attr_file;
my $attr_fh;
unless (open($attr_fh, '>', $attr_file)) {
rename $attr_bkp, $attr_file if -e $attr_bkp;
uscan_die("could not open $attr_file for writing");
}
print $attr_fh "* -export-subst\n* -export-ignore\n";
close $attr_fh;
}
uscan_exec_no_fail('git', 'archive', '--format=tar',
"--prefix=$pkg-$ver/", "--output=$abs_dst/$pkg-$ver.tar",
$gitref) == 0
or $clean_and_die->("git archive failed");
if ($self->git_export_all) {
# restore attributes
if (-e $attr_bkp) {
rename $attr_bkp, $attr_file;
} else {
unlink $attr_file;
}
}
} else {
if ($self->gitrepo_state == 0) {
my @opts = ();
if ($optref->git->{modules}) {
foreach my $m (@{ $optref->git->{modules} }) {
push(@opts, "--recurse-submodules=$m");
}
} else {
push(@opts, '--bare');
}
$self->gitrepo_state(2);
if ($optref->git->{mode} eq 'shallow') {
my $tag = $gitref;
$tag =~ s#^refs/(?:tags|heads)/##;
if ($optref->git->{modules}) {
push(@opts, '--shallow-submodules');
}
push(@opts, '--depth=1', '-b', $tag);
$self->gitrepo_state(1);
}
uscan_exec('git', 'clone', @opts, $base,
"$destdir/$gitrepo_dir");
}
chdir "$destdir/$gitrepo_dir"
or
$clean_and_die->("Unable to chdir($destdir/$gitrepo_dir): $!");
if ($self->git_export_all) {
my (@info_dirs, @attr_files);
my @arr_refs = (\@info_dirs, \@attr_files);
my @gitpaths = ("info/", "info/attributes");
for (my $tmp, my $i = 0 ; $i < @gitpaths ; $i++) {
my @cmd = ("git", "rev-parse", "--git-path", $gitpaths[$i]);
spawn(
exec => [@cmd],
to_string => \$tmp,
);
chomp $tmp;
push(@{ $arr_refs[$i] }, split(/\n/, $tmp));
if ($optref->git->{modules}) {
spawn(
exec =>
['git', 'submodule', '--quiet', 'foreach', @cmd],
to_string => \$tmp,
);
chomp $tmp;
push(@{ $arr_refs[$i] }, split(/\n/, $tmp));
}
}
foreach my $infodir (@info_dirs) {
mkdir $infodir unless -e $infodir;
}
# override any export-subst and export-ignore attributes
foreach my $attr_file (@attr_files) {
my $attr_fh;
open($attr_fh, '>', $attr_file);
print $attr_fh "* -export-subst\n* -export-ignore\n";
close $attr_fh;
}
}
# archive main repository
uscan_exec_no_fail('git', 'archive', '--format=tar',
"--prefix=$pkg-$ver/",
"--output=$abs_dst/$pkg-$ver.tar", $gitref) == 0
or $clean_and_die->("$gitrepo_dir", "git archive failed");
# archive submodules, append to main tarball, clean up
if ($optref->git->{modules}) {
my $cmd = join ' ',
"git archive --format=tar --prefix=$pkg-$ver/\$sm_path/",
"--output=$abs_dst/\$sha1.tar HEAD",
"&& tar -Af $abs_dst/$pkg-$ver.tar $abs_dst/\$sha1.tar",
"&& rm $abs_dst/\$sha1.tar";
uscan_exec_no_fail('git', 'submodule', '--quiet', 'foreach',
$cmd) == 0
or $clean_and_die->("git archive (submodules) failed");
}
chdir "$curdir"
or $clean_and_die->("Unable to chdir($curdir): $!");
}
if (defined($suffix)) {
chdir "$abs_dst"
or $clean_and_die->("Unable to chdir($abs_dst): $!");
if ($suffix eq 'gz') {
uscan_exec("gzip", "-n", "-9", "$pkg-$ver.tar");
} elsif ($suffix eq 'xz') {
uscan_exec("xz", "$pkg-$ver.tar");
} elsif ($suffix eq 'bz2') {
uscan_exec("bzip2", "$pkg-$ver.tar");
} elsif ($suffix eq 'lzma') {
uscan_exec("lzma", "$pkg-$ver.tar");
#} elsif ($suffix =~ /^zstd?$/) {
# uscan_exec("zstd", "$pkg-$ver.tar");
} else {
$clean_and_die->("Unknown suffix file to repack: $suffix");
}
chdir "$curdir"
or $clean_and_die->("Unable to chdir($curdir): $!");
}
$clean->();
}
return 1;
}
1;
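# Minimal usage sketch for a plain HTTP download (illustrative only;
# $optref stands for the per-watchline option object expected by download()):
#
#   use Devscripts::Uscan::Downloader;
#   my $dl = Devscripts::Uscan::Downloader->new({
#       timeout => 20,
#       agent   => 'uscan-example',
#       destdir => '..',
#   });
#   $dl->download($url, '../foo-1.0.tar.gz', $optref, $base_url,
#       $pkg_dir, 'foo', 'http', undef)
#     or warn "download failed";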


@@ -0,0 +1,257 @@
=head1 NAME
Devscripts::Uscan::FindFiles - watchfile finder
=head1 SYNOPSIS
use Devscripts::Uscan::Config;
use Devscripts::Uscan::FindFiles;
# Get config
my $config = Devscripts::Uscan::Config->new->parse;
# Search watchfiles
my @wf = find_watch_files($config);
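# Each element is an array reference holding, roughly:
# [directory, source package, upstream version, watch file path]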
=head1 DESCRIPTION
This package exports the B<find_watch_files()> function, which searches for
Debian watch files according to the configuration parameters.
=head1 SEE ALSO
L<uscan>, L<Devscripts::Uscan::WatchFile>, L<Devscripts::Uscan::Config>
=head1 AUTHOR
B<uscan> was originally written by Christoph Lameter
E<lt>clameter@debian.orgE<gt> (I believe), modified by Julian Gilbey
E<lt>jdg@debian.orgE<gt>. HTTP support was added by Piotr Roszatycki
E<lt>dexter@debian.orgE<gt>. B<uscan> was rewritten in Perl by Julian Gilbey.
Xavier Guimard E<lt>yadd@debian.orgE<gt> rewrote uscan in object
oriented Perl.
=head1 COPYRIGHT AND LICENSE
Copyright 2002-2006 by Julian Gilbey <jdg@debian.org>,
2018 by Xavier Guimard <yadd@debian.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
=cut
package Devscripts::Uscan::FindFiles;
use strict;
use filetest 'access';
use Cwd qw/cwd/;
use Exporter 'import';
use Devscripts::Uscan::Output;
use Devscripts::Versort;
use Dpkg::Changelog::Parse qw(changelog_parse);
use File::Basename;
our @EXPORT = ('find_watch_files');
sub find_watch_files {
my ($config) = @_;
my $opwd = cwd();
# when --watchfile is used
if (defined $config->watchfile) {
uscan_verbose "Option --watchfile=$config->{watchfile} used";
my ($config) = (@_);
# no directory traversing then, and things are very simple
if (defined $config->package) {
# no need to even look for a changelog!
return (
['.', $config->package, $config->uversion, $config->watchfile]
);
} else {
# Check for debian/changelog file
until (-r 'debian/changelog') {
chdir '..' or uscan_die "can't chdir ..: $!";
if (cwd() eq '/') {
uscan_die "Are you in the source code tree?\n"
. " Cannot find readable debian/changelog anywhere!";
}
}
my ($package, $debversion, $uversion)
= scan_changelog($config, $opwd, 1);
return ([cwd(), $package, $uversion, $config->watchfile]);
}
}
# when --watchfile is not used, scan watch files
push @ARGV, '.' if !@ARGV;
{
local $, = ',';
uscan_verbose "Scan watch files in @ARGV";
}
# Run find to find the directories. We will handle filenames with spaces
# correctly, which makes this code a little messier than it would be
# otherwise.
my @dirs;
open FIND, '-|', 'find', '-L', @ARGV,
qw{-type d ( -name .git -prune -o -name debian -print ) }
or uscan_die "Couldn't exec find: $!";
while (<FIND>) {
chomp;
push @dirs, $_;
uscan_debug "Found $_";
}
close FIND;
uscan_die "No debian directories found" unless @dirs;
my @debdirs = ();
my $origdir = cwd;
for my $dir (@dirs) {
$dir =~ s%/debian$%%;
unless (chdir $origdir) {
uscan_warn "Couldn't chdir back to $origdir, skipping: $!";
next;
}
unless (chdir $dir) {
uscan_warn "Couldn't chdir $dir, skipping: $!";
next;
}
uscan_verbose "Check debian/watch and debian/changelog in $dir";
# Check for debian/watch file
if (-r 'debian/watch') {
unless (-r 'debian/changelog') {
uscan_warn
"Problems reading debian/changelog in $dir, skipping";
next;
}
my ($package, $debversion, $uversion)
= scan_changelog($config, $opwd);
next unless ($package);
uscan_verbose
"package=\"$package\" version=\"$uversion\" (no epoch/revision)";
push @debdirs, [$debversion, $dir, $package, $uversion];
}
}
uscan_warn "No watch file found" unless @debdirs;
# Was there a --upstream-version option?
if (defined $config->uversion) {
if (@debdirs == 1) {
$debdirs[0][3] = $config->uversion;
} else {
uscan_warn
"ignoring --upstream-version as more than one debian/watch file found";
}
}
# Now sort the list of directories, so that we process the most recent
# directories first, as determined by the package version numbers
@debdirs = Devscripts::Versort::deb_versort(@debdirs);
# Now process the watch files in order. If a directory d has
# subdirectories d/sd1/debian and d/sd2/debian, which each contain watch
# files corresponding to the same package, then we only process the watch
# file in the package with the latest version number.
my %donepkgs;
my @results;
for my $debdir (@debdirs) {
shift @$debdir; # don't need the Debian version number any longer
my $dir = $$debdir[0];
my $parentdir = dirname($dir);
my $package = $$debdir[1];
my $version = $$debdir[2];
if (exists $donepkgs{$parentdir}{$package}) {
uscan_warn
"Skipping $dir/debian/watch\n as this package has already been found";
next;
}
unless (chdir $origdir) {
uscan_warn "Couldn't chdir back to $origdir, skipping: $!";
next;
}
unless (chdir $dir) {
uscan_warn "Couldn't chdir $dir, skipping: $!";
next;
}
uscan_verbose
"$dir/debian/changelog sets package=\"$package\" version=\"$version\"";
push @results, [$dir, $package, $version, "debian/watch", cwd];
}
unless (chdir $origdir) {
uscan_die "Couldn't chdir back to $origdir! $!";
}
return @results;
}
sub scan_changelog {
my ($config, $opwd, $die) = @_;
my $out
= $die
? sub { uscan_die(@_) }
: sub { uscan_warn($_[0] . ', skipping'); return undef; };
# Figure out package info we need
my $changelog = eval { changelog_parse(); };
if ($@) {
return $out->("Problems parsing debian/changelog");
}
my ($package, $debversion, $uversion);
$package = $changelog->{Source};
return $out->("Problem determining the package name from debian/changelog")
unless defined $package;
$debversion = $changelog->{Version};
return $out->("Problem determining the version from debian/changelog")
unless defined $debversion;
uscan_verbose
"package=\"$package\" version=\"$debversion\" (as seen in debian/changelog)";
# Check the directory is properly named for safety
if ($config->check_dirname_level == 2
or ($config->check_dirname_level == 1 and cwd() ne $opwd)) {
my $good_dirname;
my $re = $config->check_dirname_regex;
$re =~ s/PACKAGE/\Q$package\E/g;
if ($re =~ m%/%) {
$good_dirname = (cwd() =~ m%^$re$%);
} else {
$good_dirname = (basename(cwd()) =~ m%^$re$%);
}
return $out->("The directory name "
. basename(cwd())
. " doesn't match the requirement of\n"
. " --check-dirname-level=$config->{check_dirname_level} --check-dirname-regex=$re .\n"
. " Set --check-dirname-level=0 to disable this sanity check feature."
) unless $good_dirname;
}
# Get current upstream version number
if (defined $config->uversion) {
$uversion = $config->uversion;
} else {
$uversion = $debversion;
$uversion =~ s/-[^-]+$//; # revision
$uversion =~ s/^\d+://; # epoch
}
return ($package, $debversion, $uversion);
}
1;


@@ -0,0 +1,317 @@
package Devscripts::Uscan::Keyring;
use strict;
use Devscripts::Uscan::Output;
use Devscripts::Uscan::Utils;
use Dpkg::IPC;
use Dpkg::Path qw/find_command/;
use File::Copy qw/copy move/;
use File::Path qw/make_path remove_tree/;
use File::Temp qw/tempfile tempdir/;
use List::Util qw/first/;
use MIME::Base64;
# _pgp_* functions are strictly for applying or removing ASCII armor.
# see https://www.rfc-editor.org/rfc/rfc9580.html#section-6 for more
# details.
# Note that these _pgp_* functions are only necessary while relying on
# gpgv, and gpgv itself does not verify multiple signatures correctly
# (see https://bugs.debian.org/1010955)
sub _pgp_unarmor_data {
my ($type, $data, $filename) = @_;
# note that we ignore an incorrect or absent checksum, following the
# guidance of
# https://www.rfc-editor.org/rfc/rfc9580.html#section-6.1-3
my $armor_regex = qr{
-----BEGIN\ PGP\ \Q$type\E-----[\r\t ]*\n
(?:[^:\n]+:\ [^\n]*[\r\t ]*\n)*
[\r\t ]*\n
([a-zA-Z0-9/+\n]+={0,2})[\r\t ]*\n
(?:=[a-zA-Z0-9/+]{4}[\r\t ]*\n)?
-----END\ PGP\ \Q$type\E-----
}xm;
my $blocks = 0;
my $binary;
while ($data =~ m/$armor_regex/g) {
$binary .= decode_base64($1);
$blocks++;
}
if ($blocks > 1) {
uscan_warn "Found multiple concatenated ASCII Armor blocks in\n"
. " $filename, which is not an interoperable construct.\n"
. " See <https://tests.sequoia-pgp.org/results.html#ASCII_Armor>.\n"
. " Please concatenate them into a single ASCII Armor block. For example:\n"
. " sq keyring merge --overwrite --output $filename \\\n"
. " $filename";
}
return $binary;
}
sub _pgp_armor_checksum {
my ($data) = @_;
# from https://www.rfc-editor.org/rfc/rfc9580.html#section-6.1.1
#
# #define CRC24_INIT 0xB704CEL
# #define CRC24_GENERATOR 0x864CFBL
# typedef unsigned long crc24;
# crc24 crc_octets(unsigned char *octets, size_t len)
# {
# crc24 crc = CRC24_INIT;
# int i;
# while (len--) {
# crc ^= (*octets++) << 16;
# for (i = 0; i < 8; i++) {
# crc <<= 1;
# if (crc & 0x1000000) {
# crc &= 0xffffff; /* Clear bit 25 to avoid overflow */
# crc ^= CRC24_GENERATOR;
# }
# }
# }
# return crc & 0xFFFFFFL;
# }
#
# the resulting three-octet-wide value then gets base64-encoded into
# four base64 ASCII characters.
my $CRC24_INIT = 0xB704CE;
my $CRC24_GENERATOR = 0x864CFB;
my @bytes = unpack 'C*', $data;
my $crc = $CRC24_INIT;
for my $b (@bytes) {
$crc ^= ($b << 16);
for (1 .. 8) {
$crc <<= 1;
if ($crc & 0x1000000) {
$crc &= 0xffffff; # Clear bit 25 to avoid overflow
$crc ^= $CRC24_GENERATOR;
}
}
}
my $sum
= pack('CCC', (($crc >> 16) & 0xff, ($crc >> 8) & 0xff, $crc & 0xff));
return encode_base64($sum, q{});
}
sub _pgp_armor_data {
my ($type, $data) = @_;
my $out = encode_base64($data, q{}) =~ s/(.{1,64})/$1\n/gr;
chomp $out;
my $crc = _pgp_armor_checksum($data);
my $armor = <<~"ARMOR";
-----BEGIN PGP $type-----
$out
=$crc
-----END PGP $type-----
ARMOR
return $armor;
}
sub new {
my ($class) = @_;
my $keyring;
my $havegpgv = first { find_command($_) } qw(gpgv);
my $havesopv = first { find_command($_) } qw(sopv);
my $havesop
= first { find_command($_) } qw(sqop rsop pgpainless-cli gosop);
uscan_die("Please install a sopv variant.")
unless (defined $havegpgv or defined $havesopv);
# upstream/signing-key.pgp and upstream-signing-key.pgp are deprecated
# but supported
if (-r "debian/upstream/signing-key.asc") {
$keyring = "debian/upstream/signing-key.asc";
} else {
my $binkeyring = first { -r $_ } qw(
debian/upstream/signing-key.pgp
debian/upstream-signing-key.pgp
);
if (defined $binkeyring) {
make_path('debian/upstream', { mode => 0700, verbose => 'true' });
# convert to the policy complying armored key
uscan_verbose(
"Found upstream binary signing keyring: $binkeyring");
# Need to convert to an armored key
$keyring = "debian/upstream/signing-key.asc";
uscan_warn "Found deprecated binary keyring ($binkeyring). "
. "Please save it in armored format in $keyring. For example:\n"
. " sop armor < $binkeyring > $keyring";
if ($havesop) {
spawn(
exec => [$havesop, 'armor'],
from_file => $binkeyring,
to_file => $keyring,
wait_child => 1,
);
} else {
open my $inkeyring, '<', $binkeyring
or uscan_warn(
"Can't open $binkeyring to read deprecated binary keyring"
);
read $inkeyring, my $keycontent, -s $inkeyring;
close $inkeyring;
open my $outkeyring, '>', $keyring
or uscan_warn(
"Can't open $keyring for writing ASCII-armored keyring");
my $outkey = _pgp_armor_data('PUBLIC KEY BLOCK', $keycontent);
print $outkeyring $outkey
or
uscan_warn("Can't write ASCII-armored keyring to $keyring");
close $outkeyring or uscan_warn("Failed to close $keyring");
}
uscan_warn("Generated upstream signing keyring: $keyring");
move $binkeyring, "$binkeyring.backup";
uscan_verbose(
"Renamed upstream binary signing keyring: $binkeyring.backup");
}
}
# Need to convert an armored key to binary for use by gpgv
if (defined $keyring) {
uscan_verbose("Found upstream signing keyring: $keyring");
if ($keyring =~ m/\.asc$/ && !defined $havesopv)
{ # binary keyring is only necessary for gpgv:
my $pgpworkdir = tempdir(CLEANUP => 1);
my $newkeyring = "$pgpworkdir/upstream-signing-key.pgp";
open my $inkeyring, '<', $keyring
or uscan_die("Can't open keyring file $keyring");
read $inkeyring, my $keycontent, -s $inkeyring;
close $inkeyring;
my $binkey
= _pgp_unarmor_data('PUBLIC KEY BLOCK', $keycontent, $keyring);
if ($binkey) {
open my $outkeyring, '>:raw', $newkeyring
or uscan_die("Can't write to temporary keyring $newkeyring");
print $outkeyring $binkey
or uscan_die("Can't write $newkeyring");
close $outkeyring or uscan_die("Can't close $newkeyring");
$keyring = $newkeyring;
} else {
uscan_die("Failed to dearmor key(s) from $keyring");
}
}
}
# Return undef if no key was found
else {
return undef;
}
my $self = bless {
keyring => $keyring,
gpgv => $havegpgv,
sopv => $havesopv,
}, $class;
return $self;
}
sub verify {
my ($self, $sigfile, $newfile) = @_;
uscan_verbose(
"Verifying OpenPGP self signature of $newfile and extract $sigfile");
if ($self->{sopv}) {
spawn(
exec => [$self->{sopv}, 'inline-verify', $self->{keyring}],
from_file => $newfile,
to_file => $sigfile,
wait_child => 1
) or uscan_die("OpenPGP signature did not verify.");
} else {
unless (
uscan_exec_no_fail(
$self->{gpgv},
'--homedir' => '/dev/null',
'--keyring' => $self->{keyring},
'-o' => "$sigfile",
"$newfile"
) >> 8 == 0
) {
uscan_die("OpenPGP signature did not verify.");
}
}
}
sub verifyv {
my ($self, $sigfile, $base) = @_;
uscan_verbose("Verifying OpenPGP signature $sigfile for $base");
if ($self->{sopv}) {
spawn(
exec => [$self->{sopv}, 'verify', $sigfile, $self->{keyring}],
from_file => $base,
wait_child => 1
) or uscan_die("OpenPGP signature did not verify.");
} else {
unless (
uscan_exec_no_fail(
$self->{gpgv},
'--homedir' => '/dev/null',
'--keyring' => $self->{keyring},
$sigfile, $base
) >> 8 == 0
) {
uscan_die("OpenPGP signature did not verify.");
}
}
}
sub verify_git {
my ($self, $gitdir, $tag, $git_upstream) = @_;
my $commit;
my @dir = $git_upstream ? () : ('--git-dir', $gitdir);
spawn(
exec => ['git', @dir, 'show-ref', $tag],
to_string => \$commit
);
uscan_die "git tag not found" unless ($commit);
$commit =~ s/\s.*$//;
chomp $commit;
my $file;
spawn(
exec => ['git', @dir, 'cat-file', '-p', $commit],
to_string => \$file
);
my $dir;
spawn(exec => ['mktemp', '-d'], to_string => \$dir);
chomp $dir;
unless ($file =~ /^(.*?\n)(\-+\s*BEGIN PGP SIGNATURE\s*\-+.*)$/s) {
uscan_die "Tag $tag is not signed";
}
open F, ">$dir/txt" or die $!;
open S, ">$dir/sig" or die $!;
print F $1;
print S $2;
close F;
close S;
if ($self->{sopv}) {
spawn(
exec => [$self->{sopv}, 'verify', "$dir/sig", $self->{keyring}],
from_file => "$dir/txt",
wait_child => 1
) or uscan_die("OpenPGP signature did not verify");
} else {
unless (
uscan_exec_no_fail(
$self->{gpgv},
'--homedir' => '/dev/null',
'--keyring' => $self->{keyring},
"$dir/sig", "$dir/txt"
) >> 8 == 0
) {
uscan_die("OpenPGP signature did not verify.");
}
}
remove_tree($dir);
}
1;
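# Minimal usage sketch (illustrative paths; assumes an upstream signing key
# under debian/upstream/):
#
#   use Devscripts::Uscan::Keyring;
#   my $keyring = Devscripts::Uscan::Keyring->new;
#   $keyring->verifyv('../foo_1.0.orig.tar.xz.asc', '../foo_1.0.orig.tar.xz')
#     if $keyring;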


@@ -0,0 +1,129 @@
package Devscripts::Uscan::Output;
use strict;
use Devscripts::Output;
use Exporter 'import';
use File::Basename;
our @EXPORT = (
@Devscripts::Output::EXPORT, qw(
uscan_msg uscan_verbose dehs_verbose uscan_warn uscan_debug uscan_msg_raw
uscan_extra_debug uscan_die dehs_output $dehs $verbose $dehs_tags
$dehs_start_output $dehs_end_output $found
));
# ACCESSORS
our ($dehs, $dehs_tags, $dehs_start_output, $dehs_end_output, $found)
= (0, {}, 0, 0);
our $progname = basename($0);
sub printwarn_raw {
my ($msg, $w) = @_;
if ($w or $dehs) {
print STDERR "$msg";
} else {
print "$msg";
}
}
sub printwarn {
my ($msg, $w) = @_;
chomp $msg;
printwarn_raw("$msg\n", $w);
}
sub uscan_msg_raw {
printwarn_raw($_[0]);
}
sub uscan_msg {
printwarn($_[0]);
}
sub uscan_verbose {
ds_verbose($_[0], $dehs);
}
sub uscan_debug {
ds_debug($_[0], $dehs);
}
sub uscan_extra_debug {
ds_extra_debug($_[0], $dehs);
}
sub dehs_verbose ($) {
my $msg = $_[0];
push @{ $dehs_tags->{'messages'} }, "$msg\n";
uscan_verbose($msg);
}
sub uscan_warn ($) {
my $msg = $_[0];
push @{ $dehs_tags->{'warnings'} }, $msg if $dehs;
printwarn("$progname warn: $msg" . &Devscripts::Output::who_called, 1);
}
sub uscan_die ($) {
my $msg = $_[0];
if ($dehs) {
$dehs_tags = { 'errors' => "$msg" };
$dehs_end_output = 1;
dehs_output();
}
$msg = "$progname die: $msg" . &Devscripts::Output::who_called;
if ($Devscripts::Output::die_on_error) {
die $msg;
}
printwarn($msg, 1);
}
sub dehs_output () {
return unless $dehs;
if (!$dehs_start_output) {
print "<dehs>\n";
$dehs_start_output = 1;
}
for my $tag (
qw(package debian-uversion debian-mangled-uversion
upstream-version upstream-url decoded-checksum
status target target-path messages warnings errors)
) {
if (exists $dehs_tags->{$tag}) {
if (ref $dehs_tags->{$tag} eq "ARRAY") {
foreach my $entry (@{ $dehs_tags->{$tag} }) {
$entry =~ s/&/&amp;/g;
$entry =~ s/</&lt;/g;
$entry =~ s/>/&gt;/g;
print "<$tag>$entry</$tag>\n";
}
} else {
$dehs_tags->{$tag} =~ s/&/&amp;/g;
$dehs_tags->{$tag} =~ s/</&lt;/g;
$dehs_tags->{$tag} =~ s/>/&gt;/g;
print "<$tag>$dehs_tags->{$tag}</$tag>\n";
}
}
}
foreach my $cmp (@{ $dehs_tags->{'component-name'} }) {
print qq'<component id="$cmp">\n';
foreach my $tag (
qw(debian-uversion debian-mangled-uversion
upstream-version upstream-url target target-path)
) {
my $v = shift @{ $dehs_tags->{"component-$tag"} };
print " <component-$tag>$v</component-$tag>\n" if $v;
}
print "</component>\n";
}
if ($dehs_end_output) {
print "</dehs>\n";
}
# Don't repeat output
$dehs_tags = {};
}
1;
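# Minimal DEHS sketch (illustrative values): with $dehs set, output is
# buffered in $dehs_tags and flushed as XML by dehs_output().
#
#   $dehs                            = 1;
#   $dehs_tags->{package}            = 'foo';
#   $dehs_tags->{'upstream-version'} = '1.2.3';
#   $dehs_end_output                 = 1;
#   dehs_output();    # prints <dehs>, the two tags above, then </dehs>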


@@ -0,0 +1,475 @@
package Devscripts::Uscan::Utils;
use strict;
use Devscripts::Uscan::Output;
use Devscripts::Utils;
use Exporter 'import';
our @EXPORT = (
qw(fix_href recursive_regex_dir newest_dir get_compression
get_suffix get_priority quoted_regex_parse safe_replace mangle
uscan_exec uscan_exec_no_fail)
);
#######################################################################
# {{{ code 5: utility functions (download)
#######################################################################
sub fix_href ($) {
my ($href) = @_;
# Remove newline (code moved from outside fix_href)
$href =~ s/\n//g;
# Remove whitespace from URLs:
# https://www.w3.org/TR/html5/links.html#links-created-by-a-and-area-elements
$href =~ s/^\s+//;
$href =~ s/\s+$//;
return $href;
}
sub recursive_regex_dir ($$$$$$) {
# If this returns '', the calling code returns 1
my ($line, $base, $dirversionmangle, $watchfile, $lineptr,
$download_version)
= @_;
$base =~ m%^(\w+://[^/]+)/(.*)$%;
my $site = $1;
my @dirs = ();
if (defined $2) {
@dirs = split /(\/)/, $2;
}
my $dir = '/';
foreach my $dirpattern (@dirs) {
if ($dirpattern =~ /\(.*\)/) {
uscan_verbose "dir=>$dir dirpattern=>$dirpattern";
my $newest_dir = newest_dir($line, $site, $dir, $dirpattern,
$dirversionmangle, $watchfile, $lineptr, $download_version);
uscan_verbose "newest_dir => '$newest_dir'";
if ($newest_dir ne '') {
$dir .= "$newest_dir";
} else {
uscan_debug "No \$newest_dir";
return '';
}
} else {
$dir .= "$dirpattern";
}
}
return $site . $dir;
}
# very similar to code above
sub newest_dir ($$$$$$$$) {
# returns string $newdir on success
# returns '' on error, causing the grandparent code to return 1
my ($line, $site, $dir, $pattern, $dirversionmangle, $watchfile,
$lineptr, $download_version)
= @_;
my ($newdir);
uscan_verbose "Requesting URL:\n $site$dir";
if ($site =~ m%^http(s)?://%) {
require Devscripts::Uscan::http;
$newdir = Devscripts::Uscan::http::http_newdir($1, @_);
} elsif ($site =~ m%^ftp://%) {
require Devscripts::Uscan::ftp;
$newdir = Devscripts::Uscan::ftp::ftp_newdir(@_);
} else {
# Neither HTTP nor FTP site
uscan_warn "neither HTTP nor FTP site, impossible case for newdir().";
$newdir = '';
}
return $newdir;
}
#######################################################################
# }}} code 5: utility functions (download)
#######################################################################
#######################################################################
# {{{ code 6: utility functions (compression)
#######################################################################
# Get legal values for compression
sub get_compression ($) {
my $compression = $_[0];
my $canonical_compression;
# be liberal in what you accept...
my %opt2comp = (
gz => 'gzip',
gzip => 'gzip',
bz2 => 'bzip2',
bzip2 => 'bzip2',
lzma => 'lzma',
xz => 'xz',
zip => 'zip',
zst => 'zst',
zstd => 'zst',
);
# Normalize compression methods to the names used by Dpkg::Compression
if (exists $opt2comp{$compression}) {
$canonical_compression = $opt2comp{$compression};
} else {
uscan_die "$progname: invalid compression, $compression given.";
}
return $canonical_compression;
}
# Get legal values for compression suffix
sub get_suffix ($) {
my $compression = $_[0];
my $canonical_suffix;
# be liberal in what you accept...
my %opt2suffix = (
gz => 'gz',
gzip => 'gz',
bz2 => 'bz2',
bzip2 => 'bz2',
lzma => 'lzma',
xz => 'xz',
zip => 'zip',
zst => 'zst',
zstd => 'zst',
);
# Normalize compression methods to the names used by Dpkg::Compression
if (exists $opt2suffix{$compression}) {
$canonical_suffix = $opt2suffix{$compression};
} elsif ($compression eq 'default') {
require Devscripts::MkOrigtargz::Config;
return &Devscripts::MkOrigtargz::Config::default_compression;
} else {
uscan_die "$progname: invalid suffix, $compression given.";
}
return $canonical_suffix;
}
# Get compression priority
sub get_priority ($) {
my $href = $_[0];
my $priority = 0;
if ($href =~ m/\.tar\.gz/i) {
$priority = 1;
}
if ($href =~ m/\.tar\.bz2/i) {
$priority = 2;
}
if ($href =~ m/\.tar\.lzma/i) {
$priority = 3;
}
#if ($href =~ m/\.tar\.zstd?/i) {
# $priority = 4;
#}
if ($href =~ m/\.tar\.xz/i) {
$priority = 4;
}
return $priority;
}
#######################################################################
# }}} code 6: utility functions (compression)
#######################################################################
#######################################################################
# {{{ code 7: utility functions (regex)
#######################################################################
sub quoted_regex_parse($) {
my $pattern = shift;
my %closers = ('{', '}', '[', ']', '(', ')', '<', '>');
$pattern =~ /^(s|tr|y)(.)(.*)$/;
my ($sep, $rest) = ($2, $3 || '');
my $closer = $closers{$sep};
my $parsed_ok = 1;
my $regexp = '';
my $replacement = '';
my $flags = '';
my $open = 1;
my $last_was_escape = 0;
my $in_replacement = 0;
for my $char (split //, $rest) {
if ($char eq $sep and !$last_was_escape) {
$open++;
if ($open == 1) {
if ($in_replacement) {
# Separator after end of replacement
uscan_warn "Extra \"$sep\" after end of replacement.";
$parsed_ok = 0;
last;
} else {
$in_replacement = 1;
}
} else {
if ($open > 1) {
if ($in_replacement) {
$replacement .= $char;
} else {
$regexp .= $char;
}
}
}
} elsif ($char eq $closer and !$last_was_escape) {
$open--;
if ($open > 0) {
if ($in_replacement) {
$replacement .= $char;
} else {
$regexp .= $char;
}
} elsif ($open < 0) {
uscan_warn "Extra \"$closer\" after end of replacement.";
$parsed_ok = 0;
last;
}
} else {
if ($in_replacement) {
if ($open) {
$replacement .= $char;
} else {
$flags .= $char;
}
} else {
if ($open) {
$regexp .= $char;
} elsif ($char !~ m/\s/) {
uscan_warn
"Non-whitespace between <...> and <...> (or similars).";
$parsed_ok = 0;
last;
}
# skip blanks between <...> and <...> (or similar)
}
}
# Don't treat \\ as an escape
$last_was_escape = ($char eq '\\' and !$last_was_escape);
}
unless ($in_replacement and $open == 0) {
uscan_warn "Empty replacement string.";
$parsed_ok = 0;
}
return ($parsed_ok, $regexp, $replacement, $flags);
}
sub safe_replace($$) {
my ($in, $pat) = @_;
eval "uscan_debug \"safe_replace input=\\\"\$\$in\\\"\\n\"";
$pat =~ s/^\s*(.*?)\s*$/$1/;
$pat =~ /^(s|tr|y)(.)/;
my ($op, $sep) = ($1, $2 || '');
my $esc = "\Q$sep\E";
my ($parsed_ok, $regexp, $replacement, $flags);
if ($sep eq '{' or $sep eq '(' or $sep eq '[' or $sep eq '<') {
($parsed_ok, $regexp, $replacement, $flags) = quoted_regex_parse($pat);
unless ($parsed_ok) {
uscan_warn "stop mangling: rule=\"$pat\"\n"
. " mangling rule with <...>, (...), {...} failed.";
return 0;
}
} elsif ($pat
!~ /^(?:s|tr|y)$esc((?:\\.|[^\\$esc])*)$esc((?:\\.|[^\\$esc])*)$esc([a-z]*)$/
) {
$sep = "/" if $sep eq '';
uscan_warn "stop mangling: rule=\"$pat\"\n"
. " rule doesn't match \"(s|tr|y)$sep.*$sep.*$sep\[a-z\]*\" (or similar).";
return 0;
} else {
($regexp, $replacement, $flags) = ($1, $2, $3);
}
uscan_debug
"safe_replace with regexp=\"$regexp\", replacement=\"$replacement\", and flags=\"$flags\"";
my $safeflags = $flags;
if ($op eq 'tr' or $op eq 'y') {
$safeflags =~ tr/cds//cd;
if ($safeflags ne $flags) {
uscan_warn "stop mangling: rule=\"$pat\"\n"
. " flags must consist of \"cds\" only.";
return 0;
}
$regexp =~ s/\\(.)/$1/g;
$replacement =~ s/\\(.)/$1/g;
$regexp =~ s/([^-])/'\\x' . unpack 'H*', $1/ge;
$replacement =~ s/([^-])/'\\x' . unpack 'H*', $1/ge;
eval "\$\$in =~ tr<$regexp><$replacement>$flags;";
if ($@) {
uscan_warn "stop mangling: rule=\"$pat\"\n"
. " mangling \"tr\" or \"y\" rule execution failed.";
return 0;
} else {
return 1;
}
} else {
$safeflags =~ tr/gix//cd;
if ($safeflags ne $flags) {
uscan_warn "stop mangling: rule=\"$pat\"\n"
. " flags must consist of \"gix\" only.";
return 0;
}
my $global = ($flags =~ s/g//);
$flags = "(?$flags)" if length $flags;
my $slashg;
if ($regexp =~ /(?<!\\)(\\\\)*\\G/) {
$slashg = 1;
# if it's not initial, it is too dangerous
if ($regexp =~ /^.*[^\\](\\\\)*\\G/) {
uscan_warn "stop mangling: rule=\"$pat\"\n"
. " dangerous use of \\G with regexp=\"$regexp\".";
return 0;
}
}
# Behave like Perl and treat e.g. "\." in replacement as "."
# We allow the case escape characters to remain and
# process them later
$replacement =~ s/(^|[^\\])\\([^luLUE])/$1$2/g;
# Unescape escaped separator characters
$replacement =~ s/\\\Q$sep\E/$sep/g;
# If bracketing quotes were used, also unescape the
# closing version
### {{ ### (FOOL EDITOR for non-quoted kets)
$replacement =~ s/\\\Q}\E/}/g if $sep eq '{';
$replacement =~ s/\\\Q]\E/]/g if $sep eq '[';
$replacement =~ s/\\\Q)\E/)/g if $sep eq '(';
$replacement =~ s/\\\Q>\E/>/g if $sep eq '<';
# The replacement below will modify $replacement so keep
# a copy. We'll need to restore it to the current value if
# the global flag was set on the input pattern.
my $orig_replacement = $replacement;
my ($first, $last, $pos, $zerowidth, $matched, @captures) = (0, -1, 0);
while (1) {
eval {
# handle errors due to unsafe constructs in $regexp
no re 'eval';
# restore position
pos($$in) = $pos if $pos;
if ($zerowidth) {
# previous match was a zero-width match, simulate it to set
# the internal flag that avoids the infinite loop
$$in =~ /()/g;
}
# Need to use /g to make it use and save pos()
$matched = ($$in =~ /$flags$regexp/g);
if ($matched) {
# save position and size of the match
my $oldpos = $pos;
$pos = pos($$in);
($first, $last) = ($-[0], $+[0]);
if ($slashg) {
# \G in the match, weird things can happen
$zerowidth = ($pos == $oldpos);
# For example, matching without a match
$matched = 0
if ( not defined $first
or not defined $last);
} else {
$zerowidth = ($last - $first == 0);
}
for my $i (0 .. $#-) {
$captures[$i] = substr $$in, $-[$i], $+[$i] - $-[$i];
}
}
};
if ($@) {
uscan_warn "stop mangling: rule=\"$pat\"\n"
. " mangling \"s\" rule execution failed.";
return 0;
}
# No match; leave the original string untouched but return
# success as there was nothing wrong with the pattern
return 1 unless $matched;
# Replace $X
$replacement
=~ s/[\$\\](\d)/defined $captures[$1] ? $captures[$1] : ''/ge;
$replacement
=~ s/\$\{(\d)\}/defined $captures[$1] ? $captures[$1] : ''/ge;
$replacement =~ s/\$&/$captures[0]/g;
# Make \l etc escapes work
$replacement =~ s/\\l(.)/lc $1/e;
$replacement =~ s/\\L(.*?)(\\E|\z)/lc $1/e;
$replacement =~ s/\\u(.)/uc $1/e;
$replacement =~ s/\\U(.*?)(\\E|\z)/uc $1/e;
# Actually do the replacement
substr $$in, $first, $last - $first, $replacement;
# Update position
$pos += length($replacement) - ($last - $first);
if ($global) {
$replacement = $orig_replacement;
} else {
last;
}
}
return 1;
}
}
# call this as
# if mangle($watchfile, \$line, 'uversionmangle:',
# \@{$options{'uversionmangle'}}, \$version) {
# return 1;
# }
sub mangle($$$$$) {
my ($watchfile, $lineptr, $name, $rulesptr, $verptr) = @_;
foreach my $pat (@{$rulesptr}) {
if (!safe_replace($verptr, $pat)) {
uscan_warn "In $watchfile, potentially"
. " unsafe or malformed $name"
. " pattern:\n '$pat'"
. " found. Skipping watchline\n"
. " $$lineptr";
return 1;
}
uscan_debug "After $name $$verptr";
}
return 0;
}
*uscan_exec_no_fail = \&ds_exec_no_fail;
*uscan_exec = \&ds_exec;
#######################################################################
# }}} code 7: utility functions (regex)
#######################################################################
1;
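# Minimal sketch of a single mangling rule applied in place
# (illustrative values):
#
#   use Devscripts::Uscan::Utils;
#   my $version = '1.2.3-rc1';
#   safe_replace(\$version, 's/-rc/~rc/');    # $version becomes "1.2.3~rc1"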


@@ -0,0 +1,517 @@
=head1 NAME
Devscripts::Uscan::WatchFile - watchfile object for L<uscan>
=head1 SYNOPSIS
use Devscripts::Uscan::Config;
use Devscripts::Uscan::WatchFile;
my $config = Devscripts::Uscan::Config->new({
# Uscan config parameters. Example:
destdir => '..',
});
# You can use Devscripts::Uscan::FindFiles to find watchfiles
my $wf = Devscripts::Uscan::WatchFile->new({
config => $config,
package => $package,
pkg_dir => $pkg_dir,
pkg_version => $version,
watchfile => $watchfile,
});
return $wf->status if ( $wf->status );
# Do the job
return $wf->process_lines;
=head1 DESCRIPTION
Uscan class to parse watchfiles.
=head1 METHODS
=head2 new() I<(Constructor)>
Parses the watch file and creates a L<Devscripts::Uscan::WatchLine> object for
each line.
=head3 Required parameters
=over
=item config: L<Devscripts::Uscan::Config> object
=item package: Debian package name
=item pkg_dir: Working directory
=item pkg_version: Current Debian package version
=back
=head2 Main accessors
=over
=item watchlines: ref to the array that contains watchlines objects
=item watch_version: format version of the watchfile
=back
=head2 process_lines()
Method that launches Devscripts::Uscan::WatchLine::process() on each watchline.
=head1 SEE ALSO
L<uscan>, L<Devscripts::Uscan::WatchLine>, L<Devscripts::Uscan::Config>,
L<Devscripts::Uscan::FindFiles>
=head1 AUTHOR
B<uscan> was originally written by Christoph Lameter
E<lt>clameter@debian.orgE<gt> (I believe), modified by Julian Gilbey
E<lt>jdg@debian.orgE<gt>. HTTP support was added by Piotr Roszatycki
E<lt>dexter@debian.orgE<gt>. B<uscan> was rewritten in Perl by Julian Gilbey.
Xavier Guimard E<lt>yadd@debian.orgE<gt> rewrote uscan in object
oriented Perl.
=head1 COPYRIGHT AND LICENSE
Copyright 2002-2006 by Julian Gilbey <jdg@debian.org>,
2018 by Xavier Guimard <yadd@debian.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
=cut
package Devscripts::Uscan::WatchFile;
use strict;
use Devscripts::Uscan::Downloader;
use Devscripts::Uscan::Output;
use Devscripts::Uscan::WatchLine;
use Dpkg::Version;
use File::Copy qw/copy move/;
use List::Util qw/first/;
use Moo;
use constant {
ANY_VERSION => '(?:[-_]?[Vv]?(\d[\-+\.:\~\da-zA-Z]*))',
ARCHIVE_EXT =>
'(?i)(?:\.(?:tar\.xz|tar\.bz2|tar\.gz|tar\.zstd?|zip|tgz|tbz|txz))',
DEB_EXT => '(?:[\+~](debian|dfsg|ds|deb)(\.)?(\d+)?$)',
};
use constant SIGNATURE_EXT => ARCHIVE_EXT . '(?:\.(?:asc|pgp|gpg|sig|sign))';
# Required new() parameters
has config => (is => 'rw', required => 1);
has package => (is => 'ro', required => 1); # Debian package
has pkg_dir => (is => 'ro', required => 1);
has pkg_version => (is => 'ro', required => 1);
has bare => (
is => 'rw',
lazy => 1,
default => sub { $_[0]->config->bare });
has download => (
is => 'rw',
lazy => 1,
default => sub { $_[0]->config->download });
has downloader => (
is => 'ro',
lazy => 1,
default => sub {
Devscripts::Uscan::Downloader->new({
timeout => $_[0]->config->timeout,
agent => $_[0]->config->user_agent,
pasv => $_[0]->config->pasv,
destdir => $_[0]->config->destdir,
headers => $_[0]->config->http_header,
});
},
);
has signature => (
is => 'rw',
required => 1,
lazy => 1,
default => sub { $_[0]->config->signature });
has watchfile => (is => 'ro', required => 1); # usually debian/watch
# Internal attributes
has group => (is => 'rw', default => sub { [] });
has origcount => (is => 'rw');
has origtars => (is => 'rw', default => sub { [] });
has status => (is => 'rw', default => sub { 0 });
has watch_version => (is => 'rw');
has watchlines => (is => 'rw', default => sub { [] });
# Values shared between lines
has shared => (
is => 'rw',
lazy => 1,
default => \&new_shared,
);
sub new_shared {
return {
bare => $_[0]->bare,
components => [],
common_newversion => undef,
common_mangled_newversion => undef,
download => $_[0]->download,
download_version => undef,
origcount => undef,
origtars => [],
previous_download_available => undef,
previous_newversion => undef,
previous_newfile_base => undef,
previous_sigfile_base => undef,
signature => $_[0]->signature,
uscanlog => undef,
};
}
has keyring => (
is => 'ro',
default => sub { Devscripts::Uscan::Keyring->new });
sub BUILD {
my ($self, $args) = @_;
my $watch_version = 0;
my $nextline;
$dehs_tags = {};
uscan_verbose "Process watch file at: $args->{watchfile}\n"
. " package = $args->{package}\n"
. " version = $args->{pkg_version}\n"
. " pkg_dir = $args->{pkg_dir}";
$self->origcount(0); # reset to 0 for each watch file
unless (open WATCH, '<', $args->{watchfile}) {
uscan_warn "could not open $args->{watchfile}: $!";
return 1;
}
my $lineNumber = 0;
while (<WATCH>) {
next if /^\s*\#/;
next if /^\s*$/;
s/^\s*//;
CHOMP:
# Reassemble lines split using \
chomp;
if (s/(?<!\\)\\$//) {
if (eof(WATCH)) {
uscan_warn
"$args->{watchfile} ended with \\; skipping last line";
$self->status(1);
last;
}
if ($watch_version > 3) {
# drop leading \s only if version 4
$nextline = <WATCH>;
$nextline =~ s/^\s*//;
$_ .= $nextline;
} else {
$_ .= <WATCH>;
}
goto CHOMP;
}
# "version" must be the first field
if (!$watch_version) {
# Looking for "version" field.
if (/^version\s*=\s*(\d+)(\s|$)/) { # Found
$watch_version = $1;
# Note that version=1 watchfiles have no "version" field so
# authorized values are >= 2 and <= CURRENT_WATCHFILE_VERSION
if ( $watch_version < 2
or $watch_version
> $Devscripts::Uscan::Config::CURRENT_WATCHFILE_VERSION) {
# "version" field found but has no authorizated value
uscan_warn
"$args->{watchfile} version number is unrecognised; skipping watch file";
last;
}
# Next line
next;
}
# version=1 is deprecated
else {
$watch_version = 1;
}
}
if ($watch_version < 3) {
uscan_warn
"$args->{watchfile} is an obsolete version $watch_version watch file;\n"
. " please upgrade to a higher version\n"
. " (see uscan(1) for details).";
}
# "version" is fixed, parsing lines now
# Are there any warnings from this part to give if we're using dehs?
dehs_output if ($dehs);
# Handle shell \\ -> \
s/\\\\/\\/g if $watch_version == 1;
# Handle @PACKAGE@ @ANY_VERSION@ @ARCHIVE_EXT@ substitutions
s/\@PACKAGE\@/$args->{package}/g;
s/\@ANY_VERSION\@/ANY_VERSION/ge;
s/\@ARCHIVE_EXT\@/ARCHIVE_EXT/ge;
s/\@SIGNATURE_EXT\@/SIGNATURE_EXT/ge;
s/\@DEB_EXT\@/DEB_EXT/ge;
my $line = Devscripts::Uscan::WatchLine->new({
# Shared between lines
config => $self->config,
downloader => $self->downloader,
shared => $self->shared,
keyring => $self->keyring,
# Other parameters
line => $_,
pkg => $self->package,
pkg_dir => $self->pkg_dir,
pkg_version => $self->pkg_version,
watch_version => $watch_version,
watchfile => $self->watchfile,
});
push @{ $self->group }, $lineNumber
if ($line->type and $line->type =~ /^(?:group|checksum)$/);
push @{ $self->watchlines }, $line;
$lineNumber++;
}
close WATCH
or $self->status(1),
uscan_warn "problems reading $$args->{watchfile}: $!";
$self->watch_version($watch_version);
}
sub process_lines {
my ($self) = shift;
return $self->process_group if (@{ $self->group });
foreach (@{ $self->watchlines }) {
# search newfile and newversion
my $res = $_->process;
$self->status($res) if ($res);
}
return $self->{status};
}
sub process_group {
my ($self) = @_;
my $saveDconfig = $self->config->download_version;
# Build version
my @cur_versions = split /\+~/, $self->pkg_version;
my $checksum = 0;
my $newChecksum = 0;
if ( $cur_versions[$#cur_versions]
and $cur_versions[$#cur_versions] =~ s/^cs//) {
$checksum = pop @cur_versions;
}
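# Illustration (hypothetical version): an upstream version string like
# "1.2.3+~0.4.5+~cs6.7.8" is split here into the per-component versions
# ("1.2.3", "0.4.5") while the trailing "cs" part ("6.7.8") is kept as
# the previously recorded checksum version.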
my (@new_versions, @last_debian_mangled_uversions, @last_versions);
my $download = 0;
my $last_shared = $self->shared;
my $last_comp_version;
my @dversion;
my @ck_versions;
# Split the requested download version into its per-component parts
if (my $v = $self->config->download_version) {
@dversion = map { s/\+.*$//; /^cs/ ? () : $_ } split /\+~/, $v;
}
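# Illustration (hypothetical value): --download-version "1.2.3+~0.4.5+~cs6.7.8"
# yields @dversion = ("1.2.3", "0.4.5"); the "cs..." checksum part is
# dropped because it is recomputed from the component versions below.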
foreach my $line (@{ $self->watchlines }) {
if ( $line->type and $line->type eq 'group'
or $line->type eq 'checksum') {
$last_shared = $self->new_shared;
$last_comp_version = shift @cur_versions if $line->type eq 'group';
}
if ($line->type and $line->type eq 'group') {
$line->{groupDversion} = shift @dversion;
}
$line->shared($last_shared);
$line->pkg_version($last_comp_version || 0);
}
# Check if download is needed
foreach my $line (@{ $self->watchlines }) {
next unless ($line->type eq 'group' or $line->type eq 'checksum');
# Stop on error
$self->config->download_version($line->{groupDversion})
if $line->{groupDversion};
$self->config->download_version(undef) if $line->type eq 'checksum';
if ( $line->parse
or $line->search
or $line->get_upstream_url
or $line->get_newfile_base
or ($line->type eq 'group' and $line->cmp_versions)
or ($line->ctype and $line->cmp_versions)) {
$self->{status} += $line->status;
return $self->{status};
}
$download = $line->shared->{download}
if $line->shared->{download} > $download
and ($line->type eq 'group' or $line->ctype);
}
foreach my $line (@{ $self->watchlines }) {
next unless $line->type eq 'checksum';
$newChecksum
= $self->sum($newChecksum, $line->search_result->{newversion});
push @ck_versions, $line->search_result->{newversion};
}
foreach my $line (@{ $self->watchlines }) {
next unless ($line->type eq 'checksum');
$line->parse_result->{mangled_lastversion} = $checksum;
my $tmp = $line->search_result->{newversion};
$line->search_result->{newversion} = $newChecksum;
unless ($line->ctype) {
if ($line->cmp_versions) {
$self->{status} += $line->status;
return $self->{status};
}
$download = $line->shared->{download}
if $line->shared->{download} > $download;
}
$line->search_result->{newversion} = $tmp;
if ($line->component) {
pop @{ $dehs_tags->{'component-upstream-version'} };
push @{ $dehs_tags->{'component-upstream-version'} }, $tmp;
}
}
foreach my $line (@{ $self->watchlines }) {
# Set same $download for all
$line->shared->{download} = $download;
# Non "group" lines where not initialized
unless ($line->type eq 'group' or $line->type eq 'checksum') {
if ( $line->parse
or $line->search
or $line->get_upstream_url
or $line->get_newfile_base
or $line->cmp_versions) {
$self->{status} += $line->status;
return $self->{status};
}
}
if ($line->download_file_and_sig) {
$self->{status} += $line->status;
return $self->{status};
}
if ($line->mkorigtargz) {
$self->{status} += $line->status;
return $self->{status};
}
if ($line->type eq 'group') {
push @new_versions, $line->shared->{common_mangled_newversion}
|| $line->shared->{common_newversion}
|| ();
push @last_versions, $line->parse_result->{lastversion};
push @last_debian_mangled_uversions,
$line->parse_result->{mangled_lastversion};
}
}
my $new_version = join '+~', @new_versions;
if ($newChecksum) {
$new_version .= "+~cs$newChecksum";
}
if ($checksum) {
push @last_versions, "cs$newChecksum";
push @last_debian_mangled_uversions, "cs$checksum";
}
$dehs_tags->{'upstream-version'} = $new_version;
$dehs_tags->{'debian-uversion'} = join('+~', @last_versions)
if (grep { $_ } @last_versions);
$dehs_tags->{'debian-mangled-uversion'} = join '+~',
@last_debian_mangled_uversions
if (grep { $_ } @last_debian_mangled_uversions);
my $mangled_ver
= Dpkg::Version->new(
"1:" . $dehs_tags->{'debian-mangled-uversion'} . "-0",
check => 0);
my $upstream_ver = Dpkg::Version->new("1:$new_version-0", check => 0);
if ($mangled_ver == $upstream_ver) {
$dehs_tags->{'status'} = "up to date";
} elsif ($mangled_ver > $upstream_ver) {
$dehs_tags->{'status'} = "only older package available";
} else {
$dehs_tags->{'status'} = "newer package available";
}
foreach my $line (@{ $self->watchlines }) {
my $path = $line->destfile or next;
my $ver = $line->shared->{common_mangled_newversion};
$path =~ s/\Q$ver\E/$new_version/;
uscan_warn "rename $line->{destfile} to $path\n";
rename $line->{destfile}, $path;
if ($dehs_tags->{"target-path"} eq $line->{destfile}) {
$dehs_tags->{"target-path"} = $path;
$dehs_tags->{target} =~ s/\Q$ver\E/$new_version/;
} else {
for (
my $i = 0 ;
$i < @{ $dehs_tags->{"component-target-path"} } ;
$i++
) {
if ($dehs_tags->{"component-target-path"}->[$i] eq
$line->{destfile}) {
$dehs_tags->{"component-target-path"}->[$i] = $path;
$dehs_tags->{"component-target"}->[$i]
=~ s/\Q$ver\E/$new_version/
or die $ver;
}
}
}
if ($line->signature_available) {
rename "$line->{destfile}.asc", "$path.asc";
rename "$line->{destfile}.sig", "$path.sig";
}
}
if (@ck_versions) {
my $v = join '+~', @ck_versions;
if ($dehs) {
$dehs_tags->{'decoded-checksum'} = $v;
} else {
uscan_verbose 'Checksum ref: ' . join('+~', @ck_versions) . "\n";
}
}
return 0;
}
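# sum() below adds version strings digit-group by digit-group to build the
# combined "cs" checksum version; e.g. (sketch) sum('1.2.3', '2.0.1')
# returns '3.2.4', and sum('1.2', '2.0.1') returns '3.2.1'.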
sub sum {
my ($self, @versions) = @_;
my (@res, @str);
foreach my $v (@versions) {
my @tmp = grep { $_ ne '.' } version_split_digits($v);
for (my $i = 0 ; $i < @tmp ; $i++) {
$str[$i] //= '';
$res[$i] //= 0;
if ($tmp[$i] =~ /^\d+$/) {
$res[$i] += $tmp[$i];
} else {
uscan_die
"Checksum supports only digits in versions, $tmp[$i] is not accepted";
}
}
}
for (my $i = 0 ; $i < @res ; $i++) {
my $tmp = shift @str;
$res[$i] .= $tmp if $tmp ne '';
}
push @res, @str;
return join '.', @res;
}
1;

File diff suppressed because it is too large


@ -0,0 +1,95 @@
# Common sub shared between git and svn
package Devscripts::Uscan::_vcs;
use strict;
use Devscripts::Uscan::Output;
use Devscripts::Uscan::Utils;
use Exporter 'import';
use File::Basename;
our @EXPORT = ('get_refs');
our $progname = basename($0);
sub _vcs_newfile_base {
my ($self) = @_;
# Compression may optionally be deferred to mk-origtargz
my $newfile_base = "$self->{pkg}-$self->{search_result}->{newversion}.tar";
if (!$self->config->{vcs_export_uncompressed}) {
$newfile_base .= '.' . get_suffix($self->compression);
}
return $newfile_base;
}
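# Illustration (assumed values): for package "foo" and detected version
# "1.2.3" with xz compression this yields "foo-1.2.3.tar.xz", or just
# "foo-1.2.3.tar" when vcs-export-uncompressed is set and compression is
# left to mk-origtargz.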
sub get_refs {
my ($self, $command, $ref_pattern, $package) = @_;
my @command = @$command;
my ($newfile, $newversion);
{
local $, = ' ';
uscan_verbose "Execute: @command";
}
open(REFS, "-|", @command)
|| uscan_die "$progname: you must have the $package package installed";
my @refs;
my $ref;
my $version;
while (<REFS>) {
chomp;
uscan_debug "$_";
if ($_ =~ $ref_pattern) {
$ref = $1;
foreach my $_pattern (@{ $self->patterns }) {
$version = join(".",
map { $_ if defined($_) } $ref =~ m&^$_pattern$&);
if (
mangle(
$self->watchfile, \$self->line,
'uversionmangle:', \@{ $self->uversionmangle },
\$version
)
) {
return undef;
}
push @refs, [$version, $ref];
}
}
}
if (@refs) {
@refs = Devscripts::Versort::upstream_versort(@refs);
my $msg = "Found the following matching refs:\n";
foreach my $ref (@refs) {
$msg .= " $$ref[1] ($$ref[0])\n";
}
uscan_verbose "$msg";
if ($self->shared->{download_version}
and not $self->versionmode eq 'ignore') {
# keep the refs whose $version matched $download_version in the loop above
my @vrefs
= grep { $$_[0] eq $self->shared->{download_version} } @refs;
if (@vrefs) {
($newversion, $newfile) = @{ $vrefs[0] };
} else {
uscan_warn
"$progname warning: In $self->{watchfile} no matching"
. " refs for version "
. $self->shared->{download_version}
. " in watch line\n "
. $self->{line};
return undef;
}
} else {
($newversion, $newfile) = @{ $refs[0] };
}
} else {
uscan_warn "$progname warning: In $self->{watchfile},\n"
. " no matching refs for watch line\n"
. " $self->{line}";
return undef;
}
return ($newversion, $newfile);
}
1;


@ -0,0 +1,90 @@
# Common sub shared between http and ftp
package Devscripts::Uscan::_xtp;
use strict;
use File::Basename;
use Exporter 'import';
use Devscripts::Uscan::Output;
use Devscripts::Uscan::Utils;
our @EXPORT = ('partial_version');
sub _xtp_newfile_base {
my ($self) = @_;
my $newfile_base;
if (@{ $self->filenamemangle }) {
# HTTP or FTP site (with filenamemangle)
if ($self->versionless) {
$newfile_base = $self->upstream_url;
} else {
$newfile_base = $self->search_result->{newfile};
}
my $cmp = $newfile_base;
uscan_verbose "Matching target for filenamemangle: $newfile_base";
if (
mangle(
$self->watchfile, \$self->line,
'filenamemangle:', \@{ $self->filenamemangle },
\$newfile_base
)
) {
$self->status(1);
return undef;
}
if ($newfile_base =~ m/^(?:https?|ftp):/) {
$newfile_base = basename($newfile_base);
}
if ($cmp eq $newfile_base) {
uscan_die "filenamemangle failed for $cmp";
}
unless ($self->search_result->{newversion}) {
# uversionmangled version is '', make a best effort to set it
$newfile_base
=~ m/^.+?[-_]?(\d[\-+\.:\~\da-zA-Z]*)(?:\.tar\.(gz|bz2|xz|zstd?)|\.zip)$/i;
$self->search_result->{newversion} = $1;
unless ($self->search_result->{newversion}) {
uscan_warn
"Fix filenamemangle to produce a filename with the correct version";
$self->status(1);
return undef;
}
uscan_verbose
"Newest upstream tarball version from the filenamemangled filename: $self->{search_result}->{newversion}";
}
} else {
# HTTP or FTP site (without filenamemangle)
$newfile_base = basename($self->search_result->{newfile});
if ($self->mode eq 'http') {
# Strip any query string or fragment from the URL
$newfile_base =~ s/[\?#].*$//; # e.g. PyPI download links
# just in case this leaves us with nothing
if ($newfile_base eq '') {
uscan_warn
"No good upstream filename found after removing tailing ?... and #....\n Use filenamemangle to fix this.";
$self->status(1);
return undef;
}
}
}
return $newfile_base;
}
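# partial_version() returns truncated forms of the requested download
# version so directory patterns can match partial version prefixes;
# e.g. (sketch) partial_version('1.2.3.4') returns ('1', '1.2', '1.2.3').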
sub partial_version {
my ($download_version) = @_;
my ($d1, $d2, $d3);
if (defined $download_version) {
uscan_verbose "download version requested: $download_version";
if ($download_version
=~ m/^([-~\+\w]+)(\.[-~\+\w]+)?(\.[-~\+\w]+)?(\.[-~\+\w]+)?$/) {
$d1 = "$1" if defined $1;
$d2 = "$1$2" if defined $2;
$d3 = "$1$2$3" if defined $3;
}
}
return ($d1, $d2, $d3);
}
1;

280
lib/Devscripts/Uscan/ftp.pm Normal file

@ -0,0 +1,280 @@
package Devscripts::Uscan::ftp;
use strict;
use Cwd qw/abs_path/;
use Devscripts::Uscan::Output;
use Devscripts::Uscan::Utils;
use Devscripts::Uscan::_xtp;
use Moo::Role;
#######################################################################
# search $newfile $newversion (ftp mode)
#######################################################################
sub ftp_search {
my ($self) = @_;
# FTP site
uscan_verbose "Requesting URL:\n $self->{parse_result}->{base}";
my $request = HTTP::Request->new('GET', $self->parse_result->{base});
my $response = $self->downloader->user_agent->request($request);
if (!$response->is_success) {
uscan_warn
"In watch file $self->{watchfile}, reading FTP directory\n $self->{parse_result}->{base} failed: "
. $response->status_line . "";
return undef;
}
my $content = $response->content;
uscan_extra_debug
"received content:\n$content\n[End of received content] by FTP";
# FTP directory listings either look like:
# info info ... info filename [ -> linkname]
# or they're HTMLised (if they've been through an HTTP proxy)
# so we may have to look for <a href="filename"> type patterns
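# Illustrative examples (not real output):
#   -rw-r--r--   1 ftp  ftp   123456 Jan 01  2024 foo-1.2.3.tar.gz
# or, when HTMLised by a proxy:
#   <a href="foo-1.2.3.tar.gz">foo-1.2.3.tar.gz</a>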
uscan_verbose "matching pattern $self->{parse_result}->{pattern}";
my (@files);
# We separate out HTMLised listings from standard listings, so
# that we can target our search correctly
if ($content =~ /<\s*a\s+[^>]*href/i) {
uscan_verbose "HTMLized FTP listing by the HTTP proxy";
while ($content
=~ m/(?:<\s*a\s+[^>]*href\s*=\s*\")((?-i)$self->{parse_result}->{pattern})\"/gi
) {
my $file = fix_href($1);
my $mangled_version
= join(".", $file =~ m/^$self->{parse_result}->{pattern}$/);
if (
mangle(
$self->watchfile, \$self->line,
'uversionmangle:', \@{ $self->uversionmangle },
\$mangled_version
)
) {
return undef;
}
my $match = '';
if (defined $self->shared->{download_version}
and not $self->versionmode eq 'ignore') {
if ($mangled_version eq $self->shared->{download_version}) {
$match = "matched with the download version";
}
}
my $priority = $mangled_version . '-' . get_priority($file);
push @files, [$priority, $mangled_version, $file, $match];
}
} else {
uscan_verbose "Standard FTP listing.";
# they all look like:
# info info ... info filename [ -> linkname]
for my $ln (split(/\n/, $content)) {
$ln =~ s/^d.*$//; # FTP listing of directory, '' skipped
$ln =~ s/\s+->\s+\S+$//; # FTP listing for link destination
$ln =~ s/^.*\s(\S+)$/$1/; # filename only
if ($ln and $ln =~ m/^($self->{parse_result}->{filepattern})$/) {
my $file = $1;
my $mangled_version = join(".",
$file =~ m/^$self->{parse_result}->{filepattern}$/);
if (
mangle(
$self->watchfile, \$self->line,
'uversionmangle:', \@{ $self->uversionmangle },
\$mangled_version
)
) {
return undef;
}
my $match = '';
if (defined $self->shared->{download_version}) {
if ($mangled_version eq $self->shared->{download_version})
{
$match = "matched with the download version";
}
}
my $priority = $mangled_version . '-' . get_priority($file);
push @files, [$priority, $mangled_version, $file, $match];
}
}
}
if (@files) {
@files = Devscripts::Versort::versort(@files);
my $msg
= "Found the following matching files on the web page (newest first):\n";
foreach my $file (@files) {
$msg .= " $$file[2] ($$file[1]) index=$$file[0] $$file[3]\n";
}
uscan_verbose $msg;
}
my ($newversion, $newfile);
if (defined $self->shared->{download_version}) {
# keep the files for which $match was set in the loop above
my @vfiles = grep { $$_[3] } @files;
if (@vfiles) {
(undef, $newversion, $newfile, undef) = @{ $vfiles[0] };
} else {
uscan_warn
"In $self->{watchfile} no matching files for version $self->{shared}->{download_version}"
. " in watch line\n $self->{line}";
return undef;
}
} else {
if (@files) {
(undef, $newversion, $newfile, undef) = @{ $files[0] };
} else {
uscan_warn
"In $self->{watchfile} no matching files for watch line\n $self->{line}";
return undef;
}
}
return ($newversion, $newfile);
}
sub ftp_upstream_url {
my ($self) = @_;
return $self->parse_result->{base} . $self->search_result->{newfile};
}
*ftp_newfile_base = \&Devscripts::Uscan::_xtp::_xtp_newfile_base;
sub ftp_newdir {
my ($line, $site, $dir, $pattern, $dirversionmangle, $watchfile,
$lineptr, $download_version)
= @_;
my $downloader = $line->downloader;
my ($request, $response, $newdir);
my ($download_version_short1, $download_version_short2,
$download_version_short3)
= partial_version($download_version);
my $base = $site . $dir;
$request = HTTP::Request->new('GET', $base);
$response = $downloader->user_agent->request($request);
if (!$response->is_success) {
uscan_warn
"In watch file $watchfile, reading webpage\n $base failed: "
. $response->status_line;
return '';
}
my $content = $response->content;
uscan_extra_debug
"received content:\n$content\n[End of received content] by FTP";
# FTP directory listings either look like:
# info info ... info filename [ -> linkname]
# or they're HTMLised (if they've been through an HTTP proxy)
# so we may have to look for <a href="filename"> type patterns
uscan_verbose "matching pattern $pattern";
my (@dirs);
my $match = '';
# We separate out HTMLised listings from standard listings, so
# that we can target our search correctly
if ($content =~ /<\s*a\s+[^>]*href/i) {
uscan_verbose "HTMLized FTP listing by the HTTP proxy";
while (
$content =~ m/(?:<\s*a\s+[^>]*href\s*=\s*\")((?-i)$pattern)\"/gi) {
my $dir = $1;
uscan_verbose "Matching target for dirversionmangle: $dir";
my $mangled_version = join(".", $dir =~ m/^$pattern$/);
if (
mangle(
$watchfile, $lineptr,
'dirversionmangle:', \@{$dirversionmangle},
\$mangled_version
)
) {
return 1;
}
$match = '';
if (defined $download_version
and $mangled_version eq $download_version) {
$match = "matched with the download version";
}
if (defined $download_version_short3
and $mangled_version eq $download_version_short3) {
$match = "matched with the download version (partial 3)";
}
if (defined $download_version_short2
and $mangled_version eq $download_version_short2) {
$match = "matched with the download version (partial 2)";
}
if (defined $download_version_short1
and $mangled_version eq $download_version_short1) {
$match = "matched with the download version (partial 1)";
}
push @dirs, [$mangled_version, $dir, $match];
}
} else {
# they all look like:
# info info ... info filename [ -> linkname]
uscan_verbose "Standard FTP listing.";
foreach my $ln (split(/\n/, $content)) {
$ln =~ s/^-.*$//; # FTP listing of file, '' skipped
$ln =~ s/\s+->\s+\S+$//; # FTP listing for link destination
$ln =~ s/^.*\s(\S+)$/$1/; # filename only
if ($ln =~ m/^($pattern)(\s+->\s+\S+)?$/) {
my $dir = $1;
uscan_verbose "Matching target for dirversionmangle: $dir";
my $mangled_version = join(".", $dir =~ m/^$pattern$/);
if (
mangle(
$watchfile, $lineptr,
'dirversionmangle:', \@{$dirversionmangle},
\$mangled_version
)
) {
return 1;
}
$match = '';
if (defined $download_version
and $mangled_version eq $download_version) {
$match = "matched with the download version";
}
if (defined $download_version_short3
and $mangled_version eq $download_version_short3) {
$match = "matched with the download version (partial 3)";
}
if (defined $download_version_short2
and $mangled_version eq $download_version_short2) {
$match = "matched with the download version (partial 2)";
}
if (defined $download_version_short1
and $mangled_version eq $download_version_short1) {
$match = "matched with the download version (partial 1)";
}
push @dirs, [$mangled_version, $dir, $match];
}
}
}
# keep the directories for which $match was set in the loop above
my @vdirs = grep { $$_[2] } @dirs;
if (@vdirs) {
@vdirs = Devscripts::Versort::upstream_versort(@vdirs);
$newdir = $vdirs[0][1];
}
if (@dirs) {
@dirs = Devscripts::Versort::upstream_versort(@dirs);
my $msg
= "Found the following matching FTP directories (newest first):\n";
foreach my $dir (@dirs) {
$msg .= " $$dir[1] ($$dir[0]) $$dir[2]\n";
}
uscan_verbose $msg;
$newdir //= $dirs[0][1];
} else {
uscan_warn
"In $watchfile no matching dirs for pattern\n $base$pattern";
$newdir = '';
}
return $newdir;
}
# Nothing to clean here
sub ftp_clean { 0 }
1;

192
lib/Devscripts/Uscan/git.pm Normal file

@ -0,0 +1,192 @@
package Devscripts::Uscan::git;
use strict;
use Cwd qw/abs_path/;
use Devscripts::Uscan::Output;
use Devscripts::Uscan::Utils;
use Devscripts::Uscan::_vcs;
use Dpkg::IPC;
use File::Path 'remove_tree';
use Moo::Role;
######################################################
# search $newfile $newversion (git mode/versionless)
######################################################
sub git_search {
my ($self) = @_;
my ($newfile, $newversion);
if ($self->versionless) {
$newfile = $self->parse_result->{filepattern}; # HEAD or heads/<branch>
if ($self->pretty eq 'describe') {
$self->git->{mode} = 'full';
}
if ( $self->git->{mode} eq 'shallow'
and $self->parse_result->{filepattern} eq 'HEAD') {
uscan_exec(
'git',
'clone',
'--quiet',
'--bare',
'--depth=1',
$self->parse_result->{base},
"$self->{downloader}->{destdir}/" . $self->gitrepo_dir
);
$self->downloader->gitrepo_state(1);
} elsif ($self->git->{mode} eq 'shallow'
and $self->parse_result->{filepattern} ne 'HEAD')
{ # heads/<branch>
$newfile =~ s&^heads/&&; # Set to <branch>
uscan_exec(
'git',
'clone',
'--quiet',
'--bare',
'--depth=1',
'-b',
"$newfile",
$self->parse_result->{base},
"$self->{downloader}->{destdir}/" . $self->gitrepo_dir
);
$self->downloader->gitrepo_state(1);
} else {
uscan_exec(
'git', 'clone', '--quiet', '--bare',
$self->parse_result->{base},
"$self->{downloader}->{destdir}/" . $self->gitrepo_dir
);
$self->downloader->gitrepo_state(2);
}
if ($self->pretty eq 'describe') {
# use unannotated tags to be on safe side
spawn(
exec => [
'git',
"--git-dir=$self->{downloader}->{destdir}/$self->{gitrepo_dir}",
'describe',
'--tags'
],
wait_child => 1,
to_string => \$newversion
);
$newversion =~ s/-/./g;
chomp($newversion);
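# Illustration (hypothetical output): `git describe --tags` may print
# "v1.2.3-4-g1a2b3c"; after the substitution above this becomes
# "v1.2.3.4.g1a2b3c", which uversionmangle is then typically used to
# clean up further (e.g. dropping the leading "v").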
if (
mangle(
$self->watchfile, \$self->line,
'uversionmangle:', \@{ $self->uversionmangle },
\$newversion
)
) {
return undef;
}
} else {
my $tmp = $ENV{TZ};
$ENV{TZ} = 'UTC';
$newfile
= $self->parse_result->{filepattern}; # HEAD or heads/<branch>
if ($self->parse_result->{filepattern} eq 'HEAD') {
spawn(
exec => [
'git',
"--git-dir=$self->{downloader}->{destdir}/$self->{gitrepo_dir}",
'log',
'-1',
"--date=format-local:$self->{date}",
"--pretty=$self->{pretty}"
],
wait_child => 1,
to_string => \$newversion
);
} else {
$newfile =~ s&^heads/&&; # Set to <branch>
spawn(
exec => [
'git',
"--git-dir=$self->{downloader}->{destdir}/$self->{gitrepo_dir}",
'log',
'-1',
'-b',
"$newfile",
"--date=format-local:$self->{date}",
"--pretty=$self->{pretty}"
],
wait_child => 1,
to_string => \$newversion
);
}
$ENV{TZ} = $tmp;
chomp($newversion);
}
}
################################################
# search $newfile $newversion (git mode w/tag)
################################################
elsif ($self->mode eq 'git') {
my @args = ('ls-remote', $self->parse_result->{base});
# Try to use local upstream branch if available
if (-d '.git') {
my $out;
eval {
spawn(
exec => ['git', 'remote', '--verbose', 'show'],
wait_child => 1,
to_string => \$out
);
};
# Check if git repo found in debian/watch exists in
# `git remote show` output
if ($out and $out =~ /^(\S+)\s+\Q$self->{parse_result}->{base}\E/m)
{
$self->downloader->git_upstream($1);
uscan_warn
"Using $self->{downloader}->{git_upstream} remote origin";
# Found, launch a "fetch" to be up to date
spawn(
exec => ['git', 'fetch', $self->downloader->git_upstream],
wait_child => 1
);
@args = ('show-ref');
}
}
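# Illustration (hypothetical output): a `git remote --verbose show` line
# such as "upstream  https://example.org/foo.git (fetch)" matching the
# watch file URL sets git_upstream to "upstream" and makes the code below
# list refs with `git show-ref` on the local clone instead of
# `git ls-remote` on the remote.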
($newversion, $newfile)
= get_refs($self, ['git', @args], qr/^\S+\s+([^\^\{\}]+)$/, 'git');
return undef if !defined $newversion;
}
return ($newversion, $newfile);
}
sub git_upstream_url {
my ($self) = @_;
my $upstream_url
= $self->parse_result->{base} . ' ' . $self->search_result->{newfile};
return $upstream_url;
}
*git_newfile_base = \&Devscripts::Uscan::_vcs::_vcs_newfile_base;
sub git_clean {
my ($self) = @_;
# If git cloned repo exists and not --debug ($verbose=2) -> remove it
if ( $self->downloader->gitrepo_state > 0
and $verbose < 2
and !$self->downloader->git_upstream) {
my $err;
uscan_verbose "Removing git repo ($self->{downloader}->{destdir}/"
. $self->gitrepo_dir . ")";
remove_tree "$self->{downloader}->{destdir}/" . $self->gitrepo_dir,
{ error => \$err };
if (@$err) {
local $, = "\n\t";
uscan_warn "Errors during git repo clean:\n\t@$err";
}
$self->downloader->gitrepo_state(0);
} else {
uscan_debug "Keep git repo ($self->{downloader}->{destdir}/"
. $self->gitrepo_dir . ")";
}
return 0;
}
1;


@ -0,0 +1,510 @@
package Devscripts::Uscan::http;
use strict;
use Cwd qw/abs_path/;
use Devscripts::Uscan::Output;
use Devscripts::Uscan::Utils;
use Devscripts::Uscan::_xtp;
use Moo::Role;
*http_newfile_base = \&Devscripts::Uscan::_xtp::_xtp_newfile_base;
##################################
# search $newversion (http mode)
##################################
#returns (\@patterns, \@base_sites, \@base_dirs)
sub handle_redirection {
my ($self, $pattern, @additional_bases) = @_;
my @redirections = @{ $self->downloader->user_agent->get_redirections };
my (@patterns, @base_sites, @base_dirs);
uscan_verbose "redirections: @redirections" if @redirections;
foreach my $_redir (@redirections, @additional_bases) {
my $base_dir = $_redir;
$base_dir =~ s%^\w+://[^/]+/%/%;
$base_dir =~ s%/[^/]*(?:[#?].*)?$%/%;
if ($_redir =~ m%^(\w+://[^/]+)%) {
my $base_site = $1;
push @patterns,
quotemeta($base_site) . quotemeta($base_dir) . "$pattern";
push @base_sites, $base_site;
push @base_dirs, $base_dir;
# remove the filename, if any
my $base_dir_orig = $base_dir;
$base_dir =~ s%/[^/]*$%/%;
if ($base_dir ne $base_dir_orig) {
push @patterns,
quotemeta($base_site) . quotemeta($base_dir) . "$pattern";
push @base_sites, $base_site;
push @base_dirs, $base_dir;
}
}
}
return (\@patterns, \@base_sites, \@base_dirs);
}
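# Illustration (hypothetical redirection): if a request was redirected to
# "https://downloads.example.org/pool/foo/index.html", handle_redirection()
# records base_site "https://downloads.example.org" and base_dir
# "/pool/foo/" and prepends them to the search pattern, so hrefs found on
# the redirected page can still be matched and resolved.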
sub http_search {
my ($self) = @_;
# $content: web page to be scraped to find the URLs to be downloaded
if ($self->{parse_result}->{base} =~ /^https/ and !$self->downloader->ssl)
{
uscan_die
"you must have the liblwp-protocol-https-perl package installed\nto use https URLs";
}
uscan_verbose "Requesting URL:\n $self->{parse_result}->{base}";
my $request = HTTP::Request->new('GET', $self->parse_result->{base});
foreach my $k (keys %{ $self->downloader->headers }) {
if ($k =~ /^(.*?)@(.*)$/) {
my $baseUrl = $1;
my $hdr = $2;
if ($self->parse_result->{base} =~ m#^\Q$baseUrl\E(?:/.*)?$#) {
$request->header($hdr => $self->headers->{$k});
uscan_verbose "Set per-host custom header $hdr for "
. $self->parse_result->{base};
} else {
uscan_debug
"$self->parse_result->{base} does not start with $1";
}
} else {
uscan_warn "Malformed http-header: $k";
}
}
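# Illustration (hypothetical configuration): a downloader header entry whose
# key is "https://api.example.org@Authorization" and whose value is
# "token abc123" sets the Authorization header only for requests under
# https://api.example.org; other URLs are left untouched.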
$request->header('Accept-Encoding' => 'gzip');
$request->header('Accept' => '*/*');
my $response = $self->downloader->user_agent->request($request);
if (!$response->is_success) {
uscan_warn
"In watchfile $self->{watchfile}, reading webpage\n $self->{parse_result}->{base} failed: "
. $response->status_line;
return undef;
}
my ($patterns, $base_sites, $base_dirs)
= handle_redirection($self, $self->{parse_result}->{filepattern});
push @{ $self->patterns }, @$patterns;
push @{ $self->sites }, @$base_sites;
push @{ $self->basedirs }, @$base_dirs;
my $content = $response->decoded_content;
uscan_extra_debug
"received content:\n$content\n[End of received content] by HTTP";
my @hrefs;
if (!$self->searchmode or $self->searchmode eq 'html') {
@hrefs = $self->html_search($content, $self->patterns);
} elsif ($self->searchmode eq 'plain') {
@hrefs = $self->plain_search($content);
} else {
uscan_warn 'Unknown searchmode "' . $self->searchmode . '", skipping';
return undef;
}
if (@hrefs) {
@hrefs = Devscripts::Versort::versort(@hrefs);
my $msg
= "Found the following matching hrefs on the web page (newest first):\n";
foreach my $href (@hrefs) {
$msg .= " $$href[2] ($$href[1]) index=$$href[0] $$href[3]\n";
}
uscan_verbose $msg;
}
my ($newversion, $newfile);
if (defined $self->shared->{download_version}
and not $self->versionmode eq 'ignore') {
# keep the hrefs for which $match was set in the loop above
my @vhrefs = grep { $$_[3] } @hrefs;
if (@vhrefs) {
(undef, $newversion, $newfile, undef) = @{ $vhrefs[0] };
} else {
uscan_warn
"In $self->{watchfile} no matching hrefs for version $self->{shared}->{download_version}"
. " in watch line\n $self->{line}";
return undef;
}
} else {
if (@hrefs) {
(undef, $newversion, $newfile, undef) = @{ $hrefs[0] };
} else {
uscan_warn
"In $self->{watchfile} no matching files for watch line\n $self->{line}";
return undef;
}
}
return ($newversion, $newfile);
}
#######################################################################
# determine $upstream_url (http mode)
#######################################################################
# http is complicated due to absolute/relative URL issue
sub http_upstream_url {
my ($self) = @_;
my $upstream_url;
my $newfile = $self->search_result->{newfile};
if ($newfile =~ m%^\w+://%) {
$upstream_url = $newfile;
} elsif ($newfile =~ m%^//%) {
$upstream_url = $self->parse_result->{site};
$upstream_url =~ s/^(https?:).*/$1/;
$upstream_url .= $newfile;
} elsif ($newfile =~ m%^/%) {
# absolute filename
# Were there any redirections? If so try using those first
if ($#{ $self->patterns } > 0) {
# replace $site here with the one we were redirected to
foreach my $index (0 .. $#{ $self->patterns }) {
if ("$self->{sites}->[$index]$newfile"
=~ m&^$self->{patterns}->[$index]$&) {
$upstream_url = "$self->{sites}->[$index]$newfile";
last;
}
}
if (!defined($upstream_url)) {
uscan_verbose
"Unable to determine upstream url from redirections,\n"
. "defaulting to using site specified in watch file";
$upstream_url = "$self->{sites}->[0]$newfile";
}
} else {
$upstream_url = "$self->{sites}->[0]$newfile";
}
} else {
# relative filename, we hope
# Were there any redirections? If so try using those first
if ($#{ $self->patterns } > 0) {
# replace $site here with the one we were redirected to
foreach my $index (0 .. $#{ $self->patterns }) {
# skip unless the basedir looks like a directory
next unless $self->{basedirs}->[$index] =~ m%/$%;
my $nf = "$self->{basedirs}->[$index]$newfile";
if ("$self->{sites}->[$index]$nf"
=~ m&^$self->{patterns}->[$index]$&) {
$upstream_url = "$self->{sites}->[$index]$nf";
last;
}
}
if (!defined($upstream_url)) {
uscan_verbose
"Unable to determine upstream url from redirections,\n"
. "defaulting to using site specified in watch file";
$upstream_url = "$self->{parse_result}->{urlbase}$newfile";
}
} else {
$upstream_url = "$self->{parse_result}->{urlbase}$newfile";
}
}
# mangle if necessary
$upstream_url =~ s/&amp;/&/g;
uscan_verbose "Matching target for downloadurlmangle: $upstream_url";
if (@{ $self->downloadurlmangle }) {
if (
mangle(
$self->watchfile, \$self->line,
'downloadurlmangle:', \@{ $self->downloadurlmangle },
\$upstream_url
)
) {
$self->status(1);
return undef;
}
}
return $upstream_url;
}
sub http_newdir {
my ($https, $line, $site, $dir, $pattern, $dirversionmangle,
$watchfile, $lineptr, $download_version)
= @_;
my $downloader = $line->downloader;
my ($request, $response, $newdir);
my ($download_version_short1, $download_version_short2,
$download_version_short3)
= partial_version($download_version);
my $base = $site . $dir;
$pattern .= "/?";
if (defined($https) and !$downloader->ssl) {
uscan_die
"$progname: you must have the liblwp-protocol-https-perl package installed\n"
. "to use https URLs";
}
# At least for now, set base in the line object - other methods need it
local $line->parse_result->{base} = $base;
$request = HTTP::Request->new('GET', $base);
$response = $downloader->user_agent->request($request);
if (!$response->is_success) {
uscan_warn
"In watch file $watchfile, reading webpage\n $base failed: "
. $response->status_line;
return '';
}
my $content = $response->content;
if ( $response->header('Content-Encoding')
and $response->header('Content-Encoding') =~ /^gzip$/i) {
require IO::Uncompress::Gunzip;
require IO::String;
uscan_debug "content seems gzip encoded, let's decode it";
my $out;
if (IO::Uncompress::Gunzip::gunzip(IO::String->new($content), \$out)) {
$content = $out;
} else {
uscan_warn 'Unable to decode remote content: '
. $IO::Uncompress::GunzipError;
return '';
}
}
uscan_extra_debug
"received content:\n$content\n[End of received content] by HTTP";
clean_content(\$content);
my ($dirpatterns, $base_sites, $base_dirs)
= handle_redirection($line, $pattern, $base);
$downloader->user_agent->clear_redirections; # we won't be needing that
my @hrefs;
for my $parsed (
html_search($line, $content, $dirpatterns, 'dirversionmangle')) {
my ($priority, $mangled_version, $href, $match) = @$parsed;
$match = '';
if (defined $download_version
and $mangled_version eq $download_version) {
$match = "matched with the download version";
}
if (defined $download_version_short3
and $mangled_version eq $download_version_short3) {
$match = "matched with the download version (partial 3)";
}
if (defined $download_version_short2
and $mangled_version eq $download_version_short2) {
$match = "matched with the download version (partial 2)";
}
if (defined $download_version_short1
and $mangled_version eq $download_version_short1) {
$match = "matched with the download version (partial 1)";
}
push @hrefs, [$mangled_version, $href, $match];
}
# keep the hrefs for which $match was set in the loop above
my @vhrefs = grep { $$_[2] } @hrefs;
if (@vhrefs) {
@vhrefs = Devscripts::Versort::upstream_versort(@vhrefs);
$newdir = $vhrefs[0][1];
}
if (@hrefs) {
@hrefs = Devscripts::Versort::upstream_versort(@hrefs);
my $msg = "Found the following matching directories (newest first):\n";
foreach my $href (@hrefs) {
$msg .= " $$href[1] ($$href[0]) $$href[2]\n";
}
uscan_verbose $msg;
$newdir //= $hrefs[0][1];
} else {
uscan_warn
"In $watchfile,\n no matching hrefs for pattern\n $site$dir$pattern";
return '';
}
# just give the final directory component
$newdir =~ s%/$%%;
$newdir =~ s%^.*/%%;
return ($newdir);
}
# Nothing to clean here
sub http_clean { 0 }
sub clean_content {
my ($content) = @_;
# We need this horrid stuff to handle href=foo type
# links. OK, bad HTML, but we have to handle it nonetheless.
# It's bug #89749.
$$content =~ s/href\s*=\s*(?=[^\"\'])([^\s>]+)/href="$1"/ig;
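# e.g. (illustrative) <a href=foo-1.0.tar.gz> becomes
# <a href="foo-1.0.tar.gz"> so the href extraction can handle it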
# Strip comments
$$content =~ s/<!-- .*?-->//sg;
return $content;
}
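# url_canonicalize_dots() below resolves a (possibly relative) href against
# a base URL and collapses "." and ".." path segments; e.g. (sketch)
#   url_canonicalize_dots('https://example.org/a/b/', '../c/d.tar.gz')
# returns 'https://example.org/a/c/d.tar.gz'.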
sub url_canonicalize_dots {
my ($base, $url) = @_;
if ($url !~ m{^[^:#?/]+://}) {
if ($url =~ m{^//}) {
$base =~ m{^[^:#?/]+:}
and $url = $& . $url;
} elsif ($url =~ m{^/}) {
$base =~ m{^[^:#?/]+://[^/#?]*}
and $url = $& . $url;
} else {
uscan_debug "Resolving urls with query part unimplemented"
if ($url =~ m/^[#?]/);
$base =~ m{^[^:#?/]+://[^/#?]*(?:/(?:[^#?/]*/)*)?} and do {
my $base_to_path = $&;
$base_to_path .= '/' unless $base_to_path =~ m|/$|;
$url = $base_to_path . $url;
};
}
}
$url =~ s{^([^:#?/]+://[^/#?]*)(/[^#?]*)}{
my ($h, $p) = ($1, $2);
$p =~ s{/\.(?:/|$|(?=[#?]))}{/}g;
1 while $p =~ s{/(?!\.\./)[^/]*/\.\.(?:/|(?=[#?])|$)}{/}g;
$h.$p;}e;
$url;
}
sub html_search {
my ($self, $content, $patterns, $mangle) = @_;
# pagemangle: do not abuse this slow operation
if (
mangle(
$self->watchfile, \$self->line,
'pagemangle:\n', [@{ $self->pagemangle }],
\$content
)
) {
return undef;
}
if ( !$self->shared->{bare}
and $content =~ m%^<[?]xml%i
and $content =~ m%xmlns="http://s3.amazonaws.com/doc/2006-03-01/"%
and $content !~ m%<Key><a\s+href%) {
# this is an S3 bucket listing. Insert an 'a href' tag
# into the content for each 'Key', so that it looks like html (LP: #798293)
uscan_warn
"*** Amazon AWS special case code is deprecated***\nUse opts=pagemangle rule, instead";
$content =~ s%<Key>([^<]*)</Key>%<Key><a href="$1">$1</a></Key>%g;
uscan_extra_debug
"processed content:\n$content\n[End of processed content] by Amazon AWS special case code";
}
clean_content(\$content);
# Is there a base URL given?
if ($content =~ /<\s*base\s+[^>]*href\s*=\s*([\"\'])(.*?)\1/i) {
$self->parse_result->{urlbase}
= url_canonicalize_dots($self->parse_result->{base}, $2);
} else {
$self->parse_result->{urlbase} = $self->parse_result->{base};
}
uscan_extra_debug
"processed content:\n$content\n[End of processed content] by fix bad HTML code";
# search hrefs in web page to obtain a list of uversionmangled version and matching download URL
{
local $, = ',';
uscan_verbose "Matching pattern:\n @{$self->{patterns}}";
}
my @hrefs;
while ($content =~ m/<\s*a\s+[^>]*(?<=\s)href\s*=\s*([\"\'])(.*?)\1/sgi) {
my $href = $2;
$href = fix_href($href);
my $href_canonical
= url_canonicalize_dots($self->parse_result->{urlbase}, $href);
if (defined $self->hrefdecode) {
if ($self->hrefdecode eq 'percent-encoding') {
uscan_debug "... Decoding from href: $href";
$href =~ s/%([A-Fa-f\d]{2})/chr hex $1/eg;
$href_canonical =~ s/%([A-Fa-f\d]{2})/chr hex $1/eg;
} else {
uscan_warn "Illegal value for hrefdecode: "
. "$self->{hrefdecode}";
return undef;
}
}
uscan_extra_debug "Checking href $href";
foreach my $_pattern (@$patterns) {
if (my @match = $href =~ /^$_pattern$/) {
push @hrefs,
parse_href($self, $href_canonical, $_pattern, \@match,
$mangle);
}
uscan_extra_debug "Checking href $href_canonical";
if (my @match = $href_canonical =~ /^$_pattern$/) {
push @hrefs,
parse_href($self, $href_canonical, $_pattern, \@match,
$mangle);
}
}
}
return @hrefs;
}
sub plain_search {
my ($self, $content) = @_;
my @hrefs;
foreach my $_pattern (@{ $self->patterns }) {
while ($content =~ s/.*?($_pattern)//) {
push @hrefs, $self->parse_href($1, $_pattern, $2);
}
}
$self->parse_result->{urlbase} = $self->parse_result->{base};
return @hrefs;
}
sub parse_href {
my ($self, $href, $_pattern, $match, $mangle) = @_;
$mangle //= 'uversionmangle';
my $mangled_version;
if ($self->watch_version == 2) {
# watch_version 2 only recognised one group; the code
# below will break version 2 watch files with a construction
# such as file-([\d\.]+(-\d+)?) (bug #327258)
$mangled_version
= ref $match eq 'ARRAY'
? $match->[0]
: $match;
} else {
# need the map { ... } here to handle cases of (...)?
# which may match but then return undef values
if ($self->versionless) {
# exception, otherwise $mangled_version = 1
$mangled_version = '';
} else {
$mangled_version = join(".",
map { $_ if defined($_) }
ref $match eq 'ARRAY' ? @$match : $href =~ m&^$_pattern$&);
}
if (
mangle(
$self->watchfile, \$self->line,
"$mangle:", \@{ $self->$mangle },
\$mangled_version
)
) {
return ();
}
}
$match = '';
if (defined $self->shared->{download_version}) {
if ($mangled_version eq $self->shared->{download_version}) {
$match = "matched with the download version";
}
}
my $priority = $mangled_version . '-' . get_priority($href);
return [$priority, $mangled_version, $href, $match];
}
1;


@ -0,0 +1,67 @@
package Devscripts::Uscan::svn;
use strict;
use Cwd qw/abs_path/;
use Devscripts::Uscan::Output;
use Devscripts::Uscan::Utils;
use Devscripts::Uscan::_vcs;
use Dpkg::IPC;
use File::Path 'remove_tree';
use Moo::Role;
######################################################
# search $newfile $newversion (svn mode/versionless)
######################################################
sub svn_search {
my ($self) = @_;
my ($newfile, $newversion);
if ($self->versionless) {
$newfile = $self->parse_result->{base};
spawn(
exec => [
'svn', 'info',
'--show-item', 'last-changed-revision',
'--no-newline', $self->parse_result->{base}
],
wait_child => 1,
to_string => \$newversion
);
chomp($newversion);
$newversion = sprintf '0.0~svn%d', $newversion;
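# Illustration (hypothetical revision): a last-changed-revision of 1234
# yields the synthetic upstream version "0.0~svn1234", which
# uversionmangle below may rewrite further.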
if (
mangle(
$self->watchfile, \$self->line,
'uversionmangle:', \@{ $self->uversionmangle },
\$newversion
)
) {
return undef;
}
}
################################################
# search $newfile $newversion (svn mode w/tag)
################################################
elsif ($self->mode eq 'svn') {
my @args = ('list', $self->parse_result->{base});
($newversion, $newfile)
= get_refs($self, ['svn', @args], qr/(.+)/, 'subversion');
return undef if !defined $newversion;
}
return ($newversion, $newfile);
}
sub svn_upstream_url {
my ($self) = @_;
my $upstream_url = $self->parse_result->{base};
if (!$self->versionless) {
$upstream_url .= '/' . $self->search_result->{newfile};
}
return $upstream_url;
}
*svn_newfile_base = \&Devscripts::Uscan::_vcs::_vcs_newfile_base;
sub svn_clean { }
1;