summaryrefslogtreecommitdiffstats
path: root/lib
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-28 12:01:11 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-28 12:01:11 +0000
commit3be121a05dcd170854a8dac6437b29f297a6ff4e (patch)
tree05cf57183f5a23394eca11b00f97a74a5dfdf79d /lib
parentInitial commit. (diff)
downloaddevscripts-3be121a05dcd170854a8dac6437b29f297a6ff4e.tar.xz
devscripts-3be121a05dcd170854a8dac6437b29f297a6ff4e.zip
Adding upstream version 2.23.4+deb12u1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '')
-rw-r--r--lib/Devscripts/Compression.pm104
-rw-r--r--lib/Devscripts/Config.pm418
-rw-r--r--lib/Devscripts/DB_File_Lock.pm364
-rw-r--r--lib/Devscripts/Debbugs.pm481
-rw-r--r--lib/Devscripts/JSONCache.pm97
-rw-r--r--lib/Devscripts/MkOrigtargz.pm631
-rw-r--r--lib/Devscripts/MkOrigtargz/Config.pm247
-rw-r--r--lib/Devscripts/Output.pm83
-rw-r--r--lib/Devscripts/PackageDeps.pm307
-rw-r--r--lib/Devscripts/Packages.pm313
-rwxr-xr-xlib/Devscripts/Salsa.pm410
-rwxr-xr-xlib/Devscripts/Salsa/Config.pm460
-rw-r--r--lib/Devscripts/Salsa/Hooks.pm303
-rwxr-xr-xlib/Devscripts/Salsa/Repo.pm73
-rw-r--r--lib/Devscripts/Salsa/add_user.pm40
-rwxr-xr-xlib/Devscripts/Salsa/check_repo.pm205
-rw-r--r--lib/Devscripts/Salsa/checkout.pm79
-rw-r--r--lib/Devscripts/Salsa/create_repo.pm47
-rw-r--r--lib/Devscripts/Salsa/del_repo.pm26
-rw-r--r--lib/Devscripts/Salsa/del_user.pm32
-rw-r--r--lib/Devscripts/Salsa/fork.pm36
-rw-r--r--lib/Devscripts/Salsa/forks.pm45
-rw-r--r--lib/Devscripts/Salsa/group.pm35
-rw-r--r--lib/Devscripts/Salsa/join.pm20
-rw-r--r--lib/Devscripts/Salsa/last_ci_status.pm74
-rw-r--r--lib/Devscripts/Salsa/list_groups.pm40
-rw-r--r--lib/Devscripts/Salsa/list_repos.pm42
-rw-r--r--lib/Devscripts/Salsa/merge_request.pm174
-rw-r--r--lib/Devscripts/Salsa/merge_requests.pm49
-rwxr-xr-xlib/Devscripts/Salsa/pipeline_schedule.pm124
-rwxr-xr-xlib/Devscripts/Salsa/pipeline_schedules.pm70
-rw-r--r--lib/Devscripts/Salsa/protect_branch.pm43
-rw-r--r--lib/Devscripts/Salsa/protected_branches.pm27
-rw-r--r--lib/Devscripts/Salsa/purge_cache.pm15
-rw-r--r--lib/Devscripts/Salsa/push.pm106
-rw-r--r--lib/Devscripts/Salsa/push_repo.pm71
-rw-r--r--lib/Devscripts/Salsa/rename_branch.pm45
-rw-r--r--lib/Devscripts/Salsa/search_group.pm37
-rw-r--r--lib/Devscripts/Salsa/search_project.pm57
-rw-r--r--lib/Devscripts/Salsa/search_user.pm36
-rw-r--r--lib/Devscripts/Salsa/update_repo.pm140
-rw-r--r--lib/Devscripts/Salsa/update_safe.pm22
-rw-r--r--lib/Devscripts/Salsa/update_user.pm38
-rw-r--r--lib/Devscripts/Salsa/whoami.pm24
-rw-r--r--lib/Devscripts/Set.pm126
-rw-r--r--lib/Devscripts/Uscan/CatchRedirections.pm27
-rw-r--r--lib/Devscripts/Uscan/Config.pm392
-rw-r--r--lib/Devscripts/Uscan/Ctype/nodejs.pm36
-rw-r--r--lib/Devscripts/Uscan/Ctype/perl.pm36
-rw-r--r--lib/Devscripts/Uscan/Downloader.pm303
-rw-r--r--lib/Devscripts/Uscan/FindFiles.pm257
-rw-r--r--lib/Devscripts/Uscan/Keyring.pm168
-rw-r--r--lib/Devscripts/Uscan/Output.pm129
-rw-r--r--lib/Devscripts/Uscan/Utils.pm475
-rw-r--r--lib/Devscripts/Uscan/WatchFile.pm517
-rw-r--r--lib/Devscripts/Uscan/WatchLine.pm1850
-rw-r--r--lib/Devscripts/Uscan/_vcs.pm93
-rw-r--r--lib/Devscripts/Uscan/_xtp.pm90
-rw-r--r--lib/Devscripts/Uscan/ftp.pm280
-rw-r--r--lib/Devscripts/Uscan/git.pm172
-rw-r--r--lib/Devscripts/Uscan/http.pm510
-rw-r--r--lib/Devscripts/Uscan/svn.pm67
-rw-r--r--lib/Devscripts/Utils.pm31
-rw-r--r--lib/Devscripts/Versort.pm60
64 files changed, 11709 insertions, 0 deletions
diff --git a/lib/Devscripts/Compression.pm b/lib/Devscripts/Compression.pm
new file mode 100644
index 0000000..c138948
--- /dev/null
+++ b/lib/Devscripts/Compression.pm
@@ -0,0 +1,104 @@
+# Copyright James McCoy <jamessan@debian.org> 2013.
+# Modifications copyright 2002 Julian Gilbey <jdg@debian.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+package Devscripts::Compression;
+
+use Dpkg::Compression qw(!compression_get_property);
+use Dpkg::IPC;
+use Exporter qw(import);
+
+# Re-export everything Dpkg::Compression exports, plus our own helpers.
+# Note: compression_get_property is excluded from the import above so the
+# wrapper defined later in this file takes its place.
+our @EXPORT = (
+ @Dpkg::Compression::EXPORT,
+ qw(compression_get_file_extension_regex compression_guess_from_file),
+);
+
+eval {
+ Dpkg::Compression->VERSION(1.02);
+ 1;
+} or do {
+ # Ensure we have compression_get_file_extension_regex, regardless of the
+ # version of Dpkg::Compression to ease backporting.
+ # Installs a sub into our own symbol table; relies on
+ # $compression_re_file_ext being exported by Dpkg::Compression.
+ *{'Devscripts::Compression::compression_get_file_extension_regex'} = sub {
+ return $compression_re_file_ext;
+ };
+};
+
+# This can potentially be moved to Dpkg::Compression
+
+# Map MIME types reported by file(1) to Dpkg::Compression scheme names.
+# Both "x-"-prefixed and unprefixed forms are listed because file(1)
+# output has varied across versions.
+my %mime2comp = (
+    "application/x-gzip"       => "gzip",
+    "application/gzip"         => "gzip",
+    "application/x-bzip2"      => "bzip2",
+    # Fixed: the key previously contained a trailing space
+    # ("application/bzip2 "), which file(1) never emits, so this
+    # entry could never match.
+    "application/bzip2"        => "bzip2",
+    "application/x-xz"         => "xz",
+    "application/xz"           => "xz",
+    "application/zip"          => "zip",
+    "application/x-compress"   => "compress",
+    "application/java-archive" => "zip",
+    "application/x-tar"        => "tar",
+    "application/zstd"         => "zst",
+    "application/x-zstd"       => "zst",
+);
+
+# Guess the compression scheme of a file by asking file(1) for its MIME
+# type and looking the answer up in %mime2comp. --dereference follows
+# symlinks so the link target is examined. Returns the scheme name, or
+# nothing when the MIME type is not recognized.
+sub compression_guess_from_file {
+ my $filename = shift;
+ my $mimetype;
+ spawn(
+ exec => ['file', '--dereference', '--brief', '--mime-type', $filename],
+ to_string => \$mimetype,
+ wait_child => 1
+ );
+ chomp($mimetype);
+ if (exists $mime2comp{$mimetype}) {
+ return $mime2comp{$mimetype};
+ } else {
+ return;
+ }
+}
+
+# comp_prog and default_level aren't provided because a) they aren't needed in
+# devscripts and b) the Dpkg::Compression API isn't rich enough to support
+# these as compressors
+# Properties for compressors known to devscripts but not (or not fully)
+# covered by Dpkg::Compression; consulted first by the
+# compression_get_property wrapper below.
+my %comp_properties = (
+ compress => {
+ file_ext => 'Z',
+ decomp_prog => ['uncompress'],
+ },
+ zip => {
+ file_ext => 'zip',
+ decomp_prog => ['unzip'],
+ },
+ zst => {
+ file_ext => 'zst',
+ #comp_prog => ['zstd'],
+ decomp_prog => ['unzstd'],
+ default_level => 3,
+ });
+
+# Drop-in replacement for Dpkg::Compression::compression_get_property.
+# Consults the local %comp_properties table first and falls back to
+# Dpkg::Compression for schemes not overridden here.
+# Args: $compression (scheme name), $property (e.g. 'file_ext').
+# Returns the property value, or nothing when the scheme is known
+# locally but lacks that property.
+sub compression_get_property {
+ my ($compression, $property) = @_;
+ if (!exists $comp_properties{$compression}) {
+ return Dpkg::Compression::compression_get_property($compression,
+ $property);
+ }
+
+ if (exists $comp_properties{$compression}{$property}) {
+ return $comp_properties{$compression}{$property};
+ }
+ return;
+}
+
+1;
diff --git a/lib/Devscripts/Config.pm b/lib/Devscripts/Config.pm
new file mode 100644
index 0000000..e13a5a1
--- /dev/null
+++ b/lib/Devscripts/Config.pm
@@ -0,0 +1,418 @@
+
+=head1 NAME
+
+Devscripts::Config - devscripts Perl scripts configuration object
+
+=head1 SYNOPSIS
+
+ # Configuration module
+ package Devscripts::My::Config;
+ use Moo;
+ extends 'Devscripts::Config';
+
+ use constant keys => [
+ [ 'text1=s', 'MY_TEXT', qr/^\S/, 'Default_text' ],
+ # ...
+ ];
+
+ has text1 => ( is => 'rw' );
+
+ # Main package or script
+ package Devscripts::My;
+
+ use Moo;
+ my $config = Devscripts::My::Config->new->parse;
+ 1;
+
+=head1 DESCRIPTION
+
+Devscripts Perl scripts configuration object. It can scan configuration files
+(B</etc/devscripts.conf> and B<~/.devscripts>) and command line arguments.
+
+A devscripts configuration package has just to declare:
+
+=over
+
+=item B<keys> constant: array ref I<(see below)>
+
+=item B<rules> constant: hash ref I<(see below)>
+
+=back
+
+=head1 KEYS
+
+Each element of B<keys> constant is an array containing four elements which can
+be undefined:
+
+=over
+
+=item the string to give to L<Getopt::Long>
+
+=item the name of the B<devscripts.conf> key
+
+=item the rule to check value. It can be:
+
+=over
+
+=item B<regexp> ref: will be applied to the value. If it fails against the
+devscripts.conf value, Devscripts::Config will warn. If it fails against the
+command line argument, Devscripts::Config will die.
+
+=item B<sub> ref: function will be called with 2 arguments: current config
+object and proposed value. Function must return a true value to continue or
+0 to stop. This is not simply a "check" function: Devscripts::Config will not
+do anything else than read the result to continue with next argument or stop.
+
+=item B<"bool"> string: means that value is a boolean. devscripts.conf value
+can be either "yes", 1, "no", 0.
+
+=back
+
+=item the default value
+
+=back
+
+=head2 RULES
+
+It is possible to declare some additional rules to check the logic between
+options:
+
+ use constant rules => [
+ sub {
+ my($self)=@_;
+ # OK
+ return 1 if( $self->a < $self->b );
+ # OK with warning
+      return ( 1, 'a should be lower than b' ) if( $self->a > $self->b );
+ # NOK with an error
+ return ( 0, 'a must not be equal to b !' );
+ },
+ sub {
+ my($self)=@_;
+ # ...
+ return 1;
+ },
+ ];
+
+=head1 METHODS
+
+=head2 new()
+
+Constructor
+
+=cut
+
+package Devscripts::Config;
+
+use strict;
+use Devscripts::Output;
+use Dpkg::IPC;
+use File::HomeDir;
+use Getopt::Long qw(:config bundling permute no_getopt_compat);
+use Moo;
+
+# Common options shared by every devscripts config subclass; each entry
+# has the same [getopt-spec, conf-key, check, default] shape as keys().
+# "--help" prints the manpage via usage() and exits immediately.
+has common_opts => (
+ is => 'ro',
+ default => sub {
+ [[
+ 'help', undef,
+ sub {
+ if ($_[1]) { $_[0]->usage; exit 0 }
+ }
+ ]]
+ });
+
+# Internal attributes
+
+# Human-readable record of which conf-file values were applied.
+has modified_conf_msg => (is => 'rw', default => sub { '' });
+
+# Normalize $HOME via File::HomeDir so ~/.devscripts is located reliably.
+$ENV{HOME} = File::HomeDir->my_home;
+
+# Configuration files read, in order, by parse_conf_files().
+our @config_files
+ = ('/etc/devscripts.conf', ($ENV{HOME} ? "$ENV{HOME}/.devscripts" : ()));
+
+# Abstract: subclasses must override to return their key declarations.
+sub keys {
+ die "conffile_keys() must be defined in sub classes";
+}
+
+=head2 parse()
+
+Launches B<parse_conf_files()>, B<parse_command_line()> and B<check_rules>
+
+=cut
+
+# Moo constructor hook: populate attributes with their declared defaults
+# before any conf-file or command-line parsing takes place.
+sub BUILD {
+ my ($self) = @_;
+ $self->set_default;
+}
+
+# Full parsing pipeline: conf files, then command line, then rule
+# checks. Returns $self for chaining.
+sub parse {
+ my ($self) = @_;
+
+ # 1 - Parse /etc/devscripts.conf and ~/.devscripts
+ $self->parse_conf_files;
+
+ # 2 - Parse command line
+ $self->parse_command_line;
+
+ # 3 - Check rules
+ $self->check_rules;
+ return $self;
+}
+
+# I - Parse /etc/devscripts.conf and ~/.devscripts
+
+=head2 parse_conf_files()
+
+Reads values in B</etc/devscripts.conf> and B<~/.devscripts>
+
+=cut
+
+# Apply the declared default for every key. The Getopt::Long spec is
+# turned into an attribute name by stripping a leading "--", mapping
+# "-" to "_" and cutting the spec suffix ("!", "|", "=..."). A code-ref
+# default is invoked to produce a fresh value (avoids sharing one
+# array/hash ref between instances); any other ref type is fatal.
+sub set_default {
+ my ($self) = @_;
+ my $keys = $self->keys;
+ foreach my $key (@$keys) {
+ my ($kname, $name, $check, $default) = @$key;
+ next unless (defined $default);
+ $kname =~ s/^\-\-//;
+ $kname =~ s/-/_/g;
+ $kname =~ s/[!\|=].*$//;
+ if (ref $default) {
+ unless (ref $default eq 'CODE') {
+ die "Default value must be a sub ($kname)";
+ }
+ $self->{$kname} = $default->();
+ } else {
+ $self->{$kname} = $default;
+ }
+ }
+}
+
+# Read configuration values from @config_files. Honours --no-conf /
+# --noconf (must be first argument) and --conf-file/--conffile[=FILE]
+# at the front of @ARGV; a "+" prefix on FILE appends to the default
+# list instead of replacing it. Readable files are sourced by bash and
+# every declared conf key is printed back NUL-separated, then each
+# value is validated against its check and stored on $self.
+# Returns $self, undef on an unknown check type, or the ds_die result
+# when --conf-file cannot be parsed.
+sub parse_conf_files {
+ my ($self) = @_;
+
+ my @cfg_files = @config_files;
+ if (@ARGV) {
+ if ($ARGV[0] =~ /^--no-?conf$/) {
+ $self->modified_conf_msg(" (no configuration files read)");
+ shift @ARGV;
+ return $self;
+ }
+ my @tmp;
+ while ($ARGV[0] and $ARGV[0] =~ s/^--conf-?file(?:=(.+))?//) {
+ shift @ARGV;
+ my $file = $1 || shift(@ARGV);
+ if ($file) {
+ unless ($file =~ s/^\+//) {
+ @cfg_files = ();
+ }
+ push @tmp, $file;
+ } else {
+ return ds_die
+ "Unable to parse --conf-file option, aborting parsing";
+ }
+ }
+ push @cfg_files, @tmp;
+ }
+
+ @cfg_files = grep { -r $_ } @cfg_files;
+ my $keys = $self->keys;
+ if (@cfg_files) {
+ my @key_names = map { $_->[1] ? $_->[1] : () } @$keys;
+ my %config_vars;
+
+ # Source every file in one bash process so later files override
+ # earlier ones, then print each key's value NUL-terminated.
+ my $shell_cmd = q{for file ; do . "$file"; done ;};
+
+ # Read back values
+ $shell_cmd .= q{ printf '%s\0' };
+ my @shell_key_names = map { qq{"\$$_"} } @key_names;
+ $shell_cmd .= join(' ', @shell_key_names);
+ my $shell_out;
+ spawn(
+ exec => [
+ '/bin/bash', '-c',
+ $shell_cmd, 'devscripts-config-loader',
+ @cfg_files
+ ],
+ wait_child => 1,
+ to_string => \$shell_out
+ );
+ # NOTE(review): the trim regex has no trailing "$" anchor, so it
+ # effectively strips leading whitespace only — confirm trailing
+ # whitespace in conf values is intended to survive.
+ @config_vars{@key_names} = map { s/^\s*(.*?)\s*/$1/ ? $_ : undef }
+ split(/\0/, $shell_out, -1);
+
+ # Check validity and set value
+ foreach my $key (@$keys) {
+ my ($kname, $name, $check, $default) = @$key;
+ next unless ($name);
+ $kname //= '';
+ $kname =~ s/^\-\-//;
+ $kname =~ s/-/_/g;
+ $kname =~ s/[!|=+].*$//;
+ # Case 1: nothing in conf files, set default
+ next unless (length $config_vars{$name});
+ if (defined $check) {
+ if (not(ref $check)) {
+ # Expand string specs like "bool" into a code ref.
+ $check
+ = $self->_subs_check($check, $kname, $name, $default);
+ }
+ if (ref $check eq 'CODE') {
+ # Code-ref checks store the value themselves; a false
+ # result only warns for conf files (dies for CLI args).
+ my ($res, $msg)
+ = $check->($self, $config_vars{$name}, $kname);
+ ds_warn $msg unless ($res);
+ next;
+ } elsif (ref $check eq 'Regexp') {
+ unless ($config_vars{$name} =~ $check) {
+ ds_warn("Bad $name value $config_vars{$name}");
+ next;
+ }
+ } else {
+ ds_die("Unknown check type for $name");
+ return undef;
+ }
+ }
+ $self->{$kname} = $config_vars{$name};
+ $self->{modified_conf_msg} .= " $name=$config_vars{$name}\n";
+ # Ref-valued default: split the conf value into a list
+ # (double-quoted chunks first, then whitespace) and store as
+ # an array ref or a "key=value" hash ref to match the default.
+ if (ref $default) {
+ my $ref = ref $default->();
+ my @tmp = ($config_vars{$name} =~ /\s+"([^"]*)"(?>\s+)/g);
+ $config_vars{$name} =~ s/\s+"([^"]*)"\s+/ /g;
+ push @tmp, split(/\s+/, $config_vars{$name});
+ if ($ref eq 'ARRAY') {
+ $self->{$kname} = \@tmp;
+ } elsif ($ref eq 'HASH') {
+ $self->{$kname}
+ = { map { /^(.*?)=(.*)$/ ? ($1 => $2) : ($_ => 1) }
+ @tmp };
+ }
+ }
+ }
+ }
+ return $self;
+}
+
+# II - Parse command line
+
+=head2 parse_command_line()
+
+Parse command line arguments
+
+=cut
+
+# Parse @ARGV with Getopt::Long using the common options plus the
+# subclass keys. Options whose default is a ref (array/hash) are
+# pre-seeded in $opts so Getopt::Long can accumulate repeated values.
+# Each supplied option is validated with its check (if any) and stored
+# on $self; on a Getopt::Long failure the manpage is shown and the
+# process exits 1. Returns $self.
+sub parse_command_line {
+ my ($self, @arrays) = @_;
+ my $opts = {};
+ my $keys = [@{ $self->common_opts }, @{ $self->keys }];
+ # If default value is set to [], we must prepare hash ref to be able to
+ # receive more than one value
+ foreach (@$keys) {
+ if ($_->[3] and ref($_->[3])) {
+ my $kname = $_->[0];
+ $kname =~ s/[!\|=].*$//;
+ $opts->{$kname} = $_->[3]->();
+ }
+ }
+ unless (GetOptions($opts, map { $_->[0] ? ($_->[0]) : () } @$keys)) {
+ $_[0]->usage;
+ exit 1;
+ }
+ foreach my $key (@$keys) {
+ my ($kname, $tmp, $check, $default) = @$key;
+ next unless ($kname);
+ $kname =~ s/[!|=+].*$//;
+ # $name keeps the dashed spelling used by Getopt::Long; $kname is
+ # the underscored attribute name.
+ my $name = $kname;
+ $kname =~ s/-/_/g;
+ if (defined $opts->{$name}) {
+ # Skip ref values left empty: the option was not given.
+ next if (ref $opts->{$name} eq 'ARRAY' and !@{ $opts->{$name} });
+ next if (ref $opts->{$name} eq 'HASH' and !%{ $opts->{$name} });
+ if (defined $check) {
+ if (not(ref $check)) {
+ $check
+ = $self->_subs_check($check, $kname, $name, $default);
+ }
+ if (ref $check eq 'CODE') {
+ my ($res, $msg) = $check->($self, $opts->{$name}, $kname);
+ ds_die "Bad value for $name: $msg" unless ($res);
+ } elsif (ref $check eq 'Regexp') {
+ if ($opts->{$name} =~ $check) {
+ $self->{$kname} = $opts->{$name};
+ } else {
+ ds_die("Bad $name value in command line");
+ }
+ } else {
+ ds_die("Unknown check type for $name");
+ }
+ } else {
+ $self->{$kname} = $opts->{$name};
+ }
+ }
+ }
+ return $self;
+}
+
+# Run each rule sub declared by the subclass (if it defines rules()).
+# A rule returns (1[, warning]) to accept or (0, message) to report an
+# error; the message defaults to "config rule N". Returns $self.
+sub check_rules {
+ my ($self) = @_;
+ if ($self->can('rules')) {
+ if (my $rules = $self->rules) {
+ my $i = 0;
+ foreach my $sub (@$rules) {
+ $i++;
+ my ($res, $msg) = $sub->($self);
+ if ($res) {
+ ds_warn($msg) if ($msg);
+ } else {
+ ds_error($msg || "config rule $i");
+ # ds_error may not die if $Devscripts::Output::die_on_error
+ # is set to 0
+ next;
+ }
+ }
+ }
+ }
+ return $self;
+}
+
+# Expand a string check specification into a code ref. Only "bool" is
+# supported: the generated sub stores 1 for "1"/"yes", 0 for "0"/"no"
+# (case-insensitive), otherwise the default value (or undef), and
+# always reports success.
+# Args: $check (spec string), $kname (attribute name), $name (conf key,
+# used in error messages), $default. Returns the code ref.
+sub _subs_check {
+    my ($self, $check, $kname, $name, $default) = @_;
+    if ($check eq 'bool') {
+        $check = sub {
+            $_[0]->{$kname} = (
+                $_[1] =~ /^(?:1|yes)$/i ? 1
+                : $_[1] =~ /^(?:0|no)$/i ? 0
+                : $default ? $default
+                : undef
+            );
+            return 1;
+        };
+    } else {
+        # Fixed: was $self->die(...), but this class defines no die()
+        # method, so the error path itself crashed with "Can't locate
+        # object method". Use ds_die as everywhere else in this file.
+        ds_die("Unknown check type for $name");
+    }
+    return $check;
+}
+
+# Default usage: switch to manpage
+# Strips a ".pl" suffix from the program name and replaces the current
+# process with "man -P /bin/cat <progname>".
+# NOTE(review): $progname is not declared in this file — presumably
+# exported by Devscripts::Output (imported above); confirm.
+sub usage {
+ $progname =~ s/\.pl//;
+ exec("man", '-P', '/bin/cat', $progname);
+}
+
+1;
+__END__
+=head1 SEE ALSO
+
+L<devscripts>
+
+=head1 AUTHOR
+
+Xavier Guimard E<lt>yadd@debian.orgE<gt>
+
+=head1 COPYRIGHT AND LICENSE
+
+Copyright 2018 by Xavier Guimard <yadd@debian.org>
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2 of the License, or
+(at your option) any later version.
+
+=cut
diff --git a/lib/Devscripts/DB_File_Lock.pm b/lib/Devscripts/DB_File_Lock.pm
new file mode 100644
index 0000000..12a4680
--- /dev/null
+++ b/lib/Devscripts/DB_File_Lock.pm
@@ -0,0 +1,364 @@
+#
+# DB_File::Lock
+#
+# by David Harris <dharris@drh.net>
+#
+# Copyright (c) 1999-2000 David R. Harris. All rights reserved.
+# This program is free software; you can redistribute it and/or modify it
+# under the same terms as Perl itself.
+#
+
+# We rename the package so that we don't have to package it separately.
+# package DB_File::Lock;
+package Devscripts::DB_File_Lock;
+
+require 5.004;
+
+use strict;
+use vars qw($VERSION @ISA $locks);
+
+@ISA = qw(DB_File);
+$VERSION = '0.05';
+
+use DB_File ();
+use Fcntl qw(:flock O_RDWR O_RDONLY O_WRONLY O_CREAT);
+use Carp qw(croak carp);
+use Symbol ();
+
+# import function can't be inherited, so this magic required
+# Forwards the caller's import list to the parent class ($ISA[0],
+# i.e. DB_File) via a string eval executed in the caller's package, so
+# "use Devscripts::DB_File_Lock qw(...)" behaves like "use DB_File".
+sub import {
+ my $ourname = shift;
+ my @imports
+ = @_; # dynamic scoped var, still in scope after package call in eval
+ my $module = caller;
+ my $calling = $ISA[0];
+ eval " package $module; import $calling, \@imports; ";
+}
+
+# Common worker for TIEHASH/TIEARRAY: acquire a flock on the lockfile
+# (shared for "read", exclusive for "write"), then tie the underlying
+# DB_File. The lockfile handle is remembered in $locks keyed by the
+# object's address so DESTROY can release it. Returns the tied object,
+# or undef when a nonblocking lock attempt fails.
+sub _lock_and_tie {
+ my $package = shift;
+
+ ## Grab the type of tie
+
+ my $tie_type = pop @_;
+
+ ## There are two ways of passing data defined by DB_File
+
+ my $lock_data;
+ my @dbfile_data;
+
+ if (@_ == 5) {
+ $lock_data = pop @_;
+ @dbfile_data = @_;
+ } elsif (@_ == 2) {
+ $lock_data = pop @_;
+ @dbfile_data = @{ $_[0] };
+ } else {
+ croak "invalid number of arguments";
+ }
+
+ ## Decipher the lock_data
+
+ my $mode;
+ my $nonblocking = 0;
+ my $lockfile_name = $dbfile_data[0] . ".lock";
+ my $lockfile_mode;
+
+ if (lc($lock_data) eq "read") {
+ $mode = "read";
+ } elsif (lc($lock_data) eq "write") {
+ $mode = "write";
+ } elsif (ref($lock_data) eq "HASH") {
+ $mode = lc $lock_data->{mode};
+ croak "invalid mode ($mode)" if ($mode ne "read" and $mode ne "write");
+ $nonblocking = $lock_data->{nonblocking};
+ $lockfile_name = $lock_data->{lockfile_name}
+ if (defined $lock_data->{lockfile_name});
+ $lockfile_mode = $lock_data->{lockfile_mode};
+ } else {
+ croak "invalid lock_data ($lock_data)";
+ }
+
+ ## Warn about opening a lockfile for writing when only locking for reading
+
+ # NOTE: This warning disabled for RECNO because RECNO seems to require O_RDWR
+ # even when opening only for reading.
+
+ # NOTE(review): "&&" below is logical, not bitwise "&" — any non-zero
+ # flag combination (e.g. O_RDONLY|O_CREAT) counts as "write access"
+ # here. Possibly intentional given the "use O_RDONLY to fix" advice,
+ # but worth confirming against upstream DB_File::Lock.
+ carp
+"opening with write access when locking only for reading (use O_RDONLY to fix)"
+ if ((
+ $dbfile_data[1] && O_RDWR
+ or $dbfile_data[1] && O_WRONLY
+ ) # any kind of write access
+ and $mode eq "read" # and opening for reading
+ and $tie_type ne "TIEARRAY" # and not RECNO
+ );
+
+ ## Determine the mode of the lockfile, if not given
+
+ # THEORY: if someone can read or write the database file, we must allow
+ # them to read and write the lockfile.
+
+ if (not defined $lockfile_mode) {
+ $lockfile_mode = 0600; # we must be allowed to read/write lockfile
+ $lockfile_mode |= 0060 if ($dbfile_data[2] & 0060);
+ $lockfile_mode |= 0006 if ($dbfile_data[2] & 0006);
+ }
+
+ ## Open the lockfile, lock it, and open the database
+
+ my $lockfile_fh = Symbol::gensym();
+ # NOTE(review): "my $x = ... if COND" is the deprecated conditional-my
+ # idiom with unspecified behavior when COND is false; here the defined()
+ # guard below papers over it, but it should not be imitated.
+ my $saved_umask = umask(0000) if (umask() & $lockfile_mode);
+ my $open_ok = sysopen($lockfile_fh, $lockfile_name, O_RDWR | O_CREAT,
+ $lockfile_mode);
+ umask($saved_umask) if (defined $saved_umask);
+ $open_ok or croak "could not open lockfile ($lockfile_name)";
+
+ my $flock_flags
+ = ($mode eq "write" ? LOCK_EX : LOCK_SH) | ($nonblocking ? LOCK_NB : 0);
+ if (not flock $lockfile_fh, $flock_flags) {
+ close $lockfile_fh;
+ return undef if ($nonblocking);
+ croak "could not flock lockfile";
+ }
+
+ my $self
+ = $tie_type eq "TIEHASH"
+ ? $package->SUPER::TIEHASH(@_)
+ : $package->SUPER::TIEARRAY(@_);
+ if (not $self) {
+ close $lockfile_fh;
+ return $self;
+ }
+
+ ## Store the info for the DESTROY function
+
+ my $id = "" . $self;
+ $id =~ s/^[^=]+=//; # remove the package name in case re-blessing occurs
+ $locks->{$id} = $lockfile_fh;
+
+ ## Return the object
+
+ return $self;
+}
+
+# Tie entry point for hashes (DB_HASH/DB_BTREE): delegate to the common
+# lock-then-tie worker with the TIEHASH marker.
+sub TIEHASH {
+ return _lock_and_tie(@_, 'TIEHASH');
+}
+
+# Tie entry point for arrays (DB_RECNO): delegate with the TIEARRAY marker.
+sub TIEARRAY {
+ return _lock_and_tie(@_, 'TIEARRAY');
+}
+
+# Release resources on untie/garbage collection: look up the lockfile
+# handle stored by _lock_and_tie (keyed by the object's address), let
+# DB_File clean up first, then close the handle, which drops the flock.
+sub DESTROY {
+ my $self = shift;
+
+ my $id = "" . $self;
+ $id =~ s/^[^=]+=//;
+ my $lockfile_fh = $locks->{$id};
+ delete $locks->{$id};
+
+ $self->SUPER::DESTROY(@_);
+
+ # un-flock not needed, as we close here
+ close $lockfile_fh;
+}
+
+1;
+__END__
+
+=head1 NAME
+
+DB_File::Lock - Locking with flock wrapper for DB_File
+
+=head1 SYNOPSIS
+
+ use DB_File::Lock;
+ use Fcntl qw(:flock O_RDWR O_CREAT);
+
+ $locking = "read";
+ $locking = "write";
+ $locking = {
+ mode => "read",
+ nonblocking => 0,
+ lockfile_name => "/path/to/shared.lock",
+ lockfile_mode => 0600,
+ };
+
+ [$X =] tie %hash, 'DB_File::Lock', $filename, $flags, $mode, $DB_HASH, $locking;
+ [$X =] tie %hash, 'DB_File::Lock', $filename, $flags, $mode, $DB_BTREE, $locking;
+ [$X =] tie @array, 'DB_File::Lock', $filename, $flags, $mode, $DB_RECNO, $locking;
+
+ # or place the DB_File arguments inside a list reference:
+ [$X =] tie %hash, 'DB_File::Lock', [$filename, $flags, $mode, $DB_HASH], $locking;
+
+ ...use the same way as DB_File for the rest of the interface...
+
+=head1 DESCRIPTION
+
+This module provides a wrapper for the DB_File module, adding locking.
+
+When you need locking, simply use this module in place of DB_File and
+add an extra argument onto the tie command specifying if the file should
+be locked for reading or writing.
+
+The alternative is to write code like:
+
+ open(LOCK, "<$db_filename.lock") or die;
+ flock(LOCK, LOCK_SH) or die;
+ tie(%db_hash, 'DB_File', $db_filename, O_RDONLY, 0600, $DB_HASH) or die;
+ ... then read the database ...
+ untie(%db_hash);
+ close(LOCK);
+
+This module lets you write
+
+ tie(%db_hash, 'DB_File::Lock', $db_filename, O_RDONLY, 0600, $DB_HASH, 'read') or die;
+ ... then read the database ...
+ untie(%db_hash);
+
+This is better for two reasons:
+
+(1) Less cumbersome to write.
+
+(2) A fatal exception in the code working on the database which does
+not lead to process termination will probably not close the lockfile
+and therefore cause a dropped lock.
+
+=head1 USAGE DETAILS
+
+Tie to the database file by adding an additional locking argument
+to the list of arguments to be passed through to DB_File, such as:
+
+ tie(%db_hash, 'DB_File::Lock', $db_filename, O_RDONLY, 0600, $DB_HASH, 'read');
+
+or enclose the arguments for DB_File in a list reference:
+
+ tie(%db_hash, 'DB_File::Lock', [$db_filename, O_RDONLY, 0600, $DB_HASH], 'read');
+
+The filename used for the lockfile defaults to "$filename.lock"
+(the filename of the DB_File with ".lock" appended). Using a lockfile
+separate from the database file is recommended because it prevents weird
+interactions with the underlying database file library
+
+The additional locking argument added to the tie call can be:
+
+(1) "read" -- acquires a shared lock for reading
+
+(2) "write" -- acquires an exclusive lock for writing
+
+(3) A hash with the following keys (all optional except for the "mode"):
+
+=over 4
+
+=item mode
+
+the locking mode, "read" or "write".
+
+=item lockfile_name
+
+specifies the name of the lockfile to use. Default
+is "$filename.lock". This is useful for locking multiple resources with
+the same lockfiles.
+
+=item nonblocking
+
+determines if the flock call on the lockfile should
+block waiting for a lock, or if it should return failure if a lock can
+not be immediately attained. If "nonblocking" is set and a lock can not
+be attained, the tie command will fail. Currently, I'm not sure how to
+differentiate this from a failure in the DB_File layer.
+
+=item lockfile_mode
+
+determines the mode for the sysopen call in opening
+the lockfile. The default mode will be formulated to allow anyone that
+can read or write the DB_File permission to read and write the lockfile.
+(This is because some systems may require that one have write access to
+a file to lock it for reading, I understand.) The umask will be prevented
+from applying to this mode.
+
+=back
+
+Note: One may import the same values from DB_File::Lock as one may import
+from DB_File.
+
+=head1 GOOD LOCKING ETIQUETTE
+
+To avoid locking problems, realize that it is B<critical> that you release
+the lock as soon as possible. See the lock as a "hot potato", something
+that you must work with and get rid of as quickly as possible. See the
+sections of code where you have a lock as "critical" sections. Make sure
+that you call "untie" as soon as possible.
+
+It is often better to write:
+
+ # open database file with lock
+ # work with database
+ # lots of processing not related to database
+ # work with database
+ # close database and release lock
+
+as:
+
+ # open database file with lock
+ # work with database
+ # close database and release lock
+
+ # lots of processing not related to database
+
+ # open database file with lock
+ # work with database
+ # close database and release lock
+
+Also realize that when acquiring two locks at the same time, a deadlock
+situation can be caused.
+
+You can enter a deadlock situation if two processes simultaneously try to
+acquire locks on two separate databases. Each has locked only one of
+the databases, and cannot continue without locking the second. Yet this
+will never be freed because it is locked by the other process. If your
+processes all ask for their DB files in the same order, this situation
+cannot occur.
+
+=head1 OTHER LOCKING MODULES
+
+There are three locking wrappers for DB_File in CPAN right now. Each one
+implements locking differently and has different goals in mind. It is
+therefore worth knowing the difference, so that you can pick the right
+one for your application.
+
+Here are the three locking wrappers:
+
+Tie::DB_Lock -- DB_File wrapper which creates copies of the database file
+for read access, so that you have kind of a multiversioning concurrent
+read system. However, updates are still serial. Use for databases where
+reads may be lengthy and consistency problems may occur.
+
+Tie::DB_LockFile -- DB_File wrapper that has the ability to lock and
+unlock the database while it is being used. Avoids the tie-before-flock
+problem by simply re-tie-ing the database when you get or drop a
+lock. Because of the flexibility in dropping and re-acquiring the lock
+in the middle of a session, this can be massaged into a system that will
+work with long updates and/or reads if the application follows the hints
+in the POD documentation.
+
+DB_File::Lock (this module) -- extremely lightweight DB_File wrapper
+that simply flocks a lockfile before tie-ing the database and drops the
+lock after the untie. Allows one to use the same lockfile for multiple
+databases to avoid deadlock problems, if desired. Use for databases where
+updates and reads are quick and simple flock locking semantics are enough.
+
+(This text duplicated in the POD documentation, by the way.)
+
+=head1 AUTHOR
+
+David Harris <dharris@drh.net>
+
+Helpful insight from Stas Bekman <stas@stason.org>
+
+=head1 SEE ALSO
+
+DB_File(3).
+
+=cut
diff --git a/lib/Devscripts/Debbugs.pm b/lib/Devscripts/Debbugs.pm
new file mode 100644
index 0000000..355adc3
--- /dev/null
+++ b/lib/Devscripts/Debbugs.pm
@@ -0,0 +1,481 @@
+# This is Debbugs.pm from the Debian devscripts package
+#
+# Copyright (C) 2008 Adam D. Barratt
+# select() is Copyright (C) 2007 Don Armstrong
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+package Devscripts::Debbugs;
+
+=head1 OPTIONS
+
+=over
+
+=item select [key:value ...]
+
+Uses the SOAP interface to output a list of bugs which match the given
+selection requirements.
+
+The following keys are allowed, and may be given multiple times.
+
+=over 8
+
+=item package
+
+Binary package name.
+
+=item source
+
+Source package name.
+
+=item maintainer
+
+E-mail address of the maintainer.
+
+=item submitter
+
+E-mail address of the submitter.
+
+=item severity
+
+Bug severity.
+
+=item status
+
+Status of the bug.
+
+=item tag
+
+Tags applied to the bug. If I<users> is specified, may include
+usertags in addition to the standard tags.
+
+=item owner
+
+Bug's owner.
+
+=item correspondent
+
+Address of someone who sent mail to the log.
+
+=item affects
+
+Bugs which affect this package.
+
+=item bugs
+
+List of bugs to search within.
+
+=item users
+
+Users to use when looking up usertags.
+
+=item archive
+
+Whether to search archived bugs or normal bugs; defaults to 0
+(i.e. only search normal bugs). As a special case, if archive is
+'both', both archived and unarchived bugs are returned.
+
+=back
+
+For example, to select the set of bugs submitted by
+jrandomdeveloper@example.com and tagged wontfix, one would use
+
+select("submitter:jrandomdeveloper@example.com", "tag:wontfix")
+
+=back
+
+=cut
+
+use strict;
+use warnings;
+
+# SOAP service identifier (URI path) and base URL of the Debian BTS.
+my $soapurl = 'Debbugs/SOAP/1';
+our $btsurl = 'http://bugs.debian.org/';
+# Fault messages collected by getSOAPError(); checked after each SOAP
+# call. NOTE(review): never reset, so one fault makes every later call
+# in the same process report an error too — confirm this is intended.
+my @errors;
+
+# Optional request timeout in seconds, set via soap_timeout().
+our $soap_timeout;
+
+# Set the SOAP request timeout (seconds). Only a positive integer is
+# accepted; any other value is silently ignored and the previous
+# setting (or none) is kept.
+sub soap_timeout {
+    my ($arg) = @_;
+    $soap_timeout = $arg
+      if defined $arg and $arg =~ m{^[1-9]\d*$};
+}
+
+# Build and return a SOAP::Lite client for the Debbugs SOAP endpoint.
+# The proxy URL is derived from $btsurl (an http:// prefix is added
+# when missing) with 'cgi-bin/soap.cgi' appended. Honours the optional
+# $soap_timeout and the proxy environment variables; SOAP faults are
+# routed to getSOAPError() (recorded in @errors) instead of thrown.
+sub init_soap {
+    my $soapproxyurl;
+    if ($btsurl =~ m%^https?://(.*)/?$%) {
+        $soapproxyurl = $btsurl . '/';
+    } else {
+        $soapproxyurl = 'http://' . $btsurl . '/';
+    }
+    # Collapse a doubled trailing slash before appending the CGI path.
+    $soapproxyurl =~ s%//$%/%;
+    $soapproxyurl .= 'cgi-bin/soap.cgi';
+    my %options;
+    if ($soap_timeout) {
+        $options{timeout} = $soap_timeout;
+    }
+    my $soap = SOAP::Lite->uri($soapurl)->proxy($soapproxyurl, %options);
+
+    $soap->transport->env_proxy();
+    $soap->on_fault(\&getSOAPError);
+
+    return $soap;
+}
+
+# Reason SOAP is unusable (string), 0 when usable, undef before the
+# first check.
+my $soap_broken;
+
+# Return 1 if SOAP::Lite can be loaded, 0 otherwise. The outcome (and,
+# on failure, a human-readable reason in $soap_broken) is cached so the
+# require is attempted only once per process.
+sub have_soap {
+    if (not defined $soap_broken) {
+        eval { require SOAP::Lite; };
+        if (my $load_error = $@) {
+            $soap_broken
+              = ($load_error =~ m%^Can't locate SOAP/%)
+              ? "the libsoap-lite-perl package is not installed"
+              : "couldn't load SOAP::Lite: $load_error";
+        } else {
+            $soap_broken = 0;
+        }
+    }
+    return $soap_broken ? 0 : 1;
+}
+
+# SOAP::Lite on_fault handler: record the fault string (or, for
+# transport-level failures, the transport status) in @errors and hand
+# back an empty SOAP::SOM so callers detect failure by checking
+# @errors / an undefined result rather than by catching an exception.
+sub getSOAPError {
+    my ($soap, $result) = @_;
+    my $err;
+    if (ref($result)) {
+        $err = $result->faultstring;
+    } else {
+        $err = $soap->transport->status;
+    }
+    chomp $err;
+    push @errors, $err;
+
+    # Direct method-call syntax; the original's indirect-object
+    # "new SOAP::SOM" form is fragile and discouraged.
+    return SOAP::SOM->new;
+}
+
+# Fetch the usertags for the given user e-mail address(es) via the
+# SOAP interface. Returns the server's result (a hashref mapping tag
+# name to an arrayref of bug numbers, per the Debbugs SOAP API). Dies
+# if SOAP::Lite is unavailable or the server reported an error.
+sub usertags {
+    die "Couldn't run usertags: $soap_broken\n" unless have_soap();
+
+    my @args = @_;
+
+    my $soap = init_soap();
+    # Pass the saved argument list; the original passed @_ directly
+    # and left @args unused (same values, clearer intent).
+    my $usertags = $soap->get_usertag(@args);
+
+    if (@errors or not defined $usertags) {
+        my $error = join("\n", @errors);
+        die "Error retrieving usertags from SOAP server: $error\n";
+    }
+
+    my $result = $usertags->result();
+
+    if (@errors or not defined $result) {
+        my $error = join("\n", @errors);
+        die "Error retrieving usertags from SOAP server: $error\n";
+    }
+
+    return $result;
+}
+
+# Search for bugs matching "key:value" selection criteria (see the POD
+# above for the accepted keys). "user"/"users" entries name usertag
+# sets which are fetched first so tag searches can match usertags.
+# Returns the SOAP result (a list of bug numbers); dies on SOAP errors.
+sub select {
+    die "Couldn't run select: $soap_broken\n" unless have_soap();
+
+    # Map accepted key aliases onto the parameter names Debbugs expects.
+    my %param_name_for = (
+        package => 'package',
+        pkg => 'package',
+        src => 'src',
+        source => 'src',
+        maint => 'maint',
+        maintainer => 'maint',
+        submitter => 'submitter',
+        from => 'submitter',
+        status => 'status',
+        tag => 'tag',
+        tags => 'tag',
+        usertag => 'tag',
+        usertags => 'tag',
+        owner => 'owner',
+        dist => 'dist',
+        distribution => 'dist',
+        bugs => 'bugs',
+        archive => 'archive',
+        severity => 'severity',
+        correspondent => 'correspondent',
+        affects => 'affects',
+    );
+
+    my $soap = init_soap();
+    my (%users, %query);
+    for my $criterion (@_) {
+        my ($key, $value) = split /:/, $criterion, 2;
+        next unless $key;
+        if (my $param = $param_name_for{$key}) {
+            next unless $value;
+            # "archive" is a plain scalar flag; all other parameters
+            # accumulate into a list.
+            if ($param eq 'archive') {
+                $query{$param} = $value;
+            } else {
+                push @{ $query{$param} }, $value;
+            }
+        } elsif ($key =~ /users?$/) {
+            $users{$value} = 1 if $value;
+        } else {
+            warn "select(): Unrecognised key: $key\n";
+        }
+    }
+
+    # Merge the usertags of every requested user, keyed by tag name.
+    my %usertags;
+    for my $user (keys %users) {
+        my $tags_of_user = usertags($user);
+        next unless defined $tags_of_user and $tags_of_user ne "";
+        while (my ($tag, $tag_bugs) = each %$tags_of_user) {
+            push @{ $usertags{$tag} }, @$tag_bugs;
+        }
+    }
+
+    my $bugs = $soap->get_bugs(%query,
+        (keys %usertags) ? (usertags => \%usertags) : ());
+
+    if (@errors or not defined $bugs) {
+        my $error = join("\n", @errors);
+        die "Error while retrieving bugs from SOAP server: $error\n";
+    }
+
+    my $result = $bugs->result();
+    if (@errors or not defined $result) {
+        my $error = join("\n", @errors);
+        die "Error while retrieving bugs from SOAP server: $error\n";
+    }
+
+    return $result;
+}
+
+# Fetch status structures for a list of bug numbers. Requests go out
+# in batches of 500 to keep each SOAP call reasonably sized; the
+# per-batch hashes are merged and returned as one hashref. Dies on
+# SOAP errors.
+sub status {
+    die "Couldn't run status: $soap_broken\n" unless have_soap();
+    my @bug_list = @_;
+
+    my $soap = init_soap();
+    my %merged;
+
+    while (@bug_list) {
+        my @batch = splice(@bug_list, 0, 500);
+        my $bugs = $soap->get_status(@batch);
+
+        if (@errors or not defined $bugs) {
+            my $error = join("\n", @errors);
+            die
+              "Error while retrieving bug statuses from SOAP server: $error\n";
+        }
+
+        my $batch_result = $bugs->result();
+
+        if (@errors or not defined $batch_result) {
+            my $error = join("\n", @errors);
+            die
+              "Error while retrieving bug statuses from SOAP server: $error\n";
+        }
+
+        %merged = (%merged, %$batch_result);
+    }
+
+    return \%merged;
+}
+
+# Query package version information via the SOAP get_versions call.
+# Arguments are "key:value" strings; accepted keys are package/pkg,
+# src/source, time, binary/notsource, archs/displayarch, and the
+# repeatable arch(itecture)/dist(ribution). A key without a value acts
+# as a boolean flag. Returns the SOAP result; dies on SOAP errors.
+sub versions {
+    die "Couldn't run versions: $soap_broken\n" unless have_soap();
+
+    my @args = @_;
+    my %valid_keys = (
+        package => 'package',
+        pkg => 'package',
+        src => 'source',
+        source => 'source',
+        time => 'time',
+        binary => 'no_source_arch',
+        notsource => 'no_source_arch',
+        archs => 'return_archs',
+        displayarch => 'return_archs',
+    );
+
+    my %search_parameters;
+    my @archs = ();
+    my @dists = ();
+
+    for my $arg (@args) {
+        my ($key, $value) = split /:/, $arg, 2;
+        $value ||= "1";    # bare keys act as boolean flags
+        if ($key =~ /^arch(itecture)?$/) {
+            push @archs, $value;
+        } elsif ($key =~ /^dist(ribution)?$/) {
+            push @dists, $value;
+        } elsif (exists $valid_keys{$key}) {
+            $search_parameters{ $valid_keys{$key} } = $value;
+        }
+    }
+
+    $search_parameters{arch} = \@archs if @archs;
+    $search_parameters{dist} = \@dists if @dists;
+
+    my $soap = init_soap();
+
+    my $versions = $soap->get_versions(%search_parameters);
+
+    if (@errors or not defined $versions) {
+        my $error = join("\n", @errors);
+        die
+          "Error while retrieving package versions from SOAP server: $error\n";
+    }
+
+    my $result = $versions->result();
+
+    if (@errors or not defined $result) {
+        my $error = join("\n", @errors);
+        # Typo fixed ("retrieivng") and trailing newline added for
+        # consistency with every other error message in this module.
+        die
+          "Error while retrieving package versions from SOAP server: $error\n";
+    }
+
+    return $result;
+}
+
+# Convenience wrapper around versions() that also requests the
+# architecture list for each version (displayarch:1).
+sub versions_with_arch {
+    die "Couldn't run versions_with_arch: $soap_broken\n" unless have_soap();
+    my @args = @_;
+
+    my $versions = versions(@args, 'displayarch:1');
+
+    if (not defined $versions) {
+        # NOTE(review): versions() dies on SOAP errors rather than
+        # returning undef, so $@ is unlikely to carry anything useful
+        # here; kept for backward compatibility.
+        # (Typo "retrieivng" fixed in the message.)
+        die "Error while retrieving package versions from SOAP server: $@";
+    }
+
+    return $versions;
+}
+
+# Return the server's list of the $count most recently filed bug
+# numbers. Returns nothing when $count is not a plain number; dies on
+# SOAP errors.
+sub newest_bugs {
+    die "Couldn't run newest_bugs: $soap_broken\n" unless have_soap();
+    my $count = shift || '';
+
+    return unless $count =~ /^\d+$/;
+
+    my $soap = init_soap();
+    my $bugs = $soap->newest_bugs($count);
+
+    unless (defined $bugs and not @errors) {
+        my $error = join("\n", @errors);
+        die "Error while retrieving newest bug list from SOAP server: $error";
+    }
+
+    my $result = $bugs->result();
+
+    unless (defined $result and not @errors) {
+        my $error = join("\n", @errors);
+        die "Error while retrieving newest bug list from SOAP server: $error";
+    }
+
+    return $result;
+}
+
+# Fetch the message log of a single bug. Debbugs currently ignores the
+# message-number argument, but it is passed along in case a future
+# server honours it. Returns nothing for a non-numeric bug number;
+# dies on SOAP errors.
+sub bug_log {
+    die "Couldn't run bug_log: $soap_broken\n" unless have_soap();
+
+    my ($bug, $message) = @_;
+    $bug ||= '';
+    return unless $bug =~ /^\d+$/;
+
+    my $soap = init_soap();
+    my $log = $soap->get_bug_log($bug, $message);
+
+    if (@errors or not defined $log) {
+        die "Error while retrieving bug log from SOAP server: "
+          . join("\n", @errors) . "\n";
+    }
+
+    my $result = $log->result();
+
+    if (@errors or not defined $result) {
+        die "Error while retrieving bug log from SOAP server: "
+          . join("\n", @errors) . "\n";
+    }
+
+    return $result;
+}
+
+# Map a binary package (name, version, optional architecture) to its
+# source package(s) via the SOAP interface. Returns nothing when the
+# name or version is missing; dies on SOAP errors.
+sub binary_to_source {
+    die "Couldn't run binary_to_source: $soap_broken\n"
+      unless have_soap();
+
+    my ($binpkg, $binver, $arch) = @_;
+    return unless defined $binpkg and defined $binver;
+
+    my $soap = init_soap();
+    my $mapping = $soap->binary_to_source($binpkg, $binver, $arch);
+
+    if (@errors or not defined $mapping) {
+        my $error = join("\n", @errors);
+        die
+"Error while retrieving binary to source mapping from SOAP server: $error\n";
+    }
+
+    my $result = $mapping->result();
+
+    if (@errors or not defined $result) {
+        my $error = join("\n", @errors);
+        die
+"Error while retrieving binary to source mapping from SOAP server: $error\n";
+    }
+
+    return $result;
+}
+
+# Map a source package (name, version) to the binary packages it
+# builds, via the SOAP interface. Returns nothing when either argument
+# is missing; dies on SOAP errors.
+sub source_to_binary {
+    die "Couldn't run source_to_binary: $soap_broken\n"
+      unless have_soap();
+
+    my ($srcpkg, $srcver) = @_;
+    return unless defined $srcpkg and defined $srcver;
+
+    my $mapping = init_soap()->source_to_binary($srcpkg, $srcver);
+
+    if (@errors or not defined $mapping) {
+        my $error = join("\n", @errors);
+        die
+"Error while retrieving source to binary mapping from SOAP server: $error\n";
+    }
+
+    my $result = $mapping->result();
+
+    if (@errors or not defined $result) {
+        my $error = join("\n", @errors);
+        die
+"Error while retrieving source to binary mapping from SOAP server: $error\n";
+    }
+
+    return $result;
+}
+
+1;
+
+__END__
+
diff --git a/lib/Devscripts/JSONCache.pm b/lib/Devscripts/JSONCache.pm
new file mode 100644
index 0000000..b2e8761
--- /dev/null
+++ b/lib/Devscripts/JSONCache.pm
@@ -0,0 +1,97 @@
+package Devscripts::JSONCache;
+
+use strict;
+use JSON;
+use Moo;
+
+# Path of the JSON file backing this cache (required).
+has file => (is => 'rw', required => 1);
+
+# True once the cache has been written out (or a write attempt failed);
+# prevents duplicate saves from UNTIE/DESTROY.
+has saved => (is => 'rw');
+
+# In-memory hashref of the cache contents, lazily loaded by data().
+has _data => (is => 'rw');
+
+# Write $obj to the cache file as JSON, creating the file with mode
+# 0600 (via umask 0177). On open failure the cache is flagged "saved"
+# so DESTROY/UNTIE will not retry, then we die. Unlike the original,
+# the previous umask is restored even on the failure path, so the
+# restrictive umask no longer leaks into the rest of the process.
+sub save_sec {
+    my ($self, $obj) = @_;
+    my $old_umask = umask;
+    umask 0177;
+    my $fh;
+    unless (open($fh, '>', $self->file)) {
+        my $err = $!;
+        umask $old_umask;    # don't leak the restrictive umask
+        $self->saved(1);     # suppress retries from DESTROY/UNTIE
+        die $err;
+    }
+    print $fh JSON::to_json($obj);
+    close $fh;
+    umask $old_umask;
+}
+
+# Return the in-memory cache hashref, loading it from the file on
+# first use. If the file does not exist yet, an empty cache file is
+# created and undef is returned for this call only; the next call
+# re-reads the freshly created file and returns {}. TIEHASH calls
+# data() eagerly, so tied-hash accessors normally see the second,
+# populated pass. Marks the cache unsaved after creating the file.
+sub data {
+    my ($self) = @_;
+    return $self->_data if $self->_data;
+    my $res;
+    if (-r $self->file) {
+        open(F, $self->file) or ($self->saved(1) and die $!);
+        # An empty file is treated as an empty JSON object.
+        $res = JSON::from_json(join('', <F>) || "{}");
+        close F;
+    } else {
+        # Create the file with restrictive permissions; $res stays
+        # undef so the new file is parsed on the next call.
+        $self->save_sec({});
+        $self->saved(0);
+    }
+    return $self->_data($res);
+}
+
+# tie() entry point: build the object from the cache filename plus any
+# extra attributes, then eagerly load the file contents.
+sub TIEHASH {
+    my ($class, $file, @extra) = @_;
+    my $self = $class->new({
+        file => $file,
+        @extra,
+    });
+    # Prime the in-memory copy (creates the file if missing).
+    $self->data;
+    return $self;
+}
+
+# Tied-hash read accessor.
+sub FETCH {
+    return $_[0]->data->{ $_[1] };
+}
+
+# Tied-hash write accessor (persisted only by save()/UNTIE/DESTROY).
+sub STORE {
+    $_[0]->data->{ $_[1] } = $_[2];
+}
+
+# Tied-hash delete of a single key.
+sub DELETE {
+    delete $_[0]->data->{ $_[1] };
+}
+
+# Tied-hash CLEAR (%h = ()): empty the in-memory data; it is written
+# out by save() at UNTIE/DESTROY time as usual. The original called
+# save({}) — but save() takes no argument, so it wrote the *current*
+# contents and marked the cache saved, clearing nothing.
+sub CLEAR {
+    $_[0]->_data({});
+}
+
+# Tied-hash exists().
+sub EXISTS {
+    return exists $_[0]->data->{ $_[1] };
+}
+
+# Start of each()/keys() iteration: smallest key in string sort order.
+sub FIRSTKEY {
+    my ($k) = sort { $a cmp $b } keys %{ $_[0]->data };
+    return $k;
+}
+
+# Tied-hash iterator: given the previous key, return the next key in
+# ascending string order, or nothing at the end. The original used
+# "return" inside a map BLOCK; in Perl that returns from the enclosing
+# subroutine, so NEXTKEY bailed out on the very first element and
+# iteration always stopped after FIRSTKEY.
+sub NEXTKEY {
+    my ($self, $last) = @_;
+    my @keys = sort { $a cmp $b } keys %{ $self->data };
+    for my $i (0 .. $#keys - 1) {
+        return $keys[ $i + 1 ] if $keys[$i] eq $last;
+    }
+    return;
+}
+
+# Tied hash evaluated in scalar context (truth / bucket usage).
+sub SCALAR {
+    return scalar %{ $_[0]->data };
+}
+
+# Persist the cache to disk unless it was already saved (or a previous
+# attempt failed). Errors from save_sec() are deliberately swallowed:
+# this runs from DESTROY/UNTIE, where dying is unsafe.
+sub save {
+    return if ($_[0]->saved);
+    eval { $_[0]->save_sec($_[0]->data); };
+    $_[0]->saved(1);
+}
+
+# Save automatically when the tie is released or the object is
+# garbage-collected.
+*DESTROY = *UNTIE = *save;
+
+1;
diff --git a/lib/Devscripts/MkOrigtargz.pm b/lib/Devscripts/MkOrigtargz.pm
new file mode 100644
index 0000000..f4fa5fb
--- /dev/null
+++ b/lib/Devscripts/MkOrigtargz.pm
@@ -0,0 +1,631 @@
+package Devscripts::MkOrigtargz;
+
+use strict;
+use Cwd 'abs_path';
+use Devscripts::Compression
+ qw/compression_guess_from_file compression_get_property/;
+use Devscripts::MkOrigtargz::Config;
+use Devscripts::Output;
+use Devscripts::Uscan::Output;
+use Devscripts::Utils;
+use Dpkg::Changelog::Debian;
+use Dpkg::Control::Hash;
+use Dpkg::IPC;
+use Dpkg::Version;
+use File::Copy;
+use File::Spec;
+use File::Temp qw/tempdir/;
+use Moo;
+
+# Parsed configuration (command line + devscripts config files).
+has config => (
+    is => 'rw',
+    default => sub {
+        Devscripts::MkOrigtargz::Config->new->parse;
+    },
+);
+
+# Globs of files to strip from the tarball (--exclude-file plus any
+# Files-Excluded stanzas collected by parse_copyrights()).
+has exclude_globs => (
+    is => 'rw',
+    lazy => 1,
+    default => sub { $_[0]->config->exclude_file },
+);
+
+# Globs that override exclusions (--include-file / Files-Included).
+has include_globs => (
+    is => 'rw',
+    lazy => 1,
+    default => sub { $_[0]->config->include_file },
+);
+
+# Exit status of the run (0 = success, 1 = failure).
+has status => (is => 'rw', default => sub { 0 });
+# Canonicalised path of the generated tarball, for the caller's report.
+has destfile_nice => (is => 'rw');
+
+# Compression actually chosen for the destination file.
+our $found_comp;
+
+# Entry point used by mk-origtargz: first parse any copyright files
+# for Files-Excluded/Files-Included globs; unless that step failed
+# (parse_copyrights returns a true status only on error), build the
+# orig tarball. Returns the accumulated exit status.
+sub do {
+    my ($self) = @_;
+    $self->parse_copyrights or $self->make_orig_targz;
+    return $self->status;
+}
+
+# Produce the final .orig tarball from the configured upstream file:
+#   1. convert zip/jar/xpi archives to tar;
+#   2. decide whether a repack is needed (compression change, zstd
+#      input, or Files-Excluded deletions);
+#   3. unpack/delete/re-pack when required;
+#   4. symlink, copy or rename the result (and its signature, if any)
+#      into the destination directory;
+#   5. sanity-check the produced tarball and report what was done.
+# Returns 0 on success; on failure reports via ds_die and returns the
+# object's status set to 1.
+sub make_orig_targz {
+    my ($self) = @_;
+
+    # Now we know what the final filename will be
+    my $destfilebase = sprintf "%s_%s.%s.tar", $self->config->package,
+      $self->config->version, $self->config->orig;
+    my $destfiletar = sprintf "%s/%s", $self->config->directory, $destfilebase;
+    # NOTE(review): when compression is 'default', $destext is the
+    # literal string "default"; it is only used in one error message
+    # below, which would then read "tar.default".
+    my $destext
+      = $self->config->compression eq 'default'
+      ? 'default'
+      : compression_get_property($self->config->compression, "file_ext");
+    my $destfile;
+
+    # $upstream_tar is $upstream, unless the latter was a zip file.
+    my $upstream_tar = $self->config->upstream;
+
+    # Remember this for the final report
+    my $zipfile_deleted = 0;
+
+    # If the file is a zipfile, we need to create a tarfile from it.
+    if ($self->config->upstream_type eq 'zip') {
+        $destfile = $self->fix_dest_file($destfiletar);
+        if ($self->config->signature) {
+            $self->config->signature(4);    # repack upstream file
+        }
+
+        my $tempdir = tempdir("uscanXXXX", TMPDIR => 1, CLEANUP => 1);
+        # Parent of the target directory should be under our control
+        $tempdir .= '/repack';
+        my @cmd;
+        if ($self->config->upstream_comp eq 'xpi') {
+            @cmd = ('xpi-unpack', $upstream_tar, $tempdir);
+            unless (ds_exec_no_fail(@cmd) >> 8 == 0) {
+                ds_die("Repacking from xpi failed (could not xpi-unpack)\n");
+                return $self->status(1);
+            }
+        } else {
+            unless (mkdir $tempdir) {
+                ds_die("Unable to mkdir($tempdir): $!\n");
+                return $self->status(1);
+            }
+            @cmd = ('unzip', '-q');
+            push @cmd, split ' ', $self->config->unzipopt
+              if defined $self->config->unzipopt;
+            push @cmd, ('-d', $tempdir, $upstream_tar);
+            unless (ds_exec_no_fail(@cmd) >> 8 == 0) {
+                ds_die("Repacking from zip or jar failed (could not unzip)\n");
+                return $self->status(1);
+            }
+        }
+
+        # Figure out the top-level contents of the tarball.
+        # If we'd pass "." to tar we'd get the same contents, but the filenames
+        # would start with ./, which is confusing later.
+        # This should also be more reliable than, say, changing directories and
+        # globbing.
+        unless (opendir(TMPDIR, $tempdir)) {
+            ds_die("Can't open $tempdir $!\n");
+            return $self->status(1);
+        }
+        my @files = grep { $_ ne "." && $_ ne ".." } readdir(TMPDIR);
+        close TMPDIR;
+
+        # tar it all up
+        spawn(
+            exec => [
+                'tar', '--owner=root',
+                '--group=root', '--mode=a+rX',
+                '--create', '--file',
+                "$destfiletar", '--directory',
+                $tempdir, @files
+            ],
+            wait_child => 1
+        );
+        unless (-e "$destfiletar") {
+            ds_die(
+"Repacking from zip or jar to tar.$destext failed (could not create tarball)\n"
+            );
+            return $self->status(1);
+        }
+        eval {
+            compress_archive($destfiletar, $destfile,
+                $self->config->compression);
+        };
+        if ($@) {
+            ds_die($@);
+            return $self->status(1);
+        }
+
+        # rename means the user did not want this file to exist afterwards
+        if ($self->config->mode eq "rename") {
+            unlink $upstream_tar;
+            $zipfile_deleted++;
+        }
+
+        $self->config->mode('repack');
+        $upstream_tar = $destfile;
+    } elsif (compression_guess_from_file($upstream_tar) =~ /^zstd?$/) {
+        # dpkg-source does not accept zstd-compressed tarballs.
+        $self->config->force_repack(1);
+    }
+
+    # From now on, $upstream_tar is guaranteed to be a compressed tarball. It
+    # is always a full (possibly relative) path, and distinct from $destfile.
+
+    # Find out if we have to repack
+    my $do_repack = 0;
+    if ($self->config->repack) {
+        my $comp = compression_guess_from_file($upstream_tar);
+        unless ($comp) {
+            ds_die("Cannot determine compression method of $upstream_tar");
+            return $self->status(1);
+        }
+        $do_repack = (
+            $comp eq 'tar'
+              or ( $self->config->compression ne 'default'
+                and $comp ne $self->config->compression)
+              or ( $self->config->compression eq 'default'
+                and $comp ne
+                &Devscripts::MkOrigtargz::Config::default_compression));
+    }
+
+    # Removing files
+    my $deletecount = 0;
+    my @to_delete;
+
+    if (@{ $self->exclude_globs }) {
+        my @files;
+        my $files;
+        spawn(
+            exec => ['tar', '-t', '-a', '-f', $upstream_tar],
+            to_string => \$files,
+            wait_child => 1
+        );
+        @files = split /^/, $files;
+        chomp @files;
+
+        my %delete;
+        # find out what to delete
+        my @exclude_info;
+        eval {
+            @exclude_info
+              = map { { glob => $_, used => 0, regex => glob_to_regex($_) } }
+              @{ $self->exclude_globs };
+        };
+        if ($@) {
+            ds_die($@);
+            return $self->status(1);
+        }
+        for my $filename (sort @files) {
+            my $last_match;
+            for my $info (@exclude_info) {
+                if (
+                    $filename
+                    =~ m@^(?:[^/]*/)? # Possible leading directory, ignore it
+                    (?:$info->{regex}) # User pattern
+                    (?:/.*)?$ # Possible trailing / for a directory
+                    @x
+                ) {
+                    if (!$last_match) {
+                        # if the current entry is a directory, check if it
+                        # matches any exclude-ignored glob
+                        my $ignore_this_exclude = 0;
+                        for my $ignore_exclude (@{ $self->include_globs }) {
+                            my $ignore_exclude_regex
+                              = glob_to_regex($ignore_exclude);
+
+                            if ($filename =~ $ignore_exclude_regex) {
+                                $ignore_this_exclude = 1;
+                                last;
+                            }
+                            if ( $filename =~ m,/$,
+                                && $ignore_exclude =~ $info->{regex}) {
+                                $ignore_this_exclude = 1;
+                                last;
+                            }
+                        }
+                        next if $ignore_this_exclude;
+                        $delete{$filename} = 1;
+                    }
+                    $last_match = $info;
+                }
+            }
+            if (defined $last_match) {
+                $last_match->{used} = 1;
+            }
+        }
+
+        # Warn about Files-Excluded globs that matched nothing.
+        for my $info (@exclude_info) {
+            if (!$info->{used}) {
+                ds_warn
+"No files matched excluded pattern as the last matching glob: $info->{glob}\n";
+            }
+        }
+
+        # ensure files are mentioned before the directory they live in
+        # (otherwise tar complains)
+        @to_delete = sort { $b cmp $a } keys %delete;
+
+        $deletecount = scalar(@to_delete);
+    }
+
+    if ($deletecount or $self->config->force_repack) {
+        $destfilebase = sprintf "%s_%s%s.%s.tar", $self->config->package,
+          $self->config->version, $self->config->repack_suffix,
+          $self->config->orig;
+        $destfiletar = sprintf "%s/%s", $self->config->directory,
+          $destfilebase;
+        $destfile = $self->fix_dest_file($destfiletar);
+
+        # Zip -> tar process already created $destfile, so need to rename it
+        if ($self->config->upstream_type eq 'zip') {
+            move($upstream_tar, $destfile);
+            $upstream_tar = $destfile;
+        }
+    }
+
+    # Actually do the unpack, remove, pack cycle
+    if ($do_repack || $deletecount || $self->config->force_repack) {
+        $destfile ||= $self->fix_dest_file($destfiletar);
+        if ($self->config->signature) {
+            $self->config->signature(4);    # repack upstream file
+        }
+        if ($self->config->upstream_comp) {
+            eval { decompress_archive($upstream_tar, $destfiletar) };
+            if ($@) {
+                ds_die($@);
+                return $self->status(1);
+            }
+        } else {
+            copy $upstream_tar, $destfiletar;
+        }
+        unlink $upstream_tar if $self->config->mode eq "rename";
+        # We have to use piping because --delete is broken otherwise, as
+        # documented at
+        # https://www.gnu.org/software/tar/manual/html_node/delete.html
+        if (@to_delete) {
+            # ARG_MAX: max number of bytes exec() can handle
+            my $arg_max;
+            spawn(
+                exec => ['getconf', 'ARG_MAX'],
+                to_string => \$arg_max,
+                wait_child => 1
+            );
+            # Under Hurd `getconf` above returns "undefined".
+            # It's apparently unlimited (?), so we just use a arbitrary number.
+            if ($arg_max =~ /\D/) { $arg_max = 131072; }
+            # Usually NAME_MAX=255, but here we use 128 to be on the safe side.
+            $arg_max = int($arg_max / 128);
+            # We use this lame splice on a totally arbitrary $arg_max because
+            # counting how many bytes there are in @to_delete is too
+            # inefficient.
+            while (my @next_n = splice @to_delete, 0, $arg_max) {
+                spawn(
+                    exec => ['tar', '--delete', @next_n],
+                    from_file => $destfiletar,
+                    to_file => $destfiletar . ".tmp",
+                    wait_child => 1
+                ) if scalar(@next_n) > 0;
+                move($destfiletar . ".tmp", $destfiletar);
+            }
+        }
+        eval {
+            compress_archive($destfiletar, $destfile,
+                $self->config->compression);
+        };
+        if ($@) {
+            ds_die $@;
+            return $self->status(1);
+        }
+
+        # Symlink no longer makes sense
+        $self->config->mode('repack');
+        $upstream_tar = $destfile;
+    } else {
+        # No repack needed: keep the upstream compression as-is.
+        $destfile = $self->fix_dest_file($destfiletar,
+            compression_guess_from_file($upstream_tar), 1);
+    }
+
+    # Final step: symlink, copy or rename for tarball.
+
+    my $same_name = abs_path($destfile) eq abs_path($self->config->upstream);
+    unless ($same_name) {
+        if ( $self->config->mode ne "repack"
+            and $upstream_tar ne $self->config->upstream) {
+            ds_die "Assertion failed";
+            return $self->status(1);
+        }
+
+        if ($self->config->mode eq "symlink") {
+            my $rel
+              = File::Spec->abs2rel($upstream_tar, $self->config->directory);
+            symlink $rel, $destfile;
+        } elsif ($self->config->mode eq "copy") {
+            copy($upstream_tar, $destfile);
+        } elsif ($self->config->mode eq "rename") {
+            move($upstream_tar, $destfile);
+        }
+    }
+
+    # Final step: symlink, copy or rename for signature file.
+
+    my $destsigfile;
+    if ($self->config->signature == 1) {
+        $destsigfile = sprintf "%s.asc", $destfile;
+    } elsif ($self->config->signature == 2) {
+        $destsigfile = sprintf "%s.asc", $destfiletar;
+    } elsif ($self->config->signature == 3) {
+        # XXX FIXME XXX place holder
+        $destsigfile = sprintf "%s.asc", $destfile;
+    } else {
+        # $self->config->signature == 0 or 4
+        $destsigfile = "";
+    }
+
+    if ($self->config->signature == 1 or $self->config->signature == 2) {
+        my $is_openpgp_ascii_armor = 0;
+        my $fh_sig;
+        unless (open($fh_sig, '<', $self->config->signature_file)) {
+            ds_die "Cannot open $self->{config}->{signature_file}\n";
+            return $self->status(1);
+        }
+        while (<$fh_sig>) {
+            if (m/^-----BEGIN PGP /) {
+                $is_openpgp_ascii_armor = 1;
+                last;
+            }
+        }
+        close($fh_sig);
+
+        # Binary signatures are converted to ASCII-armored *.asc form.
+        if (not $is_openpgp_ascii_armor) {
+            my @enarmor
+              = `gpg --no-options --output - --enarmor $self->{config}->{signature_file} 2>&1`;
+            unless ($? == 0) {
+                ds_die
+"Failed to convert $self->{config}->{signature_file} to *.asc\n";
+                return $self->status(1);
+            }
+            unless (open(DESTSIG, '>', $destsigfile)) {
+                ds_die "Failed to open $destsigfile for write $!\n";
+                return $self->status(1);
+            }
+            foreach my $line (@enarmor) {
+                next if $line =~ m/^Version:/;
+                next if $line =~ m/^Comment:/;
+                $line =~ s/ARMORED FILE/SIGNATURE/;
+                print DESTSIG $line;
+            }
+            unless (close(DESTSIG)) {
+                ds_die
+"Cannot write signature file $self->{config}->{signature_file}\n";
+                return $self->status(1);
+            }
+        } else {
+            if (abs_path($self->config->signature_file) ne
+                abs_path($destsigfile)) {
+                if ($self->config->mode eq "symlink") {
+                    my $rel = File::Spec->abs2rel(
+                        $self->config->signature_file,
+                        $self->config->directory
+                    );
+                    symlink $rel, $destsigfile;
+                } elsif ($self->config->mode eq "copy") {
+                    copy($self->config->signature_file, $destsigfile);
+                } elsif ($self->config->mode eq "rename") {
+                    move($self->config->signature_file, $destsigfile);
+                } else {
+                    ds_die 'Strange mode="' . $self->config->mode . "\"\n";
+                    return $self->status(1);
+                }
+            }
+        }
+    } elsif ($self->config->signature == 3) {
+        uscan_msg_raw
+"Skip adding upstream signature since upstream file has non-detached signature file.";
+    } elsif ($self->config->signature == 4) {
+        uscan_msg_raw
+          "Skip adding upstream signature since upstream file is repacked.";
+    }
+
+    # Final check: Is the tarball usable
+
+    # We are lazy and rely on Dpkg::IPC to report an error message
+    # (spawn does not report back the error code).
+    # We don't expect this to occur often anyways.
+    # NOTE(review): $ret is unused; wait_child makes spawn die on
+    # failure, which is the actual error path here.
+    my $ret = spawn(
+        exec => ['tar', '--list', '--auto-compress', '--file', $destfile],
+        wait_child => 1,
+        to_file => '/dev/null'
+    );
+
+    # Tell the user what we did
+
+    my $upstream_nice = File::Spec->canonpath($self->config->upstream);
+    my $destfile_nice = File::Spec->canonpath($destfile);
+    $self->destfile_nice($destfile_nice);
+
+    if ($same_name) {
+        uscan_msg_raw "Leaving $destfile_nice where it is";
+    } else {
+        if ( $self->config->upstream_type eq 'zip'
+            or $do_repack
+            or $deletecount
+            or $self->config->force_repack) {
+            uscan_msg_raw
+              "Successfully repacked $upstream_nice as $destfile_nice";
+        } elsif ($self->config->mode eq "symlink") {
+            uscan_msg_raw
+              "Successfully symlinked $upstream_nice to $destfile_nice";
+        } elsif ($self->config->mode eq "copy") {
+            uscan_msg_raw
+              "Successfully copied $upstream_nice to $destfile_nice";
+        } elsif ($self->config->mode eq "rename") {
+            uscan_msg_raw
+              "Successfully renamed $upstream_nice to $destfile_nice";
+        } else {
+            ds_die 'Unknown mode ' . $self->config->mode;
+            return $self->status(1);
+        }
+    }
+
+    if ($deletecount) {
+        uscan_msg_raw ", deleting ${deletecount} files from it";
+    }
+    if ($zipfile_deleted) {
+        uscan_msg_raw ", and removed the original file";
+    }
+    print ".\n";
+    return 0;
+}
+
+# Decompress $src into $dst using the decompressor matching the
+# detected compression type; dies when the type cannot be determined.
+# spawn()'s wait_child makes a failing decompressor fatal.
+sub decompress_archive {
+    my ($src, $dst) = @_;
+    my $type = compression_guess_from_file($src);
+    die("Cannot determine compression method of $src") unless $type;
+
+    spawn(
+        exec => compression_get_property($type, 'decomp_prog'),
+        from_file => $src,
+        to_file => $dst,
+        wait_child => 1
+    );
+}
+
+# Compress $src into $dst with the given compression type at its
+# default level, then remove the uncompressed source file.
+sub compress_archive {
+    my ($src, $dst, $type) = @_;
+
+    my $prog = compression_get_property($type, 'comp_prog');
+    my $level = compression_get_property($type, 'default_level');
+    spawn(
+        exec => [@{$prog}, "-$level"],
+        from_file => $src,
+        to_file => $dst,
+        wait_child => 1
+    );
+    unlink $src;
+}
+
+# Adapted from Text::Glob::glob_to_regex_string
+# Convert a Files-Excluded glob into a regex string. Only '*', '?' and
+# '\' (which may escape '\', '*' or '?') are special; every other
+# regex metacharacter is escaped so it matches literally. A trailing
+# '/' triggers a warning and is dropped; a '\' escaping anything else
+# makes this die.
+sub glob_to_regex {
+    my ($glob) = @_;
+
+    if ($glob =~ m@/$@) {
+        ds_warn
+          "Files-Excluded pattern ($glob) should not have a trailing /\n";
+        chop($glob);
+    }
+    # Reject any backslash not escaping \, * or ? (accounting for
+    # preceding doubled backslashes).
+    if ($glob =~ m/(?<!\\)(?:\\{2})*\\(?![\\*?])/) {
+        die
+"Invalid Files-Excluded pattern ($glob), \\ can only escape \\, *, or ? characters\n";
+    }
+
+    # $escaping tracks whether the previous character was an unconsumed
+    # backslash, so the next *, ? or \ is taken literally.
+    my ($regex, $escaping);
+    for my $c ($glob =~ m/(.)/gs) {
+        if (
+            $c eq '.'
+            || $c eq '('
+            || $c eq ')'
+            || $c eq '|'
+            || $c eq '+'
+            || $c eq '^'
+            || $c eq '$'
+            || $c eq '@'
+            || $c eq '%'
+            || $c eq '{'
+            || $c eq '}'
+            || $c eq '['
+            || $c eq ']'
+            ||
+
+            # Escape '#' since we're using /x in the pattern match
+            $c eq '#'
+        ) {
+            $regex .= "\\$c";
+        } elsif ($c eq '*') {
+            $regex .= $escaping ? "\\*" : ".*";
+        } elsif ($c eq '?') {
+            $regex .= $escaping ? "\\?" : ".";
+        } elsif ($c eq "\\") {
+            if ($escaping) {
+                $regex .= "\\\\";
+                $escaping = 0;
+            } else {
+                $escaping = 1;
+            }
+            # Skip the unconditional reset below so the escape state
+            # survives to the next character.
+            next;
+        } else {
+            $regex .= $c;
+            $escaping = 0;
+        }
+        $escaping = 0;
+    }
+
+    return $regex;
+}
+
+# Read each file given via --copyright-file. For machine-readable
+# debian/copyright files (Format: .../copyright-format/<version>),
+# collect the Files-Excluded/Files-Included globs (or their component
+# variants) into exclude_globs / include_globs. A non machine-readable
+# file that still mentions the exclude stanza only gets a warning.
+# Returns status 1 (truthy) on error, false otherwise.
+sub parse_copyrights {
+    my ($self) = @_;
+    for my $copyright_file (@{ $self->config->copyright_file }) {
+        my $data = Dpkg::Control::Hash->new();
+        my $okformat
+          = qr'https?://www.debian.org/doc/packaging-manuals/copyright-format/[.\d]+';
+        # A parse failure leaves $data undef and falls through to the
+        # "format not recognized" branch below.
+        eval {
+            $data->load($copyright_file);
+            1;
+        } or do {
+            undef $data;
+        };
+        if (not -e $copyright_file) {
+            ds_die "File $copyright_file not found.";
+            return $self->status(1);
+        } elsif ($data
+            && defined $data->{format}
+            && $data->{format} =~ m@^$okformat/?$@) {
+            if ($data->{ $self->config->excludestanza }) {
+                push(
+                    @{ $self->exclude_globs },
+                    grep { $_ }
+                      split(/\s+/, $data->{ $self->config->excludestanza }));
+            }
+            if ($data->{ $self->config->includestanza }) {
+                push(
+                    @{ $self->include_globs },
+                    grep { $_ }
+                      split(/\s+/, $data->{ $self->config->includestanza }));
+            }
+        } else {
+            if (open my $file, '<', $copyright_file) {
+                while (my $line = <$file>) {
+                    if ($line =~ m/\b$self->{config}->{excludestanza}.*:/i) {
+                        ds_warn "The file $copyright_file mentions "
+                          . $self->config->excludestanza
+                          . ", but its "
+                          . "format is not recognized. Specify Format: "
+                          . "https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ "
+                          . "in order to remove files from the tarball with mk-origtargz.\n";
+                        last;
+                    }
+                }
+                close $file;
+            } else {
+                ds_die "Unable to read $copyright_file: $!\n";
+                return $self->status(1);
+            }
+        }
+    }
+}
+
+# Resolve the destination filename: pin down the target compression
+# (using $detected, or the module default, when the configured value
+# is still 'default' or $force is set), remember the first choice in
+# $found_comp, and return $basename with the matching file extension.
+sub fix_dest_file {
+    my ($self, $basename, $detected, $force) = @_;
+    if ($force or $self->config->compression eq 'default') {
+        $self->config->compression(
+            $detected || &Devscripts::MkOrigtargz::Config::default_compression);
+    }
+    my $ext = compression_get_property($self->config->compression, "file_ext");
+    $found_comp ||= $self->config->compression;
+    return sprintf "%s.%s", $basename, $ext;
+}
+
+1;
diff --git a/lib/Devscripts/MkOrigtargz/Config.pm b/lib/Devscripts/MkOrigtargz/Config.pm
new file mode 100644
index 0000000..a4612be
--- /dev/null
+++ b/lib/Devscripts/MkOrigtargz/Config.pm
@@ -0,0 +1,247 @@
+package Devscripts::MkOrigtargz::Config;
+
+use strict;
+
+use Devscripts::Compression qw'compression_is_supported
+ compression_guess_from_file
+ compression_get_property';
+use Devscripts::Uscan::Output;
+use Exporter 'import';
+use File::Which;
+use Moo;
+
+use constant default_compression => 'xz';
+
+# regexp-assemble << END
+# tar\.gz
+# tgz
+# tar\.bz2
+# tbz2?
+# tar\.lzma
+# tlz(?:ma?)?
+# tar\.xz
+# txz
+# tar\.Z
+# tar
+# tar.zst
+# tar.zstd
+# END
# Filename extensions recognized as (possibly compressed) tar archives:
# tar, tar.gz/tgz, tar.bz2/tbz/tbz2, tar.lzma/tlz/tlzm/tlzma, tar.xz/txz,
# tar.Z, tar.zst, tar.zstd.
# Fix: the dot before "zst" is now escaped; the previous ".zstd?" let any
# character precede "zst", so names like "foo.tarXzst" were wrongly
# accepted as tarballs.
use constant tar_regex =>
  qr/t(?:ar(?:\.(?:[gx]z|lzma|bz2|Z|zstd?))?|lz(?:ma?)?|[gx]z|bz2?)$/;
+
+extends 'Devscripts::Config';
+
+# Command-line parameters
+has component => (is => 'rw');
+has compression => (is => 'rw');
+has copyright_file => (is => 'rw');
+has directory => (is => 'rw');
+has exclude_file => (is => 'rw');
+has include_file => (is => 'rw');
+has force_repack => (is => 'rw');
+has package => (is => 'rw');
+has signature => (is => 'rw');
+has signature_file => (is => 'rw');
+has repack => (is => 'rw');
+has repack_suffix => (is => 'rw');
+has unzipopt => (is => 'rw');
+has version => (is => 'rw');
+
+# Internal accessors
+has mode => (is => 'rw');
+has orig => (is => 'rw', default => sub { 'orig' });
+has excludestanza => (is => 'rw', default => sub { 'Files-Excluded' });
+has includestanza => (is => 'rw', default => sub { 'Files-Included' });
+has upstream => (is => 'rw');
+has upstream_type => (is => 'rw');
+has upstream_comp => (is => 'rw');
+
# Command-line option declarations consumed by Devscripts::Config.
# Each entry is [ Getopt::Long spec, config-file variable, validator or
# callback, default ]; a callback returning (0, $msg) aborts parsing.
use constant keys => [
    ['package=s'],
    ['version|v=s'],
    [
        'component|c=s',
        undef,
        sub {
            # A secondary component tarball renames the "orig" part and the
            # copyright stanzas: orig-<c>, Files-Excluded-<c>,
            # Files-Included-<c>.
            if ($_[1]) {
                $_[0]->orig("orig-$_[1]");
                $_[0]->excludestanza("Files-Excluded-$_[1]");
                $_[0]->includestanza("Files-Included-$_[1]");
            }
            1;

        }
    ],
    ['directory|C=s'],
    ['exclude-file=s', undef, undef, sub { [] }],
    ['include-file=s', undef, undef, sub { [] }],
    ['force-repack'],
    ['copyright-file=s', undef, undef, sub { [] }],
    ['signature=i', undef, undef, 0],
    ['signature-file=s', undef, undef, ''],
    [
        'compression=s',
        undef,
        sub {
            # Accept 'default' (resolved later by the rules) or any scheme
            # known to Devscripts::Compression.
            return (0, "Unknown compression scheme $_[1]")
              unless ($_[1] eq 'default' or compression_is_supported($_[1]));
            $_[0]->compression($_[1]);
        },
    ],
    # --symlink/--rename/--copy are mutually exclusive; setmode() enforces it
    ['symlink', undef, \&setmode],
    ['rename', undef, \&setmode],
    ['copy', undef, \&setmode],
    ['repack'],
    ['repack-suffix|S=s', undef, undef, ''],
    ['unzipopt=s'],
];
+
# Validation/initialization rules run by Devscripts::Config->parse after
# option parsing.  Each rule returns 1 on success or (0, $message) to
# abort with an error.
use constant rules => [
    # --version requires --package
    sub {
        return (
            (defined $_[0]->{package} and not defined $_[0]->{version})
            ? (0, 'If you use --package, you also have to specify --version')
            : (1));
    },
    # Check that a tarball has been given and store it in $self->upstream
    sub {
        return (0, 'Please specify original tarball') unless (@ARGV == 1);
        $_[0]->upstream($ARGV[0]);
        return (
            -r $_[0]->upstream
            ? (1)
            : (0, "Could not read $_[0]->{upstream}: $!"));
    },
    # Get Debian package name and version from debian/changelog unless given
    sub {
        my ($self) = @_;
        unless (defined $self->package) {

            # get package name
            my $c = Dpkg::Changelog::Debian->new(range => { count => 1 });
            $c->load('debian/changelog');
            if (my $msg = $c->get_parse_errors()) {
                return (0, "could not parse debian/changelog:\n$msg");
            }
            my ($entry) = @{$c};
            $self->package($entry->get_source());

            # get version number
            unless (defined $self->version) {
                my $debversion = Dpkg::Version->new($entry->get_version());
                if ($debversion->is_native()) {
                    return (0,
                            "Package with native version number $debversion; "
                          . "mk-origtargz makes no sense for native packages."
                    );
                }
                $self->version($debversion->version());
            }

            unshift @{ $self->copyright_file }, "debian/copyright"
              if -r "debian/copyright";

            # set destination directory
            unless (defined $self->directory) {
                $self->directory('..');
            }
        } else {
            unless (defined $self->directory) {
                $self->directory('.');
            }
        }
        return 1;
    },
    # Get upstream type and compression
    sub {
        my ($self) = @_;
        my $mime = compression_guess_from_file($self->upstream);

        if (defined $mime and $mime eq 'zip') {
            $self->upstream_type('zip');
            my ($prog, $pkg);
            # .xpi (Mozilla extension) archives need a dedicated unpacker
            if ($self->upstream =~ /\.xpi$/i) {
                $self->upstream_comp('xpi');
                $prog = 'xpi-unpack';
                $pkg  = 'mozilla-devscripts';
            } else {
                $self->upstream_comp('zip');
                $prog = $pkg = 'unzip';
            }
            return (0,
                    "$prog binary not found."
                  . " You need to install the package $pkg"
                  . " to be able to repack "
                  . $self->upstream_type
                  . " upstream archives.\n")
              unless (which $prog);
        } elsif ($self->upstream =~ tar_regex) {
            $self->upstream_type('tar');
            if ($self->upstream =~ /\.tar$/) {
                $self->upstream_comp('');
            } else {
                unless (
                    $self->upstream_comp(
                        compression_guess_from_file($self->upstream))
                ) {
                    return (0,
                        "Unknown compression used in $self->{upstream}");
                }
            }
        } else {
            # TODO: Should we ignore the name and only look at what file knows?
            return (0,
                    'Parameter '
                  . $self->upstream
                  . ' does not look like a tar archive or a zip file.');
        }
        return 1;
    },
    # Default compression
    sub {
        my ($self) = @_;

        # Source formats earlier than 2.0 only support gzip: force it and
        # schedule a repack when upstream uses a different compression.
        if (-r 'debian/source/format') {
            # Fix: was an unchecked 2-arg open on the bareword handle F
            if (open my $fmt, '<', 'debian/source/format') {
                my $str = <$fmt>;
                unless ($str =~ /^([\d\.]+)/ and $1 >= 2.0) {
                    ds_warn
"Source format is earlier than 2.0, switch compression to gzip";
                    $self->compression('gzip');
                    $self->repack(1) unless ($self->upstream_comp eq 'gzip');
                }
                close $fmt;
            } else {
                ds_warn "Unable to read debian/source/format: $!";
            }
        } elsif (-d 'debian') {
            ds_warn "Missing debian/source/format, switch compression to gzip";
            $self->compression('gzip');
            $self->repack(1) unless ($self->upstream_comp eq 'gzip');
        } elsif ($self->upstream_type eq 'tar') {

            # Uncompressed tar always needs a repack
            if (!$self->upstream_comp) {
                $self->repack(1);
            }
        }
        # Set to default. Will be changed after setting do_repack
        $self->compression('default')
          unless ($self->compression);
        return 1;
    },
    # Fall back to --symlink when no mode option was given
    sub {
        my ($self) = @_;
        $self->{mode} ||= 'symlink';
    },
];
+
# Record the tarball handling mode selected by --symlink/--rename/--copy.
# Only one of the three may be given; a second, different one yields the
# (0, $message) error tuple expected by Devscripts::Config.
# Called with ($config, $option_value, $option_name).
sub setmode {
    my ($self, $value, $optname) = @_;

    # Option was negated or unset: nothing to record.
    return unless $value;

    my $previous = $self->mode;
    if (defined $previous && $previous ne $optname) {
        return (0, "--$self->{mode} and --$optname are mutually exclusive");
    }
    $self->mode($optname);
}
+
+1;
diff --git a/lib/Devscripts/Output.pm b/lib/Devscripts/Output.pm
new file mode 100644
index 0000000..6ef2947
--- /dev/null
+++ b/lib/Devscripts/Output.pm
@@ -0,0 +1,83 @@
+package Devscripts::Output;
+
+use strict;
+use Exporter 'import';
+use File::Basename;
+use constant accept => qr/^y(?:es)?\s*$/i;
+use constant refuse => qr/^n(?:o)?\s*$/i;
+
+our @EXPORT = (
+ qw(ds_debug ds_extra_debug ds_verbose ds_warn ds_error
+ ds_die ds_msg who_called $progname $verbose
+ ds_prompt accept refuse $ds_yes)
+);
+
+# ACCESSORS
+our ($verbose, $die_on_error, $ds_yes) = (0, 1, 0);
+
+our $progname = basename($0);
+
# Print a message with exactly one trailing newline, to STDOUT by default
# or to STDERR when the second argument is true.
sub printwarn {
    my ($text, $to_stderr) = @_;
    chomp $text;
    my $handle = $to_stderr ? \*STDERR : \*STDOUT;
    print {$handle} "$text\n";
}
+
# Print "<progname>: <msg>"; the optional second argument routes it to
# STDERR when true.
sub ds_msg {
    my $msg = $_[0];
    printwarn("$progname: $msg", $_[1]);
}
+
# Print "<progname> info: <msg>" only when verbosity is enabled
# ($verbose > 0); second argument optionally routes it to STDERR.
sub ds_verbose {
    my $msg = $_[0];
    if ($verbose > 0) {
        printwarn("$progname info: $msg", $_[1]);
    }
}
+
# Return " [package: line]" identifying the caller's caller, for debug
# output; empty string unless debugging ($verbose > 1).
sub who_called {
    return '' unless ($verbose > 1);
    my @out = caller(1);
    return " [$out[0]: $out[2]]";
}
+
# Print "<progname> warn: <msg>" to STDERR, with caller info when debugging.
sub ds_warn {
    my $msg = $_[0];
    printwarn("$progname warn: $msg" . who_called, 1);
}
+
# Debug-level message, shown only when $verbose > 1 (--debug).
sub ds_debug {
    my $msg = $_[0];
    printwarn("$progname debug: $msg", $_[1]) if $verbose > 1;
}
+
# Extra-debug-level message, shown only when $verbose > 2.
sub ds_extra_debug {
    my $msg = $_[0];
    printwarn("$progname debug: $msg", $_[1]) if $verbose > 2;
}
+
# ds_die is an alias for ds_error.
*ds_die = \&ds_error;

# Print "<progname> error: <msg>" (with caller info when debugging) and,
# when $die_on_error is set (the default), exit with status 1; otherwise
# just emit it on STDERR and return.
sub ds_error {
    my $msg = $_[0];
    $msg = "$progname error: $msg" . who_called;
    if ($die_on_error) {
        print STDERR "$msg\n";
        exit 1;
    }
    printwarn($msg, 1);
}
+
# Print the given prompt on STDERR and return the user's chomped answer.
# When --yes was given ($ds_yes > 0), skip the prompt and assume 'yes'.
sub ds_prompt {
    return 'yes' if ($ds_yes > 0);
    print STDERR shift;
    my $s = <STDIN>;
    chomp $s;
    return $s;
}
+
+1;
diff --git a/lib/Devscripts/PackageDeps.pm b/lib/Devscripts/PackageDeps.pm
new file mode 100644
index 0000000..8bad0d7
--- /dev/null
+++ b/lib/Devscripts/PackageDeps.pm
@@ -0,0 +1,307 @@
+# Based vaguely on the deprecated dpkg-perl package modules
+# Dpkg::Package::List and Dpkg::Package::Package.
+# This module creates an object which holds package names and dependencies
+# (just Depends and Pre-Depends).
+# It can also calculate the total set of subdependencies using the
+# fulldepends method.
+#
+# Copyright 2002 Julian Gilbey <jdg@debian.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+package Devscripts::PackageDeps;
+use strict;
+use Carp;
+use Dpkg::Control;
+use Dpkg::IPC;
+use FileHandle;
+require 5.006_000;
+
+# This reads in a package file list, such as /var/lib/dpkg/status,
+# and parses it. Using /var/lib/dpkg/status is deprecated in favor of
+# fromStatus().
+
+# Syntax: Devscripts::PackageDeps->new($filename)
+
# Constructor: read and parse a file in dpkg status format (for example a
# copy of /var/lib/dpkg/status).  Croaks when no filename is given, the
# file cannot be opened, or reading fails.
# Syntax: Devscripts::PackageDeps->new($filename)
sub new ($$) {
    my $this = shift;
    my $class = ref($this) || $this;
    my $filename = shift;

    my $self = {};

    if (!defined $filename) {
        croak("requires filename as parameter");
    }

    bless($self, $class);

    my $fh = FileHandle->new($filename, 'r');
    unless (defined $fh) {
        croak("Unable to load $filename: $!");
    }
    $self->parse($fh, $filename);
    $fh->close or croak("Problems encountered reading $filename: $!");

    return $self;
}
+
+# This reads in dpkg's status information and parses it.
+
+# Syntax: Devscripts::PackageDeps->fromStatus()
+
# Alternative constructor: parse the live output of "dpkg --status"
# instead of reading a status file.  Croaks when dpkg cannot be spawned.
# Syntax: Devscripts::PackageDeps->fromStatus()
sub fromStatus ($) {
    my $this = shift;
    my $class = ref($this) || $this;

    my $self = {};

    bless($self, $class);

    my $fh = FileHandle->new;
    my $pid = spawn(
        exec => ['dpkg', '--status'],
        to_pipe => $fh
    );
    unless (defined $pid) {
        croak("Unable to run 'dpkg --status': $!");
    }

    $self->parse($fh, 'dpkg --status');

    # nocheck: dpkg's exit status is deliberately ignored here
    wait_child($pid, cmdline => 'dpkg --status', nocheck => 1);

    return $self;
}
+
+# Internal functions
+
# Cached result of "dpkg --assert-multi-arch": true when the installed
# dpkg supports multi-arch.  Computed once, on first call.
my $multiarch;

sub multiarch () {
    unless (defined $multiarch) {
        my $rc = system('dpkg --assert-multi-arch >/dev/null 2>&1');
        $multiarch = (($rc >> 8) == 0);
    }
    return $multiarch;
}
+
# Parse a dpkg status stream ($fh, labelled $filename for error messages)
# and record, for every package that is at least unpacked, its direct
# Depends/Pre-Depends as $self->{$pkg} = [ deps ].  Alternative groups
# ("a | b") are stored as nested arrayrefs; version constraints are
# discarded.  Under a multi-arch dpkg, "pkg:arch" entries are added for
# non-"all" packages.
# NOTE(review): the multi-arch map stringifies arrayref alternatives via
# "$_:$arch" — verify alternatives are handled as intended there.
sub parse ($$$) {
    my $self = shift;
    my $fh = shift;
    my $filename = shift;

    my $ctrl;
    PACKAGE_ENTRY:
    while (defined($ctrl = Dpkg::Control->new(type => CTRL_FILE_STATUS))
        && $ctrl->parse($fh, $filename)) {

        # So we've got a package
        my $pkg = $ctrl->{Package};
        my @deps = ();

        # Status is "<want> <flag> <status>"; skip anything not installed
        # or unpacked
        if ($ctrl->{Status} =~ /^\S+\s+\S+\s+(\S+)$/) {
            my $status = $1;
            unless ($status eq 'installed' or $status eq 'unpacked') {
                undef $ctrl;
                next PACKAGE_ENTRY;
            }
        }

        for my $dep (qw(Depends Pre-Depends)) {
            if (exists $ctrl->{$dep}) {
                my $value = $ctrl->{$dep};
                $value =~ s/\([^)]+\)//g;    # ignore versioning information
                $value =~ tr/ \t//d;         # remove spaces
                my @dep_pkgs = split /,/, $value;
                foreach my $dep_pkg (@dep_pkgs) {
                    my @dep_pkg_alts = split /\|/, $dep_pkg;
                    if (@dep_pkg_alts == 1) { push @deps, $dep_pkg_alts[0]; }
                    else { push @deps, \@dep_pkg_alts; }
                }
            }
        }

        $self->{$pkg} = \@deps;
        if ($ctrl->{Architecture} ne 'all' && multiarch) {
            my $arch = $ctrl->{Architecture};
            @deps = map { "$_:$arch" } @deps;
            $self->{"$pkg:$arch"} = \@deps;
        }
        undef $ctrl;
    }
}
+
+# Get direct dependency information for a specified package
+# Returns an array or array ref depending on context
+
+# Syntax: $obj->dependencies($package)
+
# Return the direct Depends/Pre-Depends recorded for $pkg by parse():
# a list in list context, an arrayref in scalar context, or undef when
# the package is unknown.  Croaks when no package is given.
sub dependencies ($$) {
    my ($self, $pkg) = @_;

    croak("requires package as parameter") unless defined $pkg;

    return undef unless exists $self->{$pkg};

    return wantarray ? @{ $self->{$pkg} } : $self->{$pkg};
}
+
+# Get full dependency information for a specified package or packages,
+# including the packages themselves.
+#
+# This only follows the first of sets of alternatives, and ignores
+# dependencies on packages which do not appear to exist.
+# Returns an array or array ref
+
+# Syntax: $obj->full_dependencies(@packages)
+
# Compute the transitive dependency closure of the given packages,
# including the packages themselves.  Only the first alternative of each
# "a | b" group is followed, and packages unknown to this object are
# ignored.  Returns a list in list context, an arrayref in scalar context.
sub full_dependencies ($@) {
    my ($self, @queue) = @_;
    my %seen;

    return wantarray ? () : [] unless @queue;

    while (@queue) {
        my $pkg = shift @queue;
        # Alternatives are stored as arrayrefs: follow the first entry only
        $pkg = $pkg->[0] if ref $pkg;
        next if exists $seen{$pkg};
        next unless exists $self->{$pkg};
        $seen{$pkg} = 1;
        push @queue, @{ $self->{$pkg} };
    }

    return wantarray ? keys %seen : [keys %seen];
}
+
+# Given a set of packages, find a minimal set with respect to the
+# pre-partial order of dependency.
+#
+# This is vaguely based on the dpkg-mindep script by
+# Bill Allombert <ballombe@debian.org>. It only follows direct
+# dependencies, and does not attempt to follow indirect dependencies.
+#
+# This respects the all packages in sets of alternatives.
+# Returns: (\@minimal_set, \%dependencies)
+# where the %dependencies hash is of the form
+# non-minimal package => depending package
+
+# Syntax: $obj->min_dependencies(@packages)
+
# Reduce @pkgs to a minimal set under the direct-dependency pre-partial
# order (see the block comment above).  Builds a dependency digraph over
# the given packages only, then repeatedly removes leaf nodes; cycles are
# broken by picking an arbitrary depended-upon node.
# Returns (\@minimal_set, \%dep_pkgs) where %dep_pkgs maps each removed
# package to one package that depends on it.
sub min_dependencies ($@) {
    my $self = shift;
    my @pkgs = @_;
    my @min_pkgs = ();
    my %dep_pkgs = ();

    return (\@min_pkgs, \%dep_pkgs) unless @pkgs;

    # We create a directed graph: the %forward_deps hash records arrows
    # pkg A depends on pkg B; the %reverse_deps hash records the
    # reverse arrows
    my %forward_deps;
    my %reverse_deps;

    # Initialise
    foreach my $pkg (@pkgs) {
        $forward_deps{$pkg} = {};
        $reverse_deps{$pkg} = {};
    }

    # Only record edges between packages of the input set; alternatives
    # (arrayrefs) are flattened so every alternative is respected.
    foreach my $pkg (@pkgs) {
        next unless exists $self->{$pkg};
        my @pkg_deps = @{ $self->{$pkg} };
        while (@pkg_deps) {
            my $dep = shift @pkg_deps;
            if (ref $dep) {
                unshift @pkg_deps, @$dep;
                next;
            }
            if (exists $forward_deps{$dep}) {
                $forward_deps{$pkg}{$dep} = 1;
                $reverse_deps{$dep}{$pkg} = 1;
            }
        }
    }

    # We start removing packages from the tree if they have no dependencies.
    # Once we have no such packages left, we must have mutual or cyclic
    # dependencies, so we pick a random one to remove and then start again.
    # We continue this until there are no packages left in the graph.
    PACKAGE:
    while (scalar keys %forward_deps) {
        foreach my $pkg (keys %forward_deps) {
            if (scalar keys %{ $forward_deps{$pkg} } == 0) {
                # Great, no dependencies!
                if (scalar keys %{ $reverse_deps{$pkg} }) {
                    # This package is depended upon, so we can remove it
                    # with care
                    foreach my $dep_pkg (keys %{ $reverse_deps{$pkg} }) {
                        # take the first mentioned package for the
                        # recorded list of depended-upon packages
                        $dep_pkgs{$pkg} ||= $dep_pkg;
                        delete $forward_deps{$dep_pkg}{$pkg};
                    }
                } else {
                    # This package is not depended upon, so it must
                    # go into our mindep list
                    push @min_pkgs, $pkg;
                }
                # Now remove this node
                delete $forward_deps{$pkg};
                delete $reverse_deps{$pkg};
                next PACKAGE;
            }
        }

        # Oh, we didn't find any package which didn't depend on any other.
        # We'll pick a random one, then. At least *some* package must
        # be depended upon in this situation; let's pick one of these.
        foreach my $pkg (keys %forward_deps) {
            next unless scalar keys %{ $reverse_deps{$pkg} } > 0;

            foreach my $dep_pkg (keys %{ $forward_deps{$pkg} }) {
                delete $reverse_deps{$dep_pkg}{$pkg};
            }
            foreach my $dep_pkg (keys %{ $reverse_deps{$pkg} }) {
                # take the first mentioned package for the
                # recorded list of depended-upon packages
                $dep_pkgs{$pkg} ||= $dep_pkg;
                delete $forward_deps{$dep_pkg}{$pkg};
            }

            # Now remove this node
            delete $forward_deps{$pkg};
            delete $reverse_deps{$pkg};
            # And onto the next package
            goto PACKAGE;
        }

        # Ouch! We shouldn't ever get here
        croak("Couldn't determine mindeps; this can't happen!");
    }

    return (\@min_pkgs, \%dep_pkgs);
}
+
+1;
diff --git a/lib/Devscripts/Packages.pm b/lib/Devscripts/Packages.pm
new file mode 100644
index 0000000..75acb45
--- /dev/null
+++ b/lib/Devscripts/Packages.pm
@@ -0,0 +1,313 @@
+#! /usr/bin/perl
+
+# Copyright Bill Allombert <ballombe@debian.org> 2001.
+# Modifications copyright 2002 Julian Gilbey <jdg@debian.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+package Devscripts::Packages;
+
+use strict;
+use warnings;
+use Carp;
+use Dpkg::Control;
+use Dpkg::IPC;
+use FileHandle;
+
+BEGIN {
+ use Exporter ();
+ use vars qw(@EXPORT @ISA %EXPORT_TAGS);
+ @EXPORT
+ = qw(PackagesToFiles FilesToPackages PackagesMatch InstalledPackages);
+ @ISA = qw(Exporter);
+ %EXPORT_TAGS = ();
+}
+
+=head1 NAME
+
+Devscript::Packages - Interface to the dpkg package database
+
+=head1 SYNOPSIS
+
+use Devscript::Packages;
+
+@files=PackagesToFiles(@packages);
+
+@packages=FilesToPackages(@files);
+
+@packages=PackagesMatch($regexp);
+
+$packages_hashref=InstalledPackages($sources);
+
+=head1 DESCRIPTION
+
+
+PackagesToFiles: Return a list of files contained in a list of packages.
+
+FilesToPackages: Return a list of packages containing at least
+one file in a list of files, taking care to handle diversions correctly.
+
+PackagesMatch: list of packages whose status match regexp.
+
+InstalledPackages: ref to hash with keys being installed packages
+(status = install ok installed). If $sources is true, then include
+the corresponding source packages as well in the list.
+
+=cut
+
# Cached result of "dpkg --assert-multi-arch": true when the installed
# dpkg supports multi-arch.  Computed once, on first call.
my $multiarch;

sub multiarch () {
    if (!defined $multiarch) {
        $multiarch
          = (system('dpkg --assert-multi-arch >/dev/null 2>&1') >> 8) == 0;
    }
    return $multiarch;
}
+
+# input: a list of packages names.
+# output: list of files they contain.
+
# Run "dpkg -L" on the given package names and return the union of files
# they contain.  Directories and diversion notices are filtered out.
# NOTE(review): uses the global bareword handle DPKG (shared with
# FilesToPackages); safe here because the calls are sequential.
sub PackagesToFiles (@) {
    return () if @_ == 0;

    my %files = ();

    # We fork and use an exec, so that we don't have to worry how long an
    # input string the shell can handle.

    my $pid;
    my $sleep_count = 0;
    # Retry the fork a few times (up to ~60s total) before giving up
    do {
        $pid = open(DPKG, "-|");
        unless (defined $pid) {
            carp("cannot fork: $!");
            croak("bailing out") if $sleep_count++ > 6;
            sleep 10;
        }
    } until defined $pid;

    if ($pid) {    # parent
        while (<DPKG>) {
            chomp;
            next if /^package diverts others to: / or -d $_;
            $files{$_} = 1;
        }
        close DPKG or croak("dpkg -L failed: $!");
    } else {    # child
        # We must use C locale, else diversion messages may be translated.
        $ENV{'LC_ALL'} = 'C';
        exec('dpkg', '-L', @_)
          or croak("can't exec dpkg -L: $!");
    }

    return keys %files;
}
+
+# This basically runs a dpkg -S with a few bells and whistles
+#
+# input: a list of files.
+# output: list of packages they belong to.
+
# This basically runs a dpkg -S with a few bells and whistles
#
# input: a list of files.
# output: list of packages they belong to, resolving diversions to the
# package whose copy of each file is actually in use.
sub FilesToPackages (@) {
    return () if @_ == 0;

    # We fork and use an exec, so that we don't have to worry how long an
    # input string the shell can handle.

    my @dpkg_out;
    my $pid;
    my $sleep_count = 0;
    # Retry the fork a few times (up to ~60s total) before giving up
    do {
        $pid = open(DPKG, "-|");
        unless (defined $pid) {
            carp("cannot fork: $!");
            croak("bailing out") if $sleep_count++ > 6;
            sleep 10;
        }
    } until defined $pid;

    if ($pid) {    # parent
        while (<DPKG>) {
            # We'll process it later
            chomp;
            push @dpkg_out, $_;
        }
        if (!close DPKG) {
            # exit status of 1 just indicates unrecognised files
            if ($? & 0xff || $? >> 8 != 1) {
                carp( "warning: dpkg -S exited with signal "
                      . ($? & 0xff)
                      . " and status "
                      . ($? >> 8));
            }
        }
    } else {    # child
        # We must use C locale, else diversion messages may be translated.
        $ENV{'LC_ALL'} = 'C';
        open STDERR, '>& STDOUT';    # Capture STDERR as well
        exec('dpkg', '-S', @_)
          or croak("can't exec dpkg -S: $!");
    }

    my %packages = ();
    foreach my $curfile (@_) {
        my $pkgfrom;
        foreach my $line (@dpkg_out) {
            # We want to handle diversions nicely.
            # Ignore local diversions
            if ($line =~ /^local diversion from: /) {
                # Do nothing
            } elsif ($line =~ /^local diversion to: (.+)$/) {
                if ($curfile eq $1) {
                    last;
                }
            } elsif ($line =~ /^diversion by (\S+) from: (.+)$/) {
                if ($curfile eq $2) {
                    # So the file we're looking has been diverted
                    $pkgfrom = $1;
                }
            } elsif ($line =~ /^diversion by (\S+) to: (.+)$/) {
                if ($curfile eq $2) {
                    # So the file we're looking is a diverted file
                    # We shouldn't see it again
                    $packages{$1} = 1;
                    last;
                }
            } elsif ($line =~ /^dpkg: \Q$curfile\E not found\.$/) {
                last;
            } elsif ($line
                =~ /^dpkg-query: no path found matching pattern \Q$curfile\E\.$/
            ) {
                last;
            } elsif ($line =~ /^(.*): \Q$curfile\E$/) {
                my @pkgs = split /,\s+/, $1;
                if (@pkgs == 1 || !grep /:/, @pkgs) {
                    # Only one package, or all Multi-Arch packages
                    map { $packages{$_} = 1 } @pkgs;
                } else {
                    # We've got a file which has been diverted by some package
                    # or is Multi-Arch and so is listed in two packages. If it
                    # was diverted, the *diverting* package is the one with the
                    # file that was actually used.
                    my $found = 0;
                    foreach my $pkg (@pkgs) {
                        # Fix: $pkgfrom stays undef when no "diversion by"
                        # line was seen; guard the comparison so it cannot
                        # emit "uninitialized value" warnings
                        if (defined $pkgfrom and $pkg eq $pkgfrom) {
                            $packages{$pkgfrom} = 1;
                            $found = 1;
                            last;
                        }
                    }
                    if (!$found) {
                        carp(
"Something wicked happened to the output of dpkg -S $curfile"
                        );
                    }
                }
                # Prepare for the next round
                last;
            }
        }
    }

    return keys %packages;
}
+
+# Return a list of packages whose status entries match a given pattern
+
# Return the list of installed packages whose full "dpkg --status" stanza
# (stringified control entry) matches the given regexp, using a multi-line
# match.  Package names are arch-qualified under multi-arch.
sub PackagesMatch ($) {
    my $match = $_[0];
    my @matches = ();

    my $fout = FileHandle->new;
    my $pid = spawn(
        exec => ['dpkg', '--status'],
        to_pipe => $fout
    );
    unless (defined $pid) {
        croak("Unable to run \"dpkg --status\": $!");
    }

    my $ctrl;
    while (defined($ctrl = Dpkg::Control->new())
        && $ctrl->parse($fout, 'dpkg --status')) {
        # "$ctrl" stringifies the whole stanza for matching
        if ("$ctrl" =~ m/$match/m) {
            my $package = $ctrl->{Package};
            if ($ctrl->{Architecture} ne 'all' && multiarch) {
                $package .= ":$ctrl->{Architecture}";
            }
            push @matches, $package;
        }
        undef $ctrl;
    }

    # nocheck: dpkg's exit status is deliberately ignored here
    wait_child($pid, cmdline => 'dpkg --status', nocheck => 1);
    return @matches;
}
+
+# Which packages are installed (Package and Source)?
+
# Return a hashref whose keys are the names of installed packages
# (status "install ok installed"), plus "pkg:arch" entries under a
# multi-arch dpkg.  When $source is true, the corresponding source
# package names are included as keys too.
sub InstalledPackages ($) {
    my $source = $_[0];

    my $fout = FileHandle->new;
    my $pid = spawn(
        exec => ['dpkg', '--status'],
        to_pipe => $fout
    );
    unless (defined $pid) {
        croak("Unable to run \"dpkg --status\": $!");
    }

    my $ctrl;
    my %matches;
    while (defined($ctrl = Dpkg::Control->new(type => CTRL_FILE_STATUS))
        && $ctrl->parse($fout, 'dpkg --status')) {
        if ($ctrl->{Status} !~ /^install\s+ok\s+installed$/) {
            next;
        }
        if ($source) {
            if (exists $ctrl->{Source}) {
                $matches{ $ctrl->{Source} } = 1;
            }
        }
        if (exists $ctrl->{Package}) {
            $matches{ $ctrl->{Package} } = 1;
            if ($ctrl->{Architecture} ne 'all' && multiarch) {
                $matches{"$ctrl->{Package}:$ctrl->{Architecture}"} = 1;
            }
        }
        undef $ctrl;
    }

    # nocheck: dpkg's exit status is deliberately ignored here
    wait_child($pid, cmdline => 'dpkg --status', nocheck => 1);

    return \%matches;
}
+
+1;
+
+=head1 AUTHOR
+
+Bill Allombert <ballombe@debian.org>
+
+=head1 COPYING
+
+Copyright 2001 Bill Allombert <ballombe@debian.org>
+Modifications copyright 2002 Julian Gilbey <jdg@debian.org>
+dpkg-depcheck is free software, covered by the GNU General Public License, and
+you are welcome to change it and/or distribute copies of it under
+certain conditions. There is absolutely no warranty for dpkg-depcheck.
+
+=cut
diff --git a/lib/Devscripts/Salsa.pm b/lib/Devscripts/Salsa.pm
new file mode 100755
index 0000000..5c02b16
--- /dev/null
+++ b/lib/Devscripts/Salsa.pm
@@ -0,0 +1,410 @@
+package Devscripts::Salsa;
+
+=head1 NAME
+
+Devscripts::Salsa - salsa(1) base object
+
+=head1 SYNOPSIS
+
+ use Devscripts::Salsa;
+ exit Devscripts::Salsa->new->run
+
+=head1 DESCRIPTION
+
+Devscripts::Salsa provides salsa(1) command launcher and some common utilities
+methods.
+
+=cut
+
+use strict;
+
+use Devscripts::Output;
+use Devscripts::Salsa::Config;
+
+BEGIN {
+ eval "use GitLab::API::v4;use GitLab::API::v4::Constants qw(:all)";
+ if ($@) {
+ print STDERR "You must install GitLab::API::v4\n";
+ exit 1;
+ }
+}
+use Moo;
+use File::Basename;
+use File::Path qw(make_path);
+
+# Command aliases
+use constant cmd_aliases => {
+ ci => 'last_ci_status',
+ co => 'checkout',
+ ls => 'list_repos',
+ search => 'search_project',
+ search_repo => 'search_project',
+ mr => 'merge_request',
+ mrs => 'merge_requests',
+ pipe => 'pipeline_schedule',
+ pipeline => 'pipeline_schedule', # preferred name
+ schedule => 'pipeline_schedule',
+ pipes => 'pipeline_schedules',
+ pipelines => 'pipeline_schedules', # preferred name
+ schedules => 'pipeline_schedules',
+};
+
+=head1 ACCESSORS
+
+=over
+
+=item B<config> : Devscripts::Salsa::Config object (parsed)
+
+=cut
+
+has config => (
+ is => 'rw',
+ default => sub { Devscripts::Salsa::Config->new->parse },
+);
+
+=item B<cache> : Devscripts::JSONCache object
+
+=cut
+
+# File cache to avoid polling Gitlab too much
+# (used to store ids, paths and names)
+has _cache => (
+ is => 'rw',
+ lazy => 1,
+ default => sub {
+ return {} unless ($_[0]->config->cache_file);
+ my %h;
+ eval {
+ my ($cache_file, $cache_dir) = fileparse $_[0]->config->cache_file;
+ if (!-d $cache_dir) {
+ make_path $cache_dir;
+ }
+ require Devscripts::JSONCache;
+ tie %h, 'Devscripts::JSONCache', $_[0]->config->cache_file;
+ ds_debug "Cache opened";
+ };
+ if ($@) {
+ ds_verbose "Unable to create cache object: $@";
+ return {};
+ }
+ return \%h;
+ },
+);
+has cache => (
+ is => 'rw',
+ lazy => 1,
+ default => sub {
+ $_[0]->_cache->{ $_[0]->config->api_url } //= {};
+ return $_[0]->_cache->{ $_[0]->config->api_url };
+ },
+);
+
+# In memory cache (used to avoid querying the project id twice when using
+# update_safe
+has projectCache => (
+ is => 'rw',
+ default => sub { {} },
+);
+
+=item B<api>: GitLab::API::v4 object
+
+=cut
+
+has api => (
+ is => 'rw',
+ lazy => 1,
+ default => sub {
+ my $r = GitLab::API::v4->new(
+ url => $_[0]->config->api_url,
+ (
+ $_[0]->config->private_token
+ ? (private_token => $_[0]->config->private_token)
+ : ()
+ ),
+ );
+ $r or ds_die "Unable to create GitLab::API::v4 object";
+ return $r;
+ },
+);
+
+=item User or group in use
+
+=over
+
+=item B<username>
+
+=item B<user_id>
+
+=item B<group_id>
+
+=item B<group_path>
+
+=back
+
+=cut
+
+# Accessors that resolve names, ids or paths
+has username => (
+ is => 'rw',
+ lazy => 1,
+ default => sub { $_[0]->id2username });
+
+has user_id => (
+ is => 'rw',
+ lazy => 1,
+ default => sub {
+ $_[0]->config->user_id || $_[0]->username2id;
+ },
+);
+
+has group_id => (
+ is => 'rw',
+ lazy => 1,
+ default => sub { $_[0]->config->group_id || $_[0]->group2id },
+);
+
+has group_path => (
+ is => 'rw',
+ lazy => 1,
+ default => sub {
+ my ($self) = @_;
+ return undef unless ($self->group_id);
+ return $self->cache->{group_path}->{ $self->{group_id} }
+ if $self->cache->{group_path}->{ $self->{group_id} };
+ return $self->{group_path} if ($self->{group_path}); # Set if --group
+ eval {
+ $self->{group_path}
+ = $self->api->group_without_projects($self->group_id)
+ ->{full_path};
+ $self->cache->{group_path}->{ $self->{group_id} }
+ = $self->{group_path};
+ };
+ if ($@) {
+ ds_verbose $@;
+ ds_warn "Unexistent group " . $self->group_id;
+ return undef;
+ }
+ return $self->{group_path};
+ },
+);
+
+=back
+
+=head1 METHODS
+
+=over
+
+=item B<run>: main method, load and run command and return Unix result code.
+
+=cut
+
# Main entry point: resolve command aliases, load the matching
# Devscripts::Salsa::<command> Moo role at runtime and invoke the
# command method with the remaining command-line arguments.
# Returns a Unix exit code (1 on unknown command).
sub run {
    my ($self, $args) = @_;
    binmode STDOUT, ':utf8';

    # Resolve short aliases (ci, co, ls, mr, ...) to their real command
    my $command = $self->config->command;
    if (my $tmp = cmd_aliases->{$command}) {
        $command = $tmp;
    }
    # Each command is a Moo role applied dynamically
    eval { with "Devscripts::Salsa::$command" };
    if ($@) {
        ds_verbose $@;
        ds_die "Unknown command $command";
        return 1;
    }
    return $self->$command(@ARGV);
}
+
+=back
+
+=head2 Utilities
+
+=over
+
+=item B<levels_name>, B<levels_code>: convert strings to GitLab level codes
+(owner, maintainer, developer, reporter and guest)
+
+=cut
+
# Map a human-readable access level name (guest, reporter, developer,
# maintainer, owner) to the corresponding GitLab numeric constant.
# Calls ds_die on unknown names.
sub levels_name {
    my $res = {

        # needs GitLab::API::v4::Constants 0.11
        # no_access => $GITLAB_ACCESS_LEVEL_NO_ACCESS,
        guest => $GITLAB_ACCESS_LEVEL_GUEST,
        reporter => $GITLAB_ACCESS_LEVEL_REPORTER,
        developer => $GITLAB_ACCESS_LEVEL_DEVELOPER,
        maintainer => $GITLAB_ACCESS_LEVEL_MASTER,
        owner => $GITLAB_ACCESS_LEVEL_OWNER,
    }->{ $_[1] };
    ds_die "Unknown access level '$_[1]'" unless ($res);
    return $res;
}
+
# Reverse of levels_name: map a GitLab numeric access level back to its
# name; returns undef for unknown codes.
sub levels_code {
    return {
        $GITLAB_ACCESS_LEVEL_GUEST => 'guest',
        $GITLAB_ACCESS_LEVEL_REPORTER => 'reporter',
        $GITLAB_ACCESS_LEVEL_DEVELOPER => 'developer',
        $GITLAB_ACCESS_LEVEL_MASTER => 'maintainer',
        $GITLAB_ACCESS_LEVEL_OWNER => 'owner',
    }->{ $_[1] };
}
+
+=item B<username2id>, B<id2username>: convert username to an id an reverse
+
+=cut
+
# Resolve a username to its GitLab numeric id, defaulting to --user or
# the token owner.  Numeric input is returned as-is; lookups are cached
# in $self->cache->{user_id}.
# Fix: removed the unreachable "return 1;" that followed
# "return ds_warn ..." in the invalid-token branch.
sub username2id {
    my ($self, $user) = @_;
    $user ||= $self->config->user || $self->api->current_user->{id};
    unless ($user) {
        return ds_warn "Token seems invalid";
    }
    unless ($user =~ /^\d+$/) {
        # Cached from a previous lookup?
        return $self->cache->{user_id}->{$user}
          if $self->cache->{user_id}->{$user};
        my $users = $self->api->users({ username => $user });
        return ds_die "Username '$user' not found"
          unless ($users and @$users);
        ds_verbose "$user id is $users->[0]->{id}";
        $self->cache->{user_id}->{$user} = $users->[0]->{id};
        return $users->[0]->{id};
    }
    return $user;
}
+
# Resolve a GitLab user id to a username (defaulting to --user-id or the
# token owner), using the local cache when possible.  Calls ds_die when
# the API lookup fails.
sub id2username {
    my ($self, $id) = @_;
    $id ||= $self->config->user_id || $self->api->current_user->{id};
    return $self->cache->{user}->{$id} if $self->cache->{user}->{$id};
    my $res = eval { $self->api->user($id)->{username} };
    if ($@) {
        ds_verbose $@;
        return ds_die "$id not found";
    }
    ds_verbose "$id is $res";
    $self->cache->{user}->{$id} = $res;
    return $res;
}
+
+=item B<group2id>: convert group name to id
+
+=cut
+
# Resolve a group name (defaulting to --group) to its GitLab id, setting
# group_path on the way; results are cached.  When several groups match,
# list them and ask the user to pick one with --group-id.
# Fix: the search fallback used to discard its result, leaving $groups
# undef so any name that failed the direct lookup always ended in
# "No group found".
sub group2id {
    my ($self, $name) = @_;
    $name ||= $self->config->group;
    return unless $name;
    if ($self->cache->{group_id}->{$name}) {
        $self->group_path($self->cache->{group_id}->{$name}->{path});
        return $self->group_id($self->cache->{group_id}->{$name}->{id});
    }
    my $groups = $self->api->group_without_projects($name);
    if ($groups) {
        $groups = [$groups];
    } else {
        # was: result discarded ($groups stayed undef)
        $groups = $self->api->groups({ search => $name });
    }
    return ds_die "No group found" unless ($groups and @$groups);
    if (scalar @$groups > 1) {
        ds_warn "More than one group found:";
        foreach (@$groups) {
            print <<END;
Id : $_->{id}
Name : $_->{name}
Full name: $_->{full_name}
Full path: $_->{full_path}

END
        }
        return ds_die "Set the chosen group id using --group-id.";
    }
    ds_verbose "$name id is $groups->[0]->{id}";
    $self->cache->{group_id}->{$name}->{path}
      = $self->group_path($groups->[0]->{full_path});
    $self->cache->{group_id}->{$name}->{id} = $groups->[0]->{id};
    return $self->group_id($groups->[0]->{id});
}
+
+=item B<project2id>: get id of a project.
+
+=cut
+
# Resolve a project (numeric id, "namespace/name" path, or bare name) to
# its numeric GitLab id, using the in-memory projectCache.  Returns undef
# when the project cannot be found.
sub project2id {
    my ($self, $project) = @_;
    return $project if ($project =~ /^\d+$/);
    my $res;
    # Qualify bare names with the default namespace first
    $project = $self->project2path($project);
    if ($self->projectCache->{$project}) {
        ds_debug "use cached id for $project";
        return $self->projectCache->{$project};
    }
    unless ($project =~ /^\d+$/) {
        eval { $res = $self->api->project($project)->{id}; };
        if ($@) {
            ds_debug $@;
            ds_warn "Project $project not found:";
            return undef;
        }
    }
    ds_verbose "$project id is $res";
    $self->projectCache->{$project} = $res;
    return $res;
}
+
+=item B<project2path>: get full path of a project
+
+=cut
+
# Return the full "namespace/project" path for $project.  A value already
# containing a slash is returned untouched; otherwise the default
# namespace from main_path() is prepended.  Returns undef when no
# namespace can be determined.
sub project2path {
    my ($self, $project) = @_;

    # Already fully qualified
    return $project if $project =~ m{/};

    my $namespace = $self->main_path
      or return undef;
    ds_verbose "Project $project => $namespace/$project";
    return "$namespace/$project";
}
+
+=item B<main_path>: build path using given group or user
+
+=cut
+
# Determine the default namespace for project paths, in priority order:
# explicit --path, the group's full path, then the username.  Warns and
# returns undef when none is available.
sub main_path {
    my ($self) = @_;
    my $path;
    if ($self->config->path) {
        $path = $self->config->path;
    } elsif (my $tmp = $self->group_path) {
        $path = $tmp;
    } elsif ($self->user_id) {
        $path = $self->username;
    } else {
        ds_warn "Unable to determine project path";
        return undef;
    }
    return $path;
}
+
+# GitLab::API::v4 does not permit to call /groups/:id with parameters.
+# It takes too much time for the "debian" group, since it returns the list of
+# all projects together with all the details of the projects
# GitLab::API::v4 does not permit to call /groups/:id with parameters.
# It takes too much time for the "debian" group, since it returns the list of
# all projects together with all the details of the projects.
# This monkey-patched helper queries GET /groups/:group_id with
# with_projects=0 so only the group metadata is fetched.
sub GitLab::API::v4::group_without_projects {
    my $self = shift;
    return $self->_call_rest_client('GET', 'groups/:group_id', [@_],
        { query => { with_custom_attributes => 0, with_projects => 0 } });
}
+
+1;
+
+=back
+
+=head1 AUTHOR
+
+Xavier Guimard E<lt>yadd@debian.orgE<gt>
+
+=head1 COPYRIGHT AND LICENSE
+
+Copyright 2018, Xavier Guimard E<lt>yadd@debian.orgE<gt>
diff --git a/lib/Devscripts/Salsa/Config.pm b/lib/Devscripts/Salsa/Config.pm
new file mode 100755
index 0000000..8c9ccbd
--- /dev/null
+++ b/lib/Devscripts/Salsa/Config.pm
@@ -0,0 +1,460 @@
+# Salsa configuration (inherits from Devscripts::Config)
+package Devscripts::Salsa::Config;
+
+use strict;
+use Devscripts::Output;
+use Moo;
+
+extends 'Devscripts::Config';
+
# Declare a read-write Moo accessor for every supported option so the
# parsed configuration can be queried as $config->option_name
for my $accessor (
    qw(
    all api_url cache_file command desc desc_pattern dest_branch rename_head
    disable_irker disable_kgb disable_tagpending irc_channel irker wiki
    snippets pages releases auto_devops request_acc issues mr repo forks
    lfs packages jobs container analytics requirements irker_server_url
    irker_host irker_port kgb kgb_server_url kgb_options mr_allow_squash
    mr_desc mr_dst_branch mr_dst_project mr_remove_source_branch mr_src_branch
    mr_src_project mr_title no_fail path private_token skip source_branch
    group group_id user user_id tagpending tagpending_server_url email
    email_recipient disable_email ci_config_path archived build_timeout
    enable_remove_branch disable_remove_branch all_archived git_server_url
    schedule_desc schedule_ref schedule_cron schedule_tz
    schedule_enable schedule_disable schedule_run schedule_delete
    avatar_path request_access
    )
) {
    has $accessor => (is => 'rw');
}
+
# Base directory used to build the default cache-file location below
my $cacheDir;

# Hook event toggles accepted by the KGB webhook; used by the rules
# below to validate --kgb-options values
our @kgbOpt = qw(push_events issues_events confidential_issues_events
    confidential_comments_events merge_requests_events tag_push_events
    note_events job_events pipeline_events wiki_page_events
    confidential_note_events enable_ssl_verification);

# Resolved at compile time so it is available when the "keys" constant
# below is built; honours XDG_CACHE_HOME, falling back to ~/.cache
BEGIN {
    $cacheDir = $ENV{XDG_CACHE_HOME} || $ENV{HOME} . '/.cache';
}
+
# Options
#
# Each entry is a Devscripts::Config option tuple:
#   [ Getopt::Long spec, config/env variable name,
#     validator (qr// | 'bool' | coderef) or undef, default ]
# Trailing elements may be omitted. NOTE(review): the exact tuple
# semantics live in Devscripts::Config -- confirm there.
use constant keys => [

    # General options
    [
        'C|chdir=s', undef,
        sub { return (chdir($_[1]) ? 1 : (0, "$_[1] doesn't exist")) }
    ],
    # NOTE(review): 'cache-file' has no '=s' suffix, so on the command
    # line it parses as a boolean flag although it carries a path value
    # -- verify against Devscripts::Config handling
    [
        'cache-file',
        'SALSA_CACHE_FILE',
        sub {
            $_[0]->cache_file($_[1] ? $_[1] : undef);
        },
        "$cacheDir/salsa.json"
    ],
    [
        'no-cache',
        'SALSA_NO_CACHE',
        sub {
            $_[0]->cache_file(undef)
            if ($_[1] !~ /^(?:no|0+)$/i);
            return 1;
        }
    ],
    ['debug', undef, sub { $verbose = 2 }],
    ['info|i', 'SALSA_INFO', sub { info(-1, 'SALSA_INFO', @_) }],
    [
        'path=s',
        'SALSA_REPO_PATH',
        sub {
            $_ = $_[1];
            s#/*(.*)/*#$1#;
            $_[0]->path($_);
            return /^[\w\d\-]+$/ ? 1 : (0, "Bad path $_");
        }
    ],
    ['group=s', 'SALSA_GROUP', qr/^[\/\-\w]+$/],
    ['group-id=s', 'SALSA_GROUP_ID', qr/^\d+$/],
    # NOTE(review): 'token' and 'token-file' also lack '=s' -- same
    # concern as 'cache-file' above
    ['token', 'SALSA_TOKEN', sub { $_[0]->private_token($_[1]) }],
    [
        'token-file',
        'SALSA_TOKEN_FILE',
        sub {
            my ($self, $v) = @_;
            return (0, "Unable to open token file") unless (-r $v);
            open F, $v;
            my $s = join '', <F>;
            close F;
            if ($s
                =~ m/^[^#]*(?:SALSA_(?:PRIVATE_)?TOKEN)\s*=\s*(["'])?([-\w]+)\1?$/m
            ) {
                $self->private_token($2);
                return 1;
            } else {
                return (0, "No token found in file $v");
            }
        }
    ],
    ['user=s', 'SALSA_USER', qr/^[\-\w]+$/],
    ['user-id=s', 'SALSA_USER_ID', qr/^\d+$/],
    ['verbose', 'SALSA_VERBOSE', sub { $verbose = 1 }],
    ['yes!', 'SALSA_YES', sub { info(1, "SALSA_YES", @_) },],

    # Update/create repo options
    ['all'],
    ['all-archived'],
    ['skip=s', 'SALSA_SKIP', undef, sub { [] }],
    [
        'skip-file=s',
        'SALSA_SKIP_FILE',
        sub {
            return 1 unless $_[1];
            return (0, "Unable to read $_[1]") unless (-r $_[1]);
            open my $fh, $_[1];
            push @{ $_[0]->skip }, (map { chomp $_; ($_ ? $_ : ()) } <$fh>);
            return 1;
        }
    ],
    # NOTE(review): skip_file is not in the accessor list declared above
    # -- presumably Devscripts::Config provides it; verify
    ['no-skip', undef, sub { $_[0]->skip([]); $_[0]->skip_file(undef); }],
    ['build-timeout=s', 'SALSA_BUILD_TIMEOUT', qr/^\d+$/, '3600'],
    ['ci-config-path=s', 'SALSA_CI_CONFIG_PATH', qr/\./],
    ['desc!', 'SALSA_DESC', 'bool'],
    ['desc-pattern=s', 'SALSA_DESC_PATTERN', qr/\w/, 'Debian package %p'],
    [
        'enable-remove-source-branch!',
        undef,
        sub {
            !$_[1]
            or $_[0]
            ->enable('yes', 'enable_remove_branch', 'disable_remove_branch');
        }
    ],
    [
        'disable-remove-source-branch!',
        undef,
        sub {
            !$_[1]
            or $_[0]
            ->enable('no', 'enable_remove_branch', 'disable_remove_branch');
        }
    ],
    [
        undef,
        'SALSA_REMOVE_SOURCE_BRANCH',
        sub {
            $_[0]
            ->enable($_[1], 'enable_remove_branch', 'disable_remove_branch');
        }
    ],
    [
        'issues=s', 'SALSA_ENABLE_ISSUES',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'repo=s', 'SALSA_ENABLE_REPO',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'mr=s', 'SALSA_ENABLE_MR',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'forks=s', 'SALSA_ENABLE_FORKS',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'lfs=s', 'SALSA_ENABLE_LFS',
        qr/y(es)?|true|enabled?|no?|false|disabled?/
    ],
    [
        'packages=s',
        'SALSA_ENABLE_PACKAGES',
        qr/y(es)?|true|enabled?|no?|false|disabled?/
    ],
    [
        'jobs=s', 'SALSA_ENABLE_JOBS',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'container=s', 'SALSA_ENABLE_CONTAINER',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'analytics=s', 'SALSA_ENABLE_ANALYTICS',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'requirements=s',
        'SALSA_ENABLE_REQUIREMENTS',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'wiki=s', 'SALSA_ENABLE_WIKI',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'snippets=s', 'SALSA_ENABLE_SNIPPETS',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'pages=s', 'SALSA_ENABLE_PAGES',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'releases=s', 'SALSA_ENABLE_RELEASES',
        qr/y(es)?|true|enabled?|private|no?|false|disabled?/
    ],
    [
        'auto-devops=s',
        'SALSA_ENABLE_AUTO_DEVOPS',
        qr/y(es)?|true|enabled?|no?|false|disabled?/
    ],
    [
        'request-acc=s',
        'SALSA_ENABLE_REQUEST_ACC',
        qr/y(es)?|true|enabled?|no?|false|disabled?/
    ],
    [
        'email!', undef,
        sub { !$_[1] or $_[0]->enable('yes', 'email', 'disable_email'); }
    ],
    [
        'disable-email!', undef,
        sub { !$_[1] or $_[0]->enable('no', 'email', 'disable_email'); }
    ],
    [
        undef, 'SALSA_EMAIL',
        sub { $_[0]->enable($_[1], 'email', 'disable_email'); }
    ],
    ['email-recipient=s', 'SALSA_EMAIL_RECIPIENTS', undef, sub { [] },],
    ['irc-channel|irc=s', 'SALSA_IRC_CHANNEL', undef, sub { [] }],
    [
        'irker!', undef,
        sub { !$_[1] or $_[0]->enable('yes', 'irker', 'disable_irker'); }
    ],
    [
        'disable-irker!', undef,
        sub { !$_[1] or $_[0]->enable('no', 'irker', 'disable_irker'); }
    ],
    [
        undef, 'SALSA_IRKER',
        sub { $_[0]->enable($_[1], 'irker', 'disable_irker'); }
    ],
    ['irker-host=s', 'SALSA_IRKER_HOST', undef, 'ruprecht.snow-crash.org'],
    ['irker-port=s', 'SALSA_IRKER_PORT', qr/^\d*$/],
    [
        'kgb!', undef,
        sub { !$_[1] or $_[0]->enable('yes', 'kgb', 'disable_kgb'); }
    ],
    [
        'disable-kgb!', undef,
        sub { !$_[1] or $_[0]->enable('no', 'kgb', 'disable_kgb'); }
    ],
    [undef, 'SALSA_KGB', sub { $_[0]->enable($_[1], 'kgb', 'disable_kgb'); }],
    [
        'kgb-options=s',
        'SALSA_KGB_OPTIONS',
        qr/\w/,
        'push_events,issues_events,merge_requests_events,tag_push_events,'
          . 'note_events,pipeline_events,wiki_page_events,'
          . 'enable_ssl_verification'
    ],

    ['no-fail', 'SALSA_NO_FAIL', 'bool'],
    ['rename-head!', 'SALSA_RENAME_HEAD', 'bool'],
    ['avatar-path=s', 'SALSA_AVATAR_PATH', undef],
    ['source-branch=s', 'SALSA_SOURCE_BRANCH', undef, 'master'],
    ['dest-branch=s', 'SALSA_DEST_BRANCH', undef, 'debian/master'],
    [
        'tagpending!',
        undef,
        sub {
            !$_[1]
            or $_[0]->enable('yes', 'tagpending', 'disable_tagpending');
        }
    ],
    [
        'disable-tagpending!',
        undef,
        sub {
            !$_[1] or $_[0]->enable('no', 'tagpending', 'disable_tagpending');
        }
    ],
    [
        undef, 'SALSA_TAGPENDING',
        sub { $_[0]->enable($_[1], 'tagpending', 'disable_tagpending'); }
    ],

    # Pipeline schedules options
    ['schedule-desc=s', 'SALSA_SCHEDULE_DESC', qr/\w/],
    ['schedule-ref=s', 'SALSA_SCHEDULE_REF'],
    ['schedule-cron=s', 'SALSA_SCHEDULE_CRON'],
    ['schedule-tz=s', 'SALSA_SCHEDULE_TZ'],
    ['schedule-enable!', 'SALSA_SCHEDULE_ENABLE', 'bool'],
    ['schedule-disable!', 'SALSA_SCHEDULE_DISABLE', 'bool'],
    ['schedule-run!', 'SALSA_SCHEDULE_RUN', 'bool'],
    ['schedule-delete!', 'SALSA_SCHEDULE_DELETE', 'bool'],

    # Merge requests options
    ['mr-allow-squash!', 'SALSA_MR_ALLOW_SQUASH', 'bool', 1],
    ['mr-desc=s'],
    ['mr-dst-branch=s', undef, undef, 'master'],
    ['mr-dst-project=s'],
    ['mr-remove-source-branch!', 'SALSA_MR_REMOVE_SOURCE_BRANCH', 'bool', 0],
    ['mr-src-branch=s'],
    ['mr-src-project=s'],
    ['mr-title=s'],

    # Options to manage other Gitlab instances
    [
        'api-url=s', 'SALSA_API_URL',
        qr#^https?://#, 'https://salsa.debian.org/api/v4'
    ],
    [
        'git-server-url=s', 'SALSA_GIT_SERVER_URL',
        qr/^\S+\@\S+/, 'git@salsa.debian.org:'
    ],
    [
        'irker-server-url=s', 'SALSA_IRKER_SERVER_URL',
        qr'^ircs?://', 'ircs://irc.oftc.net:6697/'
    ],
    [
        'kgb-server-url=s', 'SALSA_KGB_SERVER_URL',
        qr'^https?://', 'http://kgb.debian.net:9418/webhook/?channel='
    ],
    [
        'tagpending-server-url=s',
        'SALSA_TAGPENDING_SERVER_URL',
        qr'^https?://',
        'https://webhook.salsa.debian.org/tagpending/'
    ],

    [
        'request-access=s',
        'SALSA_REQUEST_ACCESS',
        qr/y(es)?|true|enabled?|1|no?|false|disabled?|0/
    ],

    # List/search options
    ['archived!', 'SALSA_ARCHIVED', 'bool', 0],
];
+
# Consistency rules
#
# Each element is a callback run after option parsing; it receives the
# config object. NOTE(review): judging by the messages, returning
# (0, $msg) aborts while (1, $msg) only warns -- confirm against
# Devscripts::Config.
use constant rules => [
    # Warn when no token is configured: API write operations will fail
    sub {
        return (1,
"SALSA_TOKEN not set in configuration files. Some commands may fail"
        ) unless ($_[0]->private_token);
    },
    # The first remaining CLI argument is the salsa sub-command
    sub {
        return (0, "No command given, aborting") unless (@ARGV);
        $_[0]->command(shift @ARGV);
        return (0, "Malformed command: " . $_[0]->command)
          unless ($_[0]->command =~ /^[a-z_]+$/);
        return 1;
    },
    # User selection wins over group selection
    sub {
        if ( ($_[0]->group or $_[0]->group_id)
            and ($_[0]->user_id or $_[0]->user)) {
            ds_warn(
                "Both --user-id and --group-id are set, ignore --group-id");
            $_[0]->group(undef);
            $_[0]->group_id(undef);
        }
        return 1;
    },
    # --group-id wins over --group
    sub {
        if ($_[0]->group and $_[0]->group_id) {
            ds_warn("Both --group-id and --group are set, ignore --group");
            $_[0]->group(undef);
        }
        return 1;
    },
    # --user-id wins over --user
    sub {
        if ($_[0]->user and $_[0]->user_id) {
            ds_warn("Both --user-id and --user are set, ignore --user");
            $_[0]->user(undef);
        }
        return 1;
    },
    # --email requires at least one recipient
    sub {
        if ($_[0]->email and not @{ $_[0]->email_recipient }) {
            return (0, '--email-recipient needed with --email');
        }
        return 1;
    },
    # IRC channel sanity checks (KGB accepts a single channel only)
    sub {
        if (@{ $_[0]->irc_channel }) {
            foreach (@{ $_[0]->irc_channel }) {
                if (/^#/) {
                    return (1,
"# found in --irc-channel, assuming double hash is wanted"
                    );
                }
            }
            if ($_[0]->irc_channel->[1] and $_[0]->kgb) {
                return (0, "Only one IRC channel is accepted with --kgb");
            }
        }
        return 1;
    },
    # Split, sort and validate --kgb-options against the known @kgbOpt
    sub {
        $_[0]->kgb_options([sort split ',\s*', $_[0]->kgb_options]);
        my @err;
        foreach my $o (@{ $_[0]->kgb_options }) {
            unless (grep { $_ eq $o } @kgbOpt) {
                push @err, $o;
            }
        }
        return (0, 'Unknown KGB options: ' . join(', ', @err))
          if @err;
        return 1;
    },
];
+
# Print a short usage summary; full documentation lives in salsa(1).
# The heredoc contains no variables, so a non-interpolating quote is
# used -- output is identical.
sub usage {
    print <<'END';
usage: salsa <command> <parameters> <options>

Most used commands:
 - checkout, co: clone repo in current dir
 - fork : fork a project
 - mr : create a merge request
 - push_repo : push local git repo to upstream repository
 - whoami : gives information on the token owner

See salsa(1) manpage for more.
END
}
+
# Shared handler for SALSA_INFO / SALSA_YES: translate a yes/no style
# value ($nv) into the global prompt policy $ds_yes. $num is the value
# $ds_yes takes when enabled; $key names the setting for error messages.
# Returns (0, $msg) on an unrecognized value.
sub info {
    my ($num, $key, undef, $nv) = @_;
    # The alternations must be grouped and anchored: the original
    # /^yes|1$/ parsed as (^yes)|(1$), so "yesterday" or "21" matched.
    # Both branches are now case-insensitive for consistency.
    $nv = (
          $nv =~ /^(?:yes|1)$/i ? $num
        : $nv =~ /^(?:no|0)$/i  ? 0
        :   return (0, "Bad $key value"));
    $ds_yes = $nv;
}
+
# Set the paired enable/disable flags ($en / $dis) on $self from a
# yes/no/ignore value. Returns 1 on success, (0, $msg) on a bad value.
sub enable {
    my ($self, $value, $on_field, $off_field) = @_;
    # Each recognized value maps to [enable-flag, disable-flag]
    my %state_for = (
        ignore => [0, 0],
        yes    => [1, 0],
        no     => [0, 1],
    );
    my $state = $state_for{ lc $value }
      or return (0, "Bad value for SALSA_" . uc($on_field));
    ($self->{$on_field}, $self->{$off_field}) = @$state;
    return 1;
}
+
+1;
diff --git a/lib/Devscripts/Salsa/Hooks.pm b/lib/Devscripts/Salsa/Hooks.pm
new file mode 100644
index 0000000..95a2f47
--- /dev/null
+++ b/lib/Devscripts/Salsa/Hooks.pm
@@ -0,0 +1,303 @@
+# Common hooks library
+package Devscripts::Salsa::Hooks;
+
+use strict;
+use Devscripts::Output;
+use Moo::Role;
+
# Install, replace or remove the webhooks/integrations managed by salsa
# (KGB, Irker, email-on-push, tagpending) on project $repo_id; $repo is
# the project name, used to expand %p in email recipients and to build
# the tagpending URL. Existing managed hooks are deleted first, then
# re-created unless the corresponding --disable-* option is set.
# NOTE(review): returns 1 on several error paths and 0 on success, but
# also 1 for the harmless "nothing to do" case inside enabled_hooks()
# failure -- callers (e.g. create_repo) ignore the value; verify before
# relying on it.
sub add_hooks {
    my ($self, $repo_id, $repo) = @_;
    if ( $self->config->kgb
        or $self->config->disable_kgb
        or $self->config->tagpending
        or $self->config->disable_tagpending
        or $self->config->irker
        or $self->config->disable_irker
        or $self->config->email
        or $self->config->disable_email) {
        my $hooks = $self->enabled_hooks($repo_id);
        return 1 unless (defined $hooks);
        # KGB hook (IRC)
        if ($self->config->kgb or $self->config->disable_kgb) {
            unless ($self->config->irc_channel->[0]
                or $self->config->disable_kgb) {
                ds_warn "--kgb needs --irc-channel";
                return 1;
            }
            if ($self->config->irc_channel->[1]) {
                ds_warn "KGB accepts only one --irc-channel value,";
            }
            # Replace rather than update: drop any previous KGB hook
            if ($hooks->{kgb}) {
                ds_warn "Deleting old kgb (was $hooks->{kgb}->{url})";
                $self->api->delete_project_hook($repo_id, $hooks->{kgb}->{id});
            }
            if ($self->config->irc_channel->[0]
                and not $self->config->disable_kgb) {
                # TODO: if useful, add parameters for this options
                eval {
                    $self->api->create_project_hook(
                        $repo_id,
                        {
                            url => $self->config->kgb_server_url
                              . $self->config->irc_channel->[0],
                            # every configured kgb option becomes a true flag
                            map { ($_ => 1) } @{ $self->config->kgb_options },
                        });
                    ds_verbose "KGB hook added to project $repo_id (channel: "
                      . $self->config->irc_channel->[0] . ')';
                };
                if ($@) {
                    ds_warn "Fail to add KGB hook: $@";
                    if (!$self->config->no_fail) {
                        return 1;
                    }
                }
            }
        }
        # Irker hook (IRC)
        if ($self->config->irker or $self->config->disable_irker) {
            unless ($self->config->irc_channel->[0]
                or $self->config->disable_irker) {
                ds_warn "--irker needs --irc-channel";
                return 1;
            }
            if ($hooks->{irker}) {
                no warnings;
                ds_warn
"Deleting old irker (redirected to $hooks->{irker}->{recipients})";
                $self->api->delete_project_service($repo_id, 'irker');
            }
            if ($self->config->irc_channel->[0]
                and not $self->config->disable_irker) {
                # TODO: if useful, add parameters for this options
                # Irker accepts several channels, each prefixed with '#'
                my $ch = join(' ',
                    map { '#' . $_ } @{ $self->config->irc_channel });
                $self->api->edit_project_service(
                    $repo_id, 'irker',
                    {
                        active      => 1,
                        server_host => $self->config->irker_host,
                        (
                            $self->config->irker_port
                            ? (server_port => $self->config->irker_port)
                            : ()
                        ),
                        default_irc_uri   => $self->config->irker_server_url,
                        recipients        => $ch,
                        colorize_messages => 1,
                    });
                ds_verbose
                  "Irker hook added to project $repo_id (channel: $ch)";
            }
        }
        # email on push
        if ($self->config->email or $self->config->disable_email) {
            if ($hooks->{email}) {
                no warnings;
                ds_warn
"Deleting old email-on-push (redirected to $hooks->{email}->{recipients})";
                $self->api->delete_project_service($repo_id, 'emails-on-push');
            }
            if (@{ $self->config->email_recipient }
                and not $self->config->disable_email) {
                # TODO: if useful, add parameters for this options
                # %p in a recipient address expands to the project name
                $self->api->edit_project_service(
                    $repo_id,
                    'emails-on-push',
                    {
                        recipients => join(' ',
                            map { my $a = $_; $a =~ s/%p/$repo/; $a }
                              @{ $self->config->email_recipient }),
                    });
                no warnings;
                ds_verbose
                  "Email-on-push hook added to project $repo_id (recipients: "
                  . join(' ', @{ $self->config->email_recipient }) . ')';
            }
        }
        # Tagpending hook
        if ($self->config->tagpending or $self->config->disable_tagpending) {
            if ($hooks->{tagpending}) {
                ds_warn
                  "Deleting old tagpending (was $hooks->{tagpending}->{url})";
                $self->api->delete_project_hook($repo_id,
                    $hooks->{tagpending}->{id});
            }
            my $repo_name = $self->api->project($repo_id)->{name};
            unless ($self->config->disable_tagpending) {
                eval {
                    $self->api->create_project_hook(
                        $repo_id,
                        {
                            url => $self->config->tagpending_server_url
                              . $repo_name,
                            push_events => 1,
                        });
                    ds_verbose "Tagpending hook added to project $repo_id";
                };
                if ($@) {
                    ds_warn "Fail to add Tagpending hook: $@";
                    if (!$self->config->no_fail) {
                        return 1;
                    }
                }
            }
        }
    }
    return 0;
}
+
# Query which of the hooks/integrations managed by salsa are currently
# active on project $repo_id. Returns a hashref that may contain the
# keys kgb, tagpending (webhook id/url/options) and email, irker
# (service properties), or undef when the hook list cannot be fetched.
sub enabled_hooks {
    my ($self, $repo_id) = @_;
    my $res = {};
    if ( $self->config->kgb
        or $self->config->disable_kgb
        or $self->config->tagpending
        or $self->config->disable_tagpending) {
        my $hooks = eval { $self->api->project_hooks($repo_id) };
        if ($@) {
            ds_warn "Unable to check hooks for project $repo_id";
            return undef;
        }
        # A managed hook is recognized by the server URL it points to
        foreach my $h (@{$hooks}) {
            $res->{kgb} = {
                id      => $h->{id},
                url     => $h->{url},
                # collect the event flags that are switched on
                options => [grep { $h->{$_} and $h->{$_} eq 1 } keys %$h],
              }
              if $h->{url} =~ /\Q$self->{config}->{kgb_server_url}\E/;
            $res->{tagpending} = {
                id  => $h->{id},
                url => $h->{url},
              }
              if $h->{url} =~ /\Q$self->{config}->{tagpending_server_url}\E/;
        }
    }
    # Use lexicals here: the original assigned to the global $_ inside
    # the conditions without local(), clobbering the caller's $_.
    if ($self->config->email or $self->config->disable_email) {
        my $srv = $self->api->project_service($repo_id, 'emails-on-push');
        $res->{email} = $srv->{properties} if $srv and $srv->{active};
    }
    if ($self->config->irker or $self->config->disable_irker) {
        my $srv = $self->api->project_service($repo_id, 'irker');
        $res->{irker} = $srv->{properties} if $srv and $srv->{active};
    }
    return $res;
}
+
# Translate one SALSA_ENABLE_* style value into a GitLab API parameter
# pair pushed onto @$res_ref.
#   $config         - raw config value (yes/no/true/false/enabled/...,
#                     possibly "private" when $can_be_private)
#   $key_name       - API attribute name to push
#   $config_name    - config variable name, for the error message
#   $can_be_private - access-level attribute (string values) vs plain
#                     boolean attribute (1/0)
# Returns undef without touching @$res_ref when $config is unset.
sub _check_config {
    my ($config, $key_name, $config_name, $can_be_private, $res_ref) = @_;
    if (!$config) { return undef; }
    # NOTE: the original code tested "elsif (qr/.../)", i.e. a compiled
    # Regexp object in boolean context, which is ALWAYS true -- so
    # "no"/"false"/"disabled" values were silently treated as enabled.
    # Match the value against the patterns instead.
    if ($can_be_private and $config eq "private") {
        push @$res_ref, $key_name => "private";
    } elsif ($config =~ /y(es)?|true|enabled?/) {
        push @$res_ref, $key_name => ($can_be_private ? "enabled" : 1);
    } elsif ($config =~ /no?|false|disabled?/) {
        push @$res_ref, $key_name => ($can_be_private ? "disabled" : 0);
    } else {
        print "error with SALSA_$config_name";
    }
    return;
}
+
# Build the flat key/value list of GitLab project attributes implied by
# the current configuration: description, build timeout, per-feature
# access levels, branch-removal policy, CI config path and request
# access. Suitable for the project create/edit API calls.
sub desc {
    my ($self, $repo) = @_;
    my @attrs;
    if ($self->config->desc) {
        my $description = $self->config->desc_pattern;
        $description =~ s/%P/$repo/g;    # %P: full project path
        $repo =~ s#.*/##;                # keep bare name for %p below
        $description =~ s/%p/$repo/g;    # %p: project name only
        push @attrs, description => $description;
    }
    push @attrs, build_timeout => $self->config->build_timeout
      if $self->config->build_timeout;

    # One row per feature toggle:
    #   [ config accessor, API key, config name, accepts "private" ]
    my @features = (
        [issues       => 'issues_access_level',             'ENABLE_ISSUES',       1],
        [repo         => 'repository_access_level',         'ENABLE_REPO',         1],
        [mr           => 'merge_requests_access_level',     'ENABLE_MR',           1],
        [forks        => 'forking_access_level',            'ENABLE_FORKS',        1],
        [lfs          => 'lfs_enabled',                     'ENABLE_LFS',          0],
        [packages     => 'packages_enabled',                'ENABLE_PACKAGES',     0],
        [jobs         => 'builds_access_level',             'ENABLE_JOBS',         1],
        [container    => 'container_registry_access_level', 'ENABLE_CONTAINER',    1],
        [analytics    => 'analytics_access_level',          'ENABLE_ANALYTICS',    1],
        [requirements => 'requirements_access_level',       'ENABLE_REQUIREMENTS', 1],
        [wiki         => 'wiki_access_level',               'ENABLE_WIKI',         1],
        [snippets     => 'snippets_access_level',           'ENABLE_SNIPPETS',     1],
        [pages        => 'pages_access_level',              'ENABLE_PAGES',        1],
        [releases     => 'releases_access_level',           'ENABLE_RELEASES',     1],
        [auto_devops  => 'auto_devops_enabled',             'ENABLE_AUTO_DEVOPS',  0],
        [request_acc  => 'request_access_enabled',          'ENABLE_REQUEST_ACC',  0],
    );
    for my $feature (@features) {
        my ($accessor, $key, $name, $can_be_private) = @$feature;
        _check_config($self->config->$accessor,
            $key, $name, $can_be_private, \@attrs);
    }

    if ($self->config->disable_remove_branch) {
        push @attrs, remove_source_branch_after_merge => 0;
    } elsif ($self->config->enable_remove_branch) {
        push @attrs, remove_source_branch_after_merge => 1;
    }
    push @attrs, ci_config_path => $self->config->ci_config_path
      if $self->config->ci_config_path;
    if ($self->config->request_access) {
        push @attrs,
          request_access_enabled =>
          ($self->config->request_access =~ qr/y(es)?|true|enabled?|1/
            ? 1
            : 0);
    }
    return @attrs;
}
+
# Build the multipart (file-upload) project attributes; currently only
# the avatar. Returns a flat key/value list, empty when no avatar is
# configured. Exits (unless --no-fail) when the avatar file is missing.
sub desc_multipart {
    my ($self, $repo) = @_;
    my @attrs;
    return @attrs unless $self->config->avatar_path;
    my $avatar = $self->config->avatar_path;
    $avatar =~ s/%p/$repo/g;    # %p: project name
    if (-r $avatar) {
        # avatar_path (salsa) -> avatar (GitLab API)
        push @attrs, avatar => $avatar;
    } elsif ($self->config->no_fail) {
        ds_warn "Unable to find: $avatar";
    } else {
        ds_error("Unable to find: $avatar");
        exit 1;
    }
    return @attrs;
}
+
+1;
diff --git a/lib/Devscripts/Salsa/Repo.pm b/lib/Devscripts/Salsa/Repo.pm
new file mode 100755
index 0000000..0370810
--- /dev/null
+++ b/lib/Devscripts/Salsa/Repo.pm
@@ -0,0 +1,73 @@
+# Common method to get projects
+package Devscripts::Salsa::Repo;
+
+use strict;
+use Devscripts::Output;
+use Moo::Role;
+
+with "Devscripts::Salsa::Hooks";
+
# Resolve the list of projects to act on.
# With --all/--all-archived (and no explicit names) every project of the
# selected group or user is listed through the paginated API, minus the
# --skip'ed ones; otherwise the given names are resolved individually.
# Returns a list of [id, path] pairs on success.
# NOTE(review): on error this returns the scalar 1, so callers must
# test "ref $repos[0]" to tell the two shapes apart -- see check_repo.
sub get_repo {
    my ($self, $prompt, @reponames) = @_;
    my @repos;
    if (($self->config->all or $self->config->all_archived)
        and @reponames == 0) {
        ds_debug "--all is set";
        my $options = {};
        $options->{order_by} = 'name';
        $options->{sort} = 'asc';
        # unless --all-archived, hide archived projects from the listing
        $options->{archived} = 'false' if not $self->config->all_archived;
        my $projects;
        # This rule disallow trying to configure all "Debian" projects:
        # - Debian id is 2
        # - next is 1987
        if ($self->group_id) {
            $projects
              = $self->api->paginator('group_projects', $self->group_id,
                $options)->all;
        } elsif ($self->user_id) {
            $projects
              = $self->api->paginator('user_projects', $self->user_id,
                $options)->all;
        } else {
            ds_warn "Missing or invalid token";
            return 1;
        }
        unless ($projects) {
            ds_warn "No projects found";
            return 1;
        }
        # Prime the path->id cache while building the [id, path] pairs
        @repos = map {
            $self->projectCache->{ $_->{path_with_namespace} } = $_->{id};
            [$_->{id}, $_->{path}]
        } @$projects;
        # Drop any project whose last path component matches a --skip entry
        if (@{ $self->config->skip }) {
            @repos = map {
                my $res = 1;
                foreach my $k (@{ $self->config->skip }) {
                    $res = 0 if ($_->[1] =~ m#(?:.*/)?\Q$k\E#);
                }
                $res ? $_ : ();
            } @repos;
        }
        # Ask for confirmation before mass-configuring, unless --yes or
        # the caller suppressed prompting
        if ($ds_yes > 0 or !$prompt) {
            ds_verbose "Found " . @repos . " projects";
        } else {
            unless (
                ds_prompt(
                    "You're going to configure "
                      . @repos
                      . " projects. Continue (N/y) "
                ) =~ accept
            ) {
                ds_warn "Aborting";
                return 1;
            }
        }
    } else {
        @repos = map { [$self->project2id($_), $_] } @reponames;
    }
    return @repos;
}
+
+1;
diff --git a/lib/Devscripts/Salsa/add_user.pm b/lib/Devscripts/Salsa/add_user.pm
new file mode 100644
index 0000000..3968fb3
--- /dev/null
+++ b/lib/Devscripts/Salsa/add_user.pm
@@ -0,0 +1,40 @@
+# Adds a user in a group with a role
+package Devscripts::Salsa::add_user;
+
+use strict;
+use Devscripts::Output;
+use Moo::Role;
+
# Add user $user to the group selected by --group/--group-id with the
# access role $level. Returns 0 on success, 1 on usage or lookup errors
# or when the user refuses the confirmation prompt.
sub add_user {
    my ($self, $level, $user) = @_;
    unless ($level and $user) {
        ds_warn "Usage $0 --group-id 1234 add_user <level> <userid>";
        return 1;
    }
    unless ($self->group_id) {
        ds_warn "Unable to add user without --group or --group-id";
        return 1;
    }

    # Resolve the username and the symbolic role to their numeric ids
    my $user_id      = $self->username2id($user)  or return 1;
    my $access_level = $self->levels_name($level) or return 1;
    if ($ds_yes < 0) {
        return 1
          if ds_prompt(
"You're going to accept $user as $level in group $self->{group_id}. Continue (Y/n) "
          ) =~ refuse;
    }
    $self->api->add_group_member(
        $self->group_id,
        {
            user_id      => $user_id,
            access_level => $access_level,
        });
    ds_warn "User $user added to group "
      . $self->group_id
      . " with role $level";
    return 0;
}
+
+1;
diff --git a/lib/Devscripts/Salsa/check_repo.pm b/lib/Devscripts/Salsa/check_repo.pm
new file mode 100755
index 0000000..d22095b
--- /dev/null
+++ b/lib/Devscripts/Salsa/check_repo.pm
@@ -0,0 +1,205 @@
+# Parses repo to check if parameters are well set
+package Devscripts::Salsa::check_repo;
+
+use strict;
+use Devscripts::Output;
+use Digest::MD5 qw(md5_hex);
+use Digest::file qw(digest_file_hex);
+use LWP::UserAgent;
+use Moo::Role;
+
+with "Devscripts::Salsa::Repo";
+
# Entry point for the check_repo command: delegate to _check_repo() and
# return only its numeric status (count of projects with problems),
# discarding the list of failing names.
sub check_repo {
    my ($self, @args) = @_;
    my ($status) = $self->_check_repo(@args);
    return $status;
}
+
# Fetch the given URL and return the MD5 hex digest of its body, or
# undef on any HTTP failure (used to compare the remote avatar with the
# local file).
sub _url_md5_hex {
    my ($url) = @_;
    my $response = LWP::UserAgent->new->get($url);
    return undef unless $response->is_success;
    return Digest::MD5::md5_hex($response->content);
}
+
# Compare each requested project's live settings (description, feature
# access levels, visibility, default branch, webhooks, avatar) with the
# ones implied by the configuration and print any differences.
# Returns ($count_of_projects_with_issues, \@failing_project_names), or
# the scalar 1 on usage errors / when get_repo() fails.
sub _check_repo {
    my ($self, @reponames) = @_;
    my $res = 0;
    my @fail;
    unless (@reponames or $self->config->all or $self->config->all_archived) {
        ds_warn "Usage $0 check_repo <--all|--all-archived|names>";
        return 1;
    }
    if (@reponames and $self->config->all) {
        ds_warn "--all with a reponame makes no sense";
        return 1;
    }
    if (@reponames and $self->config->all_archived) {
        ds_warn "--all-archived with a reponame makes no sense";
        return 1;
    }
    # Get repo list from Devscripts::Salsa::Repo
    my @repos = $self->get_repo(0, @reponames);
    return @repos unless (ref $repos[0]);    # propagate get_repo() failure
    foreach my $repo (@repos) {
        my ($id, $name) = @$repo;
        ds_debug "Checking $name ($id)";
        my @err;
        my $project = eval { $self->api->project($id) };
        unless ($project) {
            ds_debug $@;
            ds_warn "Project $name not found";
            next;
        }
        # Expected attributes from Devscripts::Salsa::Hooks
        my %prms           = $self->desc($name);
        my %prms_multipart = $self->desc_multipart($name);
        # check description
        if ($self->config->desc) {
            $project->{description} //= '';
            push @err, "bad description: $project->{description}"
              if ($prms{description} ne $project->{description});
        }
        # check build timeout
        # NOTE(review): guarded by config->desc, like the description
        # check above -- looks like it should test config->build_timeout;
        # confirm before changing
        if ($self->config->desc) {
            $project->{build_timeout} //= '';
            push @err, "bad build_timeout: $project->{build_timeout}"
              if ($prms{build_timeout} ne $project->{build_timeout});
        }
        # check features (w/permission) & ci config
        # (request_access_enabled was listed twice originally; deduplicated)
        foreach (
            qw(analytics_access_level
            auto_devops_enabled
            builds_access_level
            container_registry_access_level
            forking_access_level
            issues_access_level
            lfs_enabled
            merge_requests_access_level
            packages_enabled
            pages_access_level
            releases_access_level
            repository_access_level
            request_access_enabled
            requirements_access_level
            snippets_access_level
            wiki_access_level
            remove_source_branch_after_merge
            ci_config_path)
        ) {
            push @err, "$_ should be $prms{$_}"
              if (defined $prms{$_}
                and (!defined($project->{$_}) or $project->{$_} ne $prms{$_}));
        }
        # only public projects are accepted
        push @err, "private" unless ($project->{visibility} eq "public");
        # Default branch
        if ($self->config->rename_head) {
            push @err, "Default branch is $project->{default_branch}"
              if ($project->{default_branch} ne $self->config->dest_branch);
        }
        # Webhooks (from Devscripts::Salsa::Hooks)
        my $hooks = $self->enabled_hooks($id);
        unless (defined $hooks) {
            ds_warn "Unable to get $name hooks";
            next;
        }
        # check avatar's path
        if ($self->config->avatar_path) {
            # initialize both to '' (the original "= ''" only set the
            # first variable, leaving $md5_url undef and warning on "ne")
            my ($md5_file, $md5_url) = ('', '');
            if ($prms_multipart{avatar}) {
                ds_verbose "Calculating local checksum";
                $md5_file = digest_file_hex($prms_multipart{avatar}, "MD5")
                  or die "$prms_multipart{avatar} failed md5: $!";
                if ($project->{avatar_url}) {
                    ds_verbose "Calculating remote checksum";
                    $md5_url = _url_md5_hex($project->{avatar_url})
                      or die "$project->{avatar_url} failed md5: $!";
                }
                push @err, "Will set the avatar to be: $prms_multipart{avatar}"
                  if ($md5_file ne $md5_url);
            }
        }
        # KGB
        if ($self->config->kgb and not $hooks->{kgb}) {
            push @err, "kgb missing";
        } elsif ($self->config->disable_kgb and $hooks->{kgb}) {
            push @err, "kgb enabled";
        } elsif ($self->config->kgb) {
            push @err,
              "bad irc channel: "
              . substr($hooks->{kgb}->{url},
                length($self->config->kgb_server_url))
              if $hooks->{kgb}->{url} ne $self->config->kgb_server_url
              . $self->config->irc_channel->[0];
            # Merge-compare the two sorted option lists
            # ($wopts already sorted by the config rules)
            my @wopts = @{ $self->config->kgb_options };
            my @gopts = sort @{ $hooks->{kgb}->{options} };
            while (@gopts and @wopts) {
                # lexical renamed from $a (which shadows sort's package var)
                my $cmp = ($wopts[0] cmp $gopts[0]);
                if ($cmp == -1) {
                    push @err, "Missing KGB option " . shift(@wopts);
                } elsif ($cmp == 1) {
                    push @err, 'Unwanted KGB option ' . shift(@gopts);
                } else {
                    shift @wopts;
                    shift @gopts;
                }
            }
            push @err, map { "Missing KGB option $_" } @wopts;
            push @err, map { "Unwanted KGB option $_" } @gopts;
        }
        # Email-on-push
        if ($self->config->email
            and not($hooks->{email} and %{ $hooks->{email} })) {
            push @err, "email-on-push missing";
        } elsif (
            $self->config->email
            and $hooks->{email}->{recipients} ne join(
                ' ',
                map {
                    my $a = $_;
                    my $b = $name;
                    $b =~ s#.*/##;
                    $a =~ s/%p/$b/;
                    $a
                } @{ $self->config->email_recipient })
        ) {
            push @err, "bad email recipients " . $hooks->{email}->{recipients};
        } elsif ($self->config->disable_email and $hooks->{email}) {
            # fixed: the original tested $hooks->{kgb} here (copy-paste
            # from the KGB section), so a lingering email-on-push hook
            # was never reported
            push @err, "email-on-push enabled";
        }
        # Irker
        if ($self->config->irker and not $hooks->{irker}) {
            push @err, "irker missing";
        } elsif ($self->config->irker
            and $hooks->{irker}->{recipients} ne
            join(' ', map { "#$_" } @{ $self->config->irc_channel })) {
            push @err, "bad irc channel: " . $hooks->{irker}->{recipients};
        } elsif ($self->config->disable_irker and $hooks->{irker}) {
            push @err, "irker enabled";
        }
        # Tagpending
        if ($self->config->tagpending and not $hooks->{tagpending}) {
            push @err, "tagpending missing";
        } elsif ($self->config->disable_tagpending
            and $hooks->{tagpending}) {
            push @err, "tagpending enabled";
        }
        # report errors
        if (@err) {
            $res++;
            push @fail, $name;
            print "$name:\n";
            print "\t$_\n" foreach (@err);
        } else {
            ds_verbose "$name: OK";
        }
    }
    return ($res, \@fail);
}
+
+1;
diff --git a/lib/Devscripts/Salsa/checkout.pm b/lib/Devscripts/Salsa/checkout.pm
new file mode 100644
index 0000000..c3b5fe5
--- /dev/null
+++ b/lib/Devscripts/Salsa/checkout.pm
@@ -0,0 +1,79 @@
+# Clones or updates a repository using gbp
+# TODO: git-dpm ?
+package Devscripts::Salsa::checkout;
+
+use strict;
+use Devscripts::Output;
+use Devscripts::Utils;
+use Dpkg::IPC;
+use Moo::Role;
+
+with "Devscripts::Salsa::Repo";
+
# Clone (or update, when a checkout already exists) each requested
# repository with gbp. With --all/--all-archived the project list comes
# from get_repo(). Returns the number of non-fatal failures when
# --no-fail is set, 0 on full success, 1 on the first failure otherwise.
sub checkout {
    my ($self, @repos) = @_;
    unless (@repos or $self->config->all or $self->config->all_archived) {
        ds_warn "Usage $0 checkout <--all|--all-archived|names>";
        return 1;
    }
    if (@repos and $self->config->all) {
        ds_warn "--all with a reponame makes no sense";
        return 1;
    }
    if (@repos and $self->config->all_archived) {
        ds_warn "--all-archived with a reponame makes no sense";
        return 1;
    }
    # If --all is asked, launch all projects
    @repos = map { $_->[1] } $self->get_repo(0, @repos) unless (@repos);
    # Remember where we started so we can come back after each update
    # NOTE(review): shells out to `pwd`; Cwd::getcwd would avoid the
    # subprocess -- Cwd is not currently imported by this module
    my $cdir = `pwd`;
    chomp $cdir;
    my $res = 0;
    foreach (@repos) {
        # full path first; then strip the namespace, leaving the local
        # directory name in $_ (this also rewrites the @repos element,
        # since $_ aliases it)
        my $path = $self->project2path($_);
        s#.*/##;
        if (-d $_) {
            # Existing checkout: update it in place
            # NOTE(review): chdir return value is unchecked here and below
            chdir $_;
            ds_verbose "Updating existing checkout in $_";
            spawn(
                exec => ['gbp', 'pull', '--pristine-tar'],
                wait_child => 1,
                nocheck => 1,
            );
            if ($?) {
                if ($self->config->no_fail) {
                    print STDERR "gbp pull fails in $_, "
                      . "continuing since --no-fail is set\n";
                    $res++;
                } else {
                    ds_warn "gbp pull failed in $_\n";
                    return 1;
                }
            }
            chdir $cdir;
        } else {
            # Fresh clone of all branches
            spawn(
                exec => [
                    'gbp', 'clone',
                    '--all', $self->config->git_server_url . $path . ".git"
                ],
                wait_child => 1,
                nocheck => 1,
            );
            if ($?) {
                if ($self->config->no_fail) {
                    print STDERR "gbp clone fails in $_, "
                      . "continuing since --no-fail is set\n";
                    $res++;
                } else {
                    ds_warn "gbp clone failed for $_\n";
                    return 1;
                }
            }
            ds_warn "$_ ready in $_/";
        }
    }
    return $res;
}
+
+1;
diff --git a/lib/Devscripts/Salsa/create_repo.pm b/lib/Devscripts/Salsa/create_repo.pm
new file mode 100644
index 0000000..4640ae2
--- /dev/null
+++ b/lib/Devscripts/Salsa/create_repo.pm
@@ -0,0 +1,47 @@
# Creates repo using name or path
package Devscripts::Salsa::create_repo;

use strict;
use Devscripts::Output;
use Dpkg::IPC;
use Moo::Role;

with "Devscripts::Salsa::Hooks";

# Creates a new public GitLab project named $reponame (inside the
# configured group when --group-id is set, otherwise in the caller's
# own namespace) and installs the configured hooks on it.
# Returns 0 on success, 1 on refusal or API failure.
sub create_repo {
    my ($self, $reponame) = @_;
    unless ($reponame) {
        ds_warn "Repository name is missing";
        return 1;
    }
    # Get parameters from Devscripts::Salsa::Repo
    my $opts = {
        name       => $reponame,
        path       => $reponame,
        visibility => 'public',
        $self->desc($reponame),
    };
    $opts->{namespace_id} = $self->group_id if $self->group_id;
    # Interactive confirmation when -i is in effect
    my $dest = $self->group_id ? $self->group_path : 'your namespace';
    if ($ds_yes < 0) {
        return 1
          if ds_prompt(
            "You're going to create $reponame in $dest. Continue (Y/n) ")
          =~ refuse;
    }
    my $repo = eval { $self->api->create_project($opts) };
    unless ($repo and not $@) {
        ds_warn "Project not created: $@";
        return 1;
    }
    ds_warn "Project $repo->{web_url} created";
    # Hooks are installed using the project basename only
    $reponame =~ s#^.*/##;
    $self->add_hooks($repo->{id}, $reponame);
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/del_repo.pm b/lib/Devscripts/Salsa/del_repo.pm
new file mode 100644
index 0000000..74700e0
--- /dev/null
+++ b/lib/Devscripts/Salsa/del_repo.pm
@@ -0,0 +1,26 @@
# Deletes a repository
package Devscripts::Salsa::del_repo;

use strict;
use Devscripts::Output;
use Dpkg::IPC;
use Moo::Role;

# Removes the GitLab project designated by $reponame (name or full
# path) after an interactive confirmation when -i is in effect.
# Returns 0 on success, 1 when the name is missing, unknown, or the
# deletion was refused.
sub del_repo {
    my ($self, $reponame) = @_;
    unless ($reponame) {
        ds_warn "Repository name or path is missing";
        return 1;
    }
    my $id   = $self->project2id($reponame) or return 1;
    my $path = $self->project2path($reponame);
    if ($ds_yes < 0) {
        return 1
          if ds_prompt("You're going to delete $path. Continue (Y/n) ")
          =~ refuse;
    }
    $self->api->delete_project($id);
    ds_warn "Project $path deleted";
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/del_user.pm b/lib/Devscripts/Salsa/del_user.pm
new file mode 100644
index 0000000..a29dbbe
--- /dev/null
+++ b/lib/Devscripts/Salsa/del_user.pm
@@ -0,0 +1,32 @@
# Removes a user from a group
package Devscripts::Salsa::del_user;

use strict;
use Devscripts::Output;
use Moo::Role;

# Drops $user from the group given by --group-id, after an interactive
# confirmation when -i is in effect.
# Returns 0 on success, 1 on missing arguments, unknown user or refusal.
sub del_user {
    my ($self, $user) = @_;
    unless ($user) {
        ds_warn "Usage $0 del_user <user>";
        return 1;
    }
    unless ($self->group_id) {
        ds_warn "Unable to del user without --group-id";
        return 1;
    }

    my $id = $self->username2id($user) or return 1;
    if ($ds_yes < 0) {
        return 1
          if ds_prompt(
"You're going to remove $user from group $self->{group_id}. Continue (Y/n) "
          ) =~ refuse;
    }
    $self->api->remove_group_member($self->group_id, $id);
    ds_warn "User $user removed from group " . $self->group_id;
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/fork.pm b/lib/Devscripts/Salsa/fork.pm
new file mode 100644
index 0000000..ca5559e
--- /dev/null
+++ b/lib/Devscripts/Salsa/fork.pm
@@ -0,0 +1,36 @@
# Forks a project given by full path into group/user namespace
package Devscripts::Salsa::fork;

use strict;
use Devscripts::Output;
use Dpkg::IPC;
use Moo::Role;

# Reuses checkout() to clone the freshly created fork
with 'Devscripts::Salsa::checkout';

# Forks $project (full path, e.g. "debian/foo") into the configured
# namespace, clones the fork locally and adds the original project as
# "upstream" git remote. Returns 0 on success, 1 on error.
sub fork {
    my ($self, $project) = @_;
    unless ($project) {
        ds_warn "Project to fork is missing";
        return 1;
    }
    # Destination namespace (group or user path)
    my $path = $self->main_path or return 1;
    # NOTE(review): fork_project's result is unchecked; presumably the
    # API client dies on failure — confirm against GitLab::API::v4
    $self->api->fork_project($project, { namespace => $path });
    # Local directory name = last component of the project path
    my $p = $project;
    $p =~ s#.*/##;
    if ($self->checkout($p)) {
        ds_warn "Failed to checkout $project";
        return 1;
    }
    # Keep a remote pointing at the forked-from repository
    chdir $p;
    spawn(
        exec => [
            qw(git remote add upstream),
            $self->config->git_server_url . $project
        ],
        wait_child => 1
    );
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/forks.pm b/lib/Devscripts/Salsa/forks.pm
new file mode 100644
index 0000000..18b1d0c
--- /dev/null
+++ b/lib/Devscripts/Salsa/forks.pm
@@ -0,0 +1,45 @@
# Lists forks of a project
package Devscripts::Salsa::forks;

use strict;
use Devscripts::Output;
use Moo::Role;

# Prints id, path and URL of every fork of each given project.
# Returns the number of projects that could not be resolved, 1 when no
# project name was given.
sub forks {
    my ($self, @reponames) = @_;
    my $res = 0;
    unless (@reponames) {
        ds_warn "Repository name is missing";
        return 1;
    }
    foreach my $p (@reponames) {
        my $id = $self->project2id($p);
        unless ($id) {
            # Bug fix: the message interpolated $_, which is not the
            # loop variable here and was usually empty
            ds_warn "Project $p not found";
            $res++;
            next;
        }
        print "$p\n";
        my $forks = $self->api->paginator(
            'project_forks',
            $id,
            {
                # NOTE(review): 'state' looks like a leftover from the
                # merge-request listing; GitLab ignores unknown params
                state => 'opened',
            });
        unless ($forks) {
            print "\n";
            next;
        }
        while ($_ = $forks->next) {
            print <<END;
\tId : $_->{id}
\tName: $_->{path_with_namespace}
\tURL : $_->{web_url}

END
        }
    }
    return $res;
}

1;
diff --git a/lib/Devscripts/Salsa/group.pm b/lib/Devscripts/Salsa/group.pm
new file mode 100644
index 0000000..cb14741
--- /dev/null
+++ b/lib/Devscripts/Salsa/group.pm
@@ -0,0 +1,35 @@
# Lists members of a group
package Devscripts::Salsa::group;

use strict;
use Devscripts::Output;
use Moo::Role;

# Prints id, username, name, access level and state of every member of
# the group given by --group-id.
# Returns 0 when at least one user was listed, 1 otherwise.
sub group {
    my ($self) = @_;
    unless ($self->group_id) {
        ds_warn "Usage $0 --group-id 1234 group";
        return 1;
    }
    my $members = $self->api->paginator('group_members', $self->group_id);
    my $seen    = 0;
    while (my $member = $members->next) {
        $seen++;
        my $access_level = $self->levels_code($member->{access_level});
        print <<END;
Id : $member->{id}
Username : $member->{username}
Name : $member->{name}
Access level: $access_level
State : $member->{state}

END
    }
    unless ($seen) {
        ds_warn "No users found";
        return 1;
    }
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/join.pm b/lib/Devscripts/Salsa/join.pm
new file mode 100644
index 0000000..319e107
--- /dev/null
+++ b/lib/Devscripts/Salsa/join.pm
@@ -0,0 +1,20 @@
# Launch request to join a group
package Devscripts::Salsa::join;

use strict;
use Devscripts::Output;
use Moo::Role;

# Files an access request for $group (defaulting to the configured
# group name or id). Returns 0 on success, 1 when no group is known.
sub join {
    my ($self, $group) = @_;
    $group ||= $self->config->group || $self->config->group_id;
    unless ($group) {
        ds_warn "Group is missing";
        return 1;
    }
    my $gid = $self->group2id($group);
    $self->api->group_access_requests($gid);
    ds_warn "Request launched to group $group ($gid)";
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/last_ci_status.pm b/lib/Devscripts/Salsa/last_ci_status.pm
new file mode 100644
index 0000000..c275efb
--- /dev/null
+++ b/lib/Devscripts/Salsa/last_ci_status.pm
@@ -0,0 +1,74 @@
# Shows the result of the last GitLab CI pipeline of projects
package Devscripts::Salsa::last_ci_status;

use strict;
use Devscripts::Output;
use Moo::Role;

with "Devscripts::Salsa::Repo";

# GitLab pipeline/job status strings
use constant OK => 'success';
use constant SKIPPED => 'skipped';
use constant FAILED => 'failed';

# Prints the status of the newest pipeline of each given project (or of
# all projects with --all / --all-archived). On a non-success status the
# per-job results and the pipeline URL are printed to STDERR.
# Returns the number of failing projects with --no-fail, 1 on the first
# hard failure otherwise, 0 when everything succeeded.
sub last_ci_status {
    my ($self, @repos) = @_;
    unless (@repos or $self->config->all or $self->config->all_archived) {
        ds_warn "Usage $0 ci_status <--all|--all-archived|names>";
        return 1;
    }
    if (@repos and $self->config->all) {
        ds_warn "--all with a reponame makes no sense";
        return 1;
    }
    if (@repos and $self->config->all_archived) {
        ds_warn "--all-archived with a reponame makes no sense";
        return 1;
    }
    # If --all is asked, launch all projects
    @repos = map { $_->[1] } $self->get_repo(0, @repos) unless (@repos);
    my $ret = 0;
    foreach my $repo (@repos) {
        my $id = $self->project2id($repo) or return 1;
        # NOTE(review): assumes the API returns pipelines newest first,
        # so [0] is the latest run — confirm against GitLab::API::v4
        my $pipelines = $self->api->pipelines($id);
        unless ($pipelines and @$pipelines) {
            ds_warn "No pipelines for $repo";
            $ret++;
            return 1 unless $self->config->no_fail;
        } else {
            my $status = $pipelines->[0]->{status};
            if ($status eq OK) {
                print "Last result for $repo: $status\n";
            } else {
                # Not a success: group job names by status for a summary
                print STDERR "Last result for $repo: $status\n";
                my $jobs
                  = $self->api->pipeline_jobs($id, $pipelines->[0]->{id});
                my %jres;
                foreach my $job (sort { $a->{id} <=> $b->{id} } @$jobs) {
                    next if $job->{status} eq SKIPPED;
                    push @{ $jres{ $job->{status} } }, $job->{name};
                }
                # Successful jobs first, then the other statuses
                if ($jres{ OK() }) {
                    print STDERR ' success: '
                      . join(', ', @{ $jres{ OK() } }) . "\n";
                    delete $jres{ OK() };
                }
                foreach my $k (sort keys %jres) {
                    print STDERR ' '
                      . uc($k) . ': '
                      . join(', ', @{ $jres{$k} }) . "\n";
                }
                print STDERR "\n See: " . $pipelines->[0]->{web_url} . "\n\n";
                # Only a hard "failed" status aborts/counts; other
                # states (running, canceled, ...) are reported only
                if ($status eq FAILED) {
                    $ret++;
                    unless ($self->config->no_fail) {
                        ds_verbose "Use --no-fail to continue";
                        return 1;
                    }
                }
            }
        }
    }
    return $ret;
}

1;
diff --git a/lib/Devscripts/Salsa/list_groups.pm b/lib/Devscripts/Salsa/list_groups.pm
new file mode 100644
index 0000000..903cd1e
--- /dev/null
+++ b/lib/Devscripts/Salsa/list_groups.pm
@@ -0,0 +1,40 @@
# Lists subgroups of a group or groups of a user
package Devscripts::Salsa::list_groups;

use strict;
use Devscripts::Output;
use Moo::Role;

# Prints the subgroups of --group-id, or all visible groups when no
# group is configured, optionally filtered by $match.
# Returns 0 when groups were found, 1 otherwise.
sub list_groups {
    my ($self, $match) = @_;
    my $query = {
        order_by => 'name',
        sort     => 'asc',
    };
    $query->{search} = $match if $match;
    my $groups
      = $self->group_id
      ? $self->api->paginator('group_subgroups', $self->group_id, $query)
      : $self->api->paginator('groups', $query);
    my $found = 0;
    while (my $g = $groups->next) {
        $found++;
        my $parent = $g->{parent_id} ? "Parent id: $g->{parent_id}\n" : '';
        print <<END;
Id : $g->{id}
Name : $g->{name}
Full path: $g->{full_path}
$parent
END
    }
    unless ($found) {
        ds_warn "No groups found";
        return 1;
    }
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/list_repos.pm b/lib/Devscripts/Salsa/list_repos.pm
new file mode 100644
index 0000000..34ae05a
--- /dev/null
+++ b/lib/Devscripts/Salsa/list_repos.pm
@@ -0,0 +1,42 @@
# Lists repositories of group/user
package Devscripts::Salsa::list_repos;

use strict;
use Devscripts::Output;
use Moo::Role;

# Prints id, name and URL of every project of the configured group
# (--group-id) or user, optionally filtered by $match.
# Returns 0 when projects were found, 1 otherwise.
sub list_repos {
    my ($self, $match) = @_;
    my $query = {
        order_by => 'name',
        sort     => 'asc',
        simple   => 1,
        archived => $self->config->archived,
    };
    $query->{search} = $match if $match;
    my $projects
      = $self->group_id
      ? $self->api->paginator('group_projects', $self->group_id, $query)
      : $self->api->paginator('user_projects', $self->user_id, $query);
    my $found = 0;
    while (my $project = $projects->next) {
        $found++;
        print <<END;
Id : $project->{id}
Name: $project->{name}
URL : $project->{web_url}

END
    }
    unless ($found) {
        ds_warn "No projects found";
        return 1;
    }
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/merge_request.pm b/lib/Devscripts/Salsa/merge_request.pm
new file mode 100644
index 0000000..b76c6e8
--- /dev/null
+++ b/lib/Devscripts/Salsa/merge_request.pm
@@ -0,0 +1,174 @@
# Creates a merge request from current directory (or using parameters)
package Devscripts::Salsa::merge_request;

use strict;
use Devscripts::Output;
use Dpkg::IPC;
use Moo::Role;

with 'Devscripts::Salsa::search_project';

# Opens a GitLab merge request. Source/destination projects and
# branches come from the command line / configuration, or are guessed
# from the current git checkout (current branch, "origin"/"upstream"
# remotes, GitLab fork relationship); title and description default to
# the last commit. Returns 0 on success, 1 on any error or refusal.
sub merge_request {
    my ($self, $dst_project, $dst_branch) = @_;
    my $src_branch  = $self->config->mr_src_branch;
    my $src_project = $self->config->mr_src_project;
    $dst_project ||= $self->config->mr_dst_project;
    $dst_branch  ||= $self->config->mr_dst_branch;
    my $title = $self->config->mr_title;
    my $desc  = $self->config->mr_desc;

    if ($src_branch) {
        unless ($src_project and $dst_project) {
            # Bug fix: the message named --mr-src-project twice
            ds_warn "--mr-src-project and --mr-dst-project "
              . "are required when --mr-src-branch is set";
            return 1;
        }
        unless ($src_project =~ m#/#) {
            $src_project = $self->project2path($src_project);
        }
    } else {    # Use current repository to find elements
        ds_verbose "using current branch as source";
        my $out;
        unless ($src_project) {
            # 1. Verify that repo is ready
            spawn(
                exec       => [qw(git status -s -b -uno)],
                wait_child => 1,
                to_string  => \$out
            );
            chomp $out;
            # Case "rebased"
            if ($out =~ /\[/) {
                ds_warn "Current branch isn't pushed, aborting:\n";
                return 1;
            }
            # Case else: nothing after src...dst
            unless ($out =~ /\s(\S+)\.\.\.(\S+)/s) {
                # Typo fix: "as no origin" -> "has no origin"
                ds_warn
                  "Current branch has no origin or isn't pushed, aborting";
                return 1;
            }
            # 2. Set source branch to current branch
            $src_branch ||= $1;
            ds_verbose "Found current branch: $src_branch";
        }
        unless ($src_project and $dst_project) {
            # Check remote links
            spawn(
                exec       => [qw(git remote --verbose show)],
                wait_child => 1,
                to_string  => \$out,
            );
            my $origin = $self->config->api_url;
            $origin =~ s#api/v4$##;
            # 3. Set source project using "origin" target
            unless ($src_project) {
                if ($out
                    =~ /origin\s+(?:\Q$self->{config}->{git_server_url}\E|\Q$origin\E)(\S*)/m
                ) {
                    $src_project = $1;
                    $src_project =~ s/\.git$//;
                } else {
                    ds_warn
"Unable to find project origin, set it using --mr-src-project";
                    return 1;
                }
            }
            # 4. Steps to find destination project:
            #  - command-line
            #  - GitLab API (search for "forked_from_project"
            #  - "upstream" in git remote
            #  - use source project as destination project

            # 4.1. Stop if dest project has been given in command line
            unless ($dst_project) {
                my $project = $self->api->project($src_project);

                # 4.2. Search original project from GitLab API
                if ($project->{forked_from_project}) {
                    $dst_project
                      = $project->{forked_from_project}->{path_with_namespace};
                }
                if ($dst_project) {
                    ds_verbose "Project was forked from $dst_project";

                    # 4.3. Search for an "upstream" target in `git remote`
                } elsif ($out
                    =~ /upstream\s+(?:\Q$self->{config}->{git_server_url}\E|\Q$origin\E)(\S*)/m
                ) {
                    $dst_project = $1;
                    $dst_project =~ s/\.git$//;
                    ds_verbose 'Use "upstream" target as dst project';
                    # 4.4. Use source project as destination
                } else {
                    ds_warn
"No upstream target found, using current project as target";
                    $dst_project = $src_project;
                }
                ds_verbose "Use $dst_project as dest project";
            }
        }
        # 5. Search for MR title and desc
        unless ($title) {
            ds_warn "Title not set, using last commit";
            spawn(
                exec       => ['git', 'show', '--format=format:%s###%b'],
                wait_child => 1,
                to_string  => \$out,
            );
            # Drop the diff, keep "subject###body"
            $out =~ s/\ndiff.*$//s;
            my ($t, $d) = split /###/, $out;
            # Bug fix: avoid an uninitialized-value warning when the
            # last commit has no body
            chomp $d if defined $d;
            $title = $t;
            ds_verbose "Title set to $title";
            $desc ||= $d;
            # Replace all bug links by markdown links
            if ($desc) {
                $desc =~ s@#(\d{6,})\b@[#$1](https://bugs.debian.org/$1)@mg;
                ds_verbose "Desc set to $desc";
            }
        }
    }
    if ($dst_project eq 'same') {
        $dst_project = $src_project;
    }
    my $src = $self->api->project($src_project);
    unless ($title) {
        ds_warn "Title is required";
        return 1;
    }
    unless ($src and $src->{id}) {
        ds_warn "Target project not found $src_project";
        return 1;
    }
    my $dst;
    if ($dst_project) {
        $dst = $self->api->project($dst_project);
        unless ($dst and $dst->{id}) {
            ds_warn "Target project not found";
            return 1;
        }
    }
    return 1
      if (
        ds_prompt(
"You're going to push an MR to $dst_project:$dst_branch. Continue (Y/n)"
        ) =~ refuse
      );
    my $res = $self->api->create_merge_request(
        $src->{id},
        {
            source_branch        => $src_branch,
            target_branch        => $dst_branch,
            title                => $title,
            remove_source_branch => $self->config->mr_remove_source_branch,
            squash               => $self->config->mr_allow_squash,
            ($dst  ? (target_project_id => $dst->{id}) : ()),
            ($desc ? (description       => $desc)      : ()),
        });
    ds_warn "MR '$title' posted:";
    ds_warn $res->{web_url};
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/merge_requests.pm b/lib/Devscripts/Salsa/merge_requests.pm
new file mode 100644
index 0000000..e722390
--- /dev/null
+++ b/lib/Devscripts/Salsa/merge_requests.pm
@@ -0,0 +1,49 @@
# Lists merge requests proposed to a project
package Devscripts::Salsa::merge_requests;

use strict;
use Devscripts::Output;
use Moo::Role;

# Prints the open merge requests of each given project.
# Returns 0 when at least one MR was found, 1 otherwise.
sub merge_requests {
    my ($self, @reponames) = @_;
    my $res = 1;
    unless (@reponames) {
        ds_warn "Repository name is missing";
        return 1;
    }
    foreach my $p (@reponames) {
        my $id    = $self->project2id($p);
        my $count = 0;
        unless ($id) {
            # Bug fix: message previously interpolated $_ instead of
            # the loop variable $p
            ds_warn "Project $p not found";
            return 1;
        }
        print "$p\n";
        my $mrs = $self->api->paginator(
            'merge_requests',
            $id,
            {
                state => 'opened',
            });
        while ($_ = $mrs->next) {
            $res = 0;
            # Bug fix: $count was never incremented, so the "no MR"
            # blank line below was printed even after listing MRs
            $count++;
            my $status = $_->{work_in_progress} ? 'WIP' : $_->{merge_status};
            print <<END;
\tId : $_->{id}
\tTitle : $_->{title}
\tAuthor: $_->{author}->{username}
\tStatus: $status
\tUrl : $_->{web_url}

END
        }
        unless ($count) {
            print "\n";
            next;
        }
    }
    return $res;
}

1;
diff --git a/lib/Devscripts/Salsa/pipeline_schedule.pm b/lib/Devscripts/Salsa/pipeline_schedule.pm
new file mode 100755
index 0000000..51c1e09
--- /dev/null
+++ b/lib/Devscripts/Salsa/pipeline_schedule.pm
@@ -0,0 +1,124 @@
# Create a pipeline schedule using parameters
package Devscripts::Salsa::pipeline_schedule;

use strict;
use Devscripts::Output;
use Moo::Role;

# For --all
with "Devscripts::Salsa::Repo";

# Creates, updates, runs or deletes the pipeline schedule matching
# --schedule-desc on each given project (or on all projects with
# --all). "%p" in the description is replaced by the project name.
# Returns the number of failing projects with --no-fail, 1 on the
# first failure otherwise.
sub pipeline_schedule {
    my ($self, @repos) = @_;
    my $ret    = 0;
    my $desc   = $self->config->schedule_desc;
    my $ref    = $self->config->schedule_ref;
    my $cron   = $self->config->schedule_cron;
    my $tz     = $self->config->schedule_tz;
    my $active = $self->config->schedule_enable;
    $active
      = ($self->config->schedule_disable)
      ? "0"
      : $active;
    my $run    = $self->config->schedule_run;
    my $delete = $self->config->schedule_delete;

    unless (@repos or $self->config->all) {
        ds_warn "Usage $0 pipeline <project|--all>";
        return 1;
    }
    if (@repos and $self->config->all) {
        ds_warn "--all with a project (@repos) makes no sense";
        return 1;
    }

    unless ($desc) {
        ds_warn "--schedule-desc / SALSA_SCHEDULE_DESC is missing";
        ds_warn "Are you looking for: $0 pipelines <project|--all>";
        return 1;
    }

    # If --all is asked, launch all projects
    @repos = map { $_->[1] } $self->get_repo(0, @repos) unless (@repos);

    foreach my $repo (sort @repos) {
        my $id = $self->project2id($repo);
        unless ($id) {
#ds_warn "Project $repo not found"; # $self->project2id($repo) shows this error
            $ret++;
            return 1 unless $self->config->no_fail;
        } else {
            my @pipe_id = ();
            # Bug fix: substitute %p on a per-project copy; the old
            # in-place s/// destroyed the %p placeholder after the
            # first iteration, so every later project reused the first
            # project's substituted description
            (my $description = $desc) =~ s/%p/$repo/g;
            my $options = {};
            $options->{ref}           = $ref    if defined $ref;
            $options->{cron}          = $cron   if defined $cron;
            $options->{cron_timezone} = $tz     if defined $tz;
            $options->{active}        = $active if defined $active;

# REF: https://docs.gitlab.com/ee/api/pipeline_schedules.html#get-all-pipeline-schedules
            my $pipelines
              = $self->api->paginator('pipeline_schedules', $id)->all();
            ds_verbose "No pipelines scheduled for $repo" unless @$pipelines;

            # Collect the ids of schedules whose description matches
            foreach (@$pipelines) {
                push @pipe_id, $_->{id}
                  if ($_->{description} eq $description);
            }

            # Bug fix: the count was computed with "++$#pipe_id",
            # which also appended an undef element to @pipe_id as a
            # side effect
            ds_warn "More than 1 scheduled pipeline matches: $description ("
              . scalar(@pipe_id) . ")"
              if ($pipe_id[1]);

            if (!@pipe_id) {
                # Creating a schedule needs at least ref and cron
                ds_warn "--schedule-ref / SALSA_SCHEDULE_REF is required"
                  unless ($ref);
                ds_warn "--schedule-cron / SALSA_SCHEDULE_CRON is required"
                  unless ($cron);
                return 1
                  unless ($ref && $cron);

                $options->{description} = $description;

                ds_verbose
                  "No scheduled pipelines matching: $description. Creating!";
                my $schedule
                  = $self->api->create_pipeline_schedule($id, $options);

                @pipe_id = $schedule->{id};
            } elsif (keys %$options) {
                ds_verbose "Editing scheduled pipelines matching: $description";
                foreach (@pipe_id) {
                    next if !$_;

                    my $schedule
                      = $self->api->edit_pipeline_schedule($id, $_, $options);
                }
            }

            if ($run) {
                ds_verbose "Running scheduled pipelines matching: $description";

                foreach (@pipe_id) {
                    next if !$_;

                    my $schedule = $self->api->run_pipeline_schedule($id, $_);
                }
            }

            if ($delete) {
                ds_verbose
                  "Deleting scheduled pipelines matching: $description";

                foreach (@pipe_id) {
                    next if !$_;

                    my $schedule
                      = $self->api->delete_pipeline_schedule($id, $_);
                }
            }
        }
    }
    return $ret;
}

1;
diff --git a/lib/Devscripts/Salsa/pipeline_schedules.pm b/lib/Devscripts/Salsa/pipeline_schedules.pm
new file mode 100755
index 0000000..47c7904
--- /dev/null
+++ b/lib/Devscripts/Salsa/pipeline_schedules.pm
@@ -0,0 +1,70 @@
# Lists pipeline schedules of a project
package Devscripts::Salsa::pipeline_schedules;

use strict;
use Devscripts::Output;
use Moo::Role;

# For --all
with "Devscripts::Salsa::Repo";

# Prints every pipeline schedule of each given project (or of all
# projects with --all). Returns the number of unresolvable projects
# with --no-fail, 1 on the first failure otherwise.
sub pipeline_schedules {
    my ($self, @repo) = @_;
    my $ret = 0;

    unless (@repo or $self->config->all) {
        ds_warn "Usage $0 pipelines <project|--all>";
        return 1;
    }
    if (@repo and $self->config->all) {
        ds_warn "--all with a project (@repo) makes no sense";
        return 1;
    }

    # If --all is asked, launch all projects
    @repo = map { $_->[1] } $self->get_repo(0, @repo) unless (@repo);

    foreach my $p (sort @repo) {
        my $id = $self->project2id($p);
        unless ($id) {
            #ds_warn "Project $p not found"; # $self->project2id($p) shows this error
            $ret++;
            return 1 unless $self->config->no_fail;
        } else {
            my $project = $self->api->project($id);
            # Schedules only exist when CI/CD is enabled on the project
            if ($project->{jobs_enabled} == 0) {
                print "$p has disabled CI/CD\n";
                next;
            }

            my $pipelines
              = $self->api->paginator('pipeline_schedules', $id)->all();

            print "$p\n" if @$pipelines;

            foreach (@$pipelines) {
                my $status = $_->{active} ? 'Enabled' : 'Disabled';
                print <<END;
\tID : $_->{id}
\tDescription: $_->{description}
\tStatus : $status
\tRef : $_->{ref}
\tCron : $_->{cron}
\tTimezone : $_->{cron_timezone}
\tCreated : $_->{created_at}
\tUpdated : $_->{updated_at}
\tNext run : $_->{next_run_at}
\tOwner : $_->{owner}->{username}

END
            }
        }
        # Cleanup: removed a $count flag that was never set, which made
        # the old "unless ($count) { next }" tail a no-op
    }
    return $ret;
}

1;
diff --git a/lib/Devscripts/Salsa/protect_branch.pm b/lib/Devscripts/Salsa/protect_branch.pm
new file mode 100644
index 0000000..5451818
--- /dev/null
+++ b/lib/Devscripts/Salsa/protect_branch.pm
@@ -0,0 +1,43 @@
# Protects a branch
package Devscripts::Salsa::protect_branch;

use strict;
use Devscripts::Output;
use Moo::Role;

# Maps level names (and their one-letter shortcuts) to GitLab numeric
# access levels
use constant levels => {
    o          => 50,
    owner      => 50,
    m          => 40,
    maintainer => 40,
    d          => 30,
    developer  => 30,
    r          => 20,
    reporter   => 20,
    g          => 10,
    guest      => 10,
};

# Protects $branch of $reponame, restricting merge and push to the
# given access levels. A merge level of "no" or "0" unprotects the
# branch instead. Returns 0 on success, 1 on usage errors.
sub protect_branch {
    my ($self, $reponame, $branch, $merge, $push) = @_;
    unless ($reponame and $branch) {
        ds_warn "usage: $0 protect_branch repo branch merge push";
        return 1;
    }
    if (defined $merge and $merge =~ /^(?:no|0)$/i) {
        $self->api->unprotect_branch($self->project2id($reponame), $branch);
        return 0;
    }
    # Bug fix: check definedness before the hash lookups to avoid
    # "uninitialized value" warnings when levels are omitted
    unless ($merge and $push and levels->{$merge} and levels->{$push}) {
        ds_warn
          "usage: $0 protect_branch repo branch <merge level> <push level>";
        return 1;
    }
    my $opts = { name => $branch };
    $opts->{push_access_level}  = (levels->{$push});
    $opts->{merge_access_level} = (levels->{$merge});
    $self->api->protect_branch($self->project2id($reponame), $opts);
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/protected_branches.pm b/lib/Devscripts/Salsa/protected_branches.pm
new file mode 100644
index 0000000..cd0cd0e
--- /dev/null
+++ b/lib/Devscripts/Salsa/protected_branches.pm
@@ -0,0 +1,27 @@
# Displays protected branches of a project
package Devscripts::Salsa::protected_branches;

use strict;
use Devscripts::Output;
use Moo::Role;

# Prints a table of the protected branches of $reponame with the
# access levels required to merge and to push.
# Returns 0 (also when nothing is protected), 1 if the name is missing.
sub protected_branches {
    my ($self, $reponame) = @_;
    unless ($reponame) {
        ds_warn "Repository name is missing";
        return 1;
    }
    my $branches
      = $self->api->protected_branches($self->project2id($reponame));
    return 0 unless ($branches and @$branches);
    printf " %-20s | %-25s | %-25s\n", 'Branch', 'Merge', 'Push';
    for my $branch (@$branches) {
        printf " %-20s | %-25s | %-25s\n", $branch->{name},
          $branch->{merge_access_levels}->[0]->{access_level_description},
          $branch->{push_access_levels}->[0]->{access_level_description};
    }
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/purge_cache.pm b/lib/Devscripts/Salsa/purge_cache.pm
new file mode 100644
index 0000000..187f698
--- /dev/null
+++ b/lib/Devscripts/Salsa/purge_cache.pm
@@ -0,0 +1,15 @@
# Empties the Devscripts::JSONCache
package Devscripts::Salsa::purge_cache;

use strict;
use Devscripts::Output;
use Moo::Role;

# Deletes every entry from the JSON cache tied to this object.
# Always returns 0.
sub purge_cache {
    my ($self) = @_;
    delete $self->_cache->{$_} for keys %{ $self->_cache };
    ds_verbose "Cache empty";
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/push.pm b/lib/Devscripts/Salsa/push.pm
new file mode 100644
index 0000000..fa3b6cc
--- /dev/null
+++ b/lib/Devscripts/Salsa/push.pm
@@ -0,0 +1,106 @@
# Push local work. Like gbp push but able to push incomplete work
package Devscripts::Salsa::push;
use strict;
use Devscripts::Output;
use Devscripts::Utils;
use Dpkg::Source::Format;
use Moo::Role;
use Dpkg::IPC;

# Reads gbp.conf (in its usual locations) to learn the debian/upstream
# branch and tag names; missing values fall back to gbp's defaults.
# Tag templates are turned into anchored regexps ("%(version)s" -> .*).
# Returns the raw content of the first gbp.conf found ('' if none).
sub readGbpConf {
    my ($self) = @_;
    my $res = '';
    foreach my $gbpconf (qw(.gbp.conf debian/gbp.conf .git/gbp.conf)) {
        if (-e $gbpconf) {
            # Bug fix: 3-arg open with an error check instead of the
            # old unchecked 2-arg open
            my $f;
            unless (open($f, '<', $gbpconf)) {
                ds_warn "Unable to open $gbpconf: $!";
                next;
            }
            while (<$f>) {
                $res .= $_;
                if (/^\s*(debian|upstream)\-(branch|tag)\s*=\s*(.*\S)/) {
                    $self->{"$1_$2"} = $3;
                }
            }
            close $f;
            last;
        }
    }
    if ($self->{debian_tag}) {
        $self->{debian_tag} =~ s/%\(version\)s/.*/g;
        $self->{debian_tag} =~ s/^/^/;
        $self->{debian_tag} =~ s/$/\$/;
    } else {
        # Native packages accept any tag, quilt ones debian/*
        my @tmp
          = Dpkg::Source::Format->new(filename => 'debian/source/format')->get;
        $self->{debian_tag} = $tmp[2] eq 'native' ? '.*' : '^debian/.*$';
    }
    if ($self->{upstream_tag}) {
        $self->{upstream_tag} =~ s/%\(version\)s/.*/g;
        $self->{upstream_tag} =~ s/^/^/;
        $self->{upstream_tag} =~ s/$/\$/;
    } else {
        $self->{upstream_tag} = '^upstream/.*$';
    }
    $self->{debian_branch}   ||= 'master';
    $self->{upstream_branch} ||= 'upstream';
    return $res;
}

# Pushes the debian/upstream branches, pristine-tar, gbp notes and all
# tags matching the configured tag patterns to the remote of the
# current branch (falling back to "origin"). Returns 0.
sub push {
    my ($self) = @_;
    $self->readGbpConf;
    # Only push refs that exist locally
    my @refs;
    foreach (
        $self->{debian_branch}, $self->{upstream_branch},
        'pristine-tar', 'refs/notes/commits'
    ) {
        if (ds_exec_no_fail(qw(git rev-parse --verify --quiet), $_) == 0) {
            push @refs, $_;
        }
    }
    my $out;
    spawn(exec => ['git', 'tag'], wait_child => 1, to_string => \$out);
    my @tags = grep /(?:$self->{debian_tag}|$self->{upstream_tag})/,
      split(/\r?\n/, $out);
    unless (
        $ds_yes < 0
        and ds_prompt(
            "You're going to push :\n - "
              . join(', ', @refs)
              . "\nand check tags that match:\n - "
              . join(', ', $self->{debian_tag}, $self->{upstream_tag})
              . "\nContinue (Y/n) "
        ) =~ refuse
    ) {
        # Find the remote tracking the current branch
        my $origin;
        eval {
            spawn(
                exec       => ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
                wait_child => 1,
                to_string  => \$out,
            );
            chomp $out;
            spawn(
                exec =>
                  ['git', 'config', '--local', '--get', "branch.$out.remote"],
                wait_child => 1,
                to_string  => \$origin,
            );
            chomp $origin;
        };
        if ($origin) {
            ds_verbose 'Origin is ' . $origin;
        } else {
            ds_warn 'Unable to detect remote name, trying "origin"';
            ds_verbose "Error: $@" if ($@);
            $origin = 'origin';
        }
        # Bug fix: the verbose message lacked its closing quote
        ds_verbose "Execute 'git push $origin "
          . join(' ', @refs, '<tags>') . "'";
        ds_debug "Tags are: " . join(' ', @tags);
        spawn(
            exec       => ['git', 'push', $origin, @refs, @tags],
            wait_child => 1
        );
    }
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/push_repo.pm b/lib/Devscripts/Salsa/push_repo.pm
new file mode 100644
index 0000000..94e8ff3
--- /dev/null
+++ b/lib/Devscripts/Salsa/push_repo.pm
@@ -0,0 +1,71 @@
# Creates GitLab repo from local path
package Devscripts::Salsa::push_repo;

use strict;
use Devscripts::Output;
use Dpkg::IPC;
use Moo::Role;

with "Devscripts::Salsa::create_repo";

# Publishes the local packaging repository in directory $reponame:
# reads the source package name from debian/changelog, adds an
# "origin" remote, creates the remote project (via create_repo) and
# pushes all branches and tags. Returns 0 on success, 1 on error.
sub push_repo {
    my ($self, $reponame) = @_;
    unless ($reponame) {
        ds_warn "Repository path is missing";
        return 1;
    }
    unless (-d $reponame) {
        ds_warn "$reponame isn't a directory";
        return 1;
    }
    # NOTE(review): chdir result is unchecked — the -d test above makes
    # a failure unlikely but not impossible
    chdir $reponame;
    # $reponame is reused: from here on it holds the source package name
    eval {
        spawn(
            exec => ['dpkg-parsechangelog', '--show-field', 'Source'],
            to_string => \$reponame,
            wait_child => 1,
        );
    };
    if ($@) {
        ds_warn $@;
        return 1;
    }
    chomp $reponame;
    # Refuse to overwrite an already configured origin remote
    my $out;
    spawn(
        exec => ['git', 'remote', 'show'],
        to_string => \$out,
        wait_child => 1,
    );
    if ($out =~ /^origin$/m) {
        ds_warn "git origin is already configured:\n$out";
        return 1;
    }
    # Remote URL = server + namespace path + package name
    my $path = $self->project2path('') or return 1;
    my $url = $self->config->git_server_url . "$path$reponame";
    spawn(
        exec => ['git', 'remote', 'add', 'origin', $url],
        wait_child => 1,
    );
    # If the project already exists remotely, offer to push anyway
    my $res = $self->create_repo($reponame);
    if ($res) {
        return 1
          unless (
            ds_prompt(
"Project already exists, do you want to try to push local repo? (y/N) "
            ) =~ accept
          );
    }
    spawn(
        exec =>
          ['git', 'push', '--all', '--verbose', '--set-upstream', 'origin'],
        wait_child => 1,
    );
    spawn(
        exec => ['git', 'push', '--tags', '--verbose', 'origin'],
        wait_child => 1,
    );
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/rename_branch.pm b/lib/Devscripts/Salsa/rename_branch.pm
new file mode 100644
index 0000000..d908080
--- /dev/null
+++ b/lib/Devscripts/Salsa/rename_branch.pm
@@ -0,0 +1,45 @@
# Renames a branch on GitLab projects
package Devscripts::Salsa::rename_branch;

use strict;
use Devscripts::Output;
use Moo::Role;

with "Devscripts::Salsa::Repo";

our $prompt = 1;

# Renames --source-branch into --dest-branch on each given repository
# by creating the new branch from the old one and deleting the old
# one. Returns the number of failures with --no-fail, 1 on the first
# failure otherwise, 0 on success.
sub rename_branch {
    my ($self, @reponames) = @_;
    my $res = 0;
    my @repos = $self->get_repo($prompt, @reponames);
    return @repos unless (ref $repos[0]);    # get_repo returns 1 when fails
    foreach (@repos) {
        my $id = $_->[0];
        ds_verbose "Configuring $_->[1]";
        # Cleanup: removed an unused $self->api->project() call whose
        # result was discarded (one useless API roundtrip per repo)
        eval {
            $self->api->create_branch(
                $id,
                {
                    ref    => $self->config->source_branch,
                    branch => $self->config->dest_branch,
                });
            $self->api->delete_branch($id, $self->config->source_branch);
        };
        if ($@) {
            $res++;
            if ($self->config->no_fail) {
                ds_verbose $@;
                ds_warn
"Branch rename has failed for $_->[1]. Use --verbose to see errors\n";
                next;
            } else {
                ds_warn $@;
                return 1;
            }
        }
    }
    return $res;
}

1;
diff --git a/lib/Devscripts/Salsa/search_group.pm b/lib/Devscripts/Salsa/search_group.pm
new file mode 100644
index 0000000..2fd047b
--- /dev/null
+++ b/lib/Devscripts/Salsa/search_group.pm
@@ -0,0 +1,37 @@
# Searches groups using given string
package Devscripts::Salsa::search_group;

use strict;
use Devscripts::Output;
use Moo::Role;

# Looks up $group first as an exact group, then as a search string,
# and prints the matches. Returns 0 when at least one group was found,
# 1 otherwise.
sub search_group {
    my ($self, $group) = @_;
    unless ($group) {
        ds_warn "Searched string is missing";
        return 1;
    }
    my $exact = $self->api->group_without_projects($group);
    my $groups
      = $exact
      ? [$exact]
      : $self->api->paginator('groups',
        { search => $group, order_by => 'name' })->all;
    unless ($groups and @$groups) {
        ds_warn "No group found";
        return 1;
    }
    for my $g (@$groups) {
        print <<END;
Id : $g->{id}
Name : $g->{name}
Full name: $g->{full_name}
Full path: $g->{full_path}

END
    }
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/search_project.pm b/lib/Devscripts/Salsa/search_project.pm
new file mode 100644
index 0000000..34584bf
--- /dev/null
+++ b/lib/Devscripts/Salsa/search_project.pm
@@ -0,0 +1,57 @@
# Searches projects using given string
package Devscripts::Salsa::search_project;

use strict;
use Devscripts::Output;
use Moo::Role;

# Looks up $project first as an exact project path/id, then as a
# search string, and prints the matches with their owning group/user
# and fork origin. Returns 0 when something was found, 1 otherwise.
sub search_project {
    my ($self, $project) = @_;
    unless ($project) {
        ds_warn "Searched string is missing";
        return 1;
    }
    # Exact match first; fall back to a server-side search
    my $projects = $self->api->project($project);
    if ($projects) {
        $projects = [$projects];
    } else {
        $projects = $self->api->paginator(
            'projects',
            {
                search => $project,
                order_by => 'name',
                archived => $self->config->archived
            })->all();
    }
    unless ($projects and @$projects) {
        ds_warn "No projects found";
        return 1;
    }
    foreach (@$projects) {
        print <<END;
Id : $_->{id}
Name : $_->{name}
Full path: $_->{path_with_namespace}
END
        # Label the namespace as group or user depending on its kind
        print(
            $_->{namespace}->{kind} eq 'group'
            ? "Group id : "
            : "User id : "
        );
        print "$_->{namespace}->{id}\n";
        print(
            $_->{namespace}->{kind} eq 'group'
            ? "Group : "
            : "User : "
        );
        print "$_->{namespace}->{name}\n";
        # Mention the fork origin when GitLab reports one
        if ($_->{forked_from_project} and $_->{forked_from_project}->{id}) {
            print
              "Fork of : $_->{forked_from_project}->{name_with_namespace}\n";
        }
        print "\n";
    }
    return 0;
}

1;
diff --git a/lib/Devscripts/Salsa/search_user.pm b/lib/Devscripts/Salsa/search_user.pm
new file mode 100644
index 0000000..2a14580
--- /dev/null
+++ b/lib/Devscripts/Salsa/search_user.pm
@@ -0,0 +1,36 @@
+# Searches users using given string
+package Devscripts::Salsa::search_user;
+
+use strict;
+use Devscripts::Output;
+use Moo::Role;
+
+# Looks up GitLab users matching $user. An exact lookup is attempted
+# first; if it fails, a paginated search is performed. Prints one
+# record per user found. Returns 0 on success, 1 on error.
+sub search_user {
+    my ($self, $user) = @_;
+    if (!$user) {
+        ds_warn "User name is missing";
+        return 1;
+    }
+    # Prefer an exact match; fall back to a full search otherwise
+    my $exact = $self->api->user($user);
+    my $users
+      = $exact
+      ? [$exact]
+      : $self->api->paginator('users', { search => $user })->all();
+    if (!($users and @$users)) {
+        ds_warn "No user found";
+        return 1;
+    }
+    foreach (@$users) {
+        print <<END;
+Id : $_->{id}
+Username : $_->{username}
+Name : $_->{name}
+State : $_->{state}
+
+END
+    }
+    return 0;
+}
+
+1;
diff --git a/lib/Devscripts/Salsa/update_repo.pm b/lib/Devscripts/Salsa/update_repo.pm
new file mode 100644
index 0000000..7902313
--- /dev/null
+++ b/lib/Devscripts/Salsa/update_repo.pm
@@ -0,0 +1,140 @@
+# Updates repositories
+package Devscripts::Salsa::update_repo;
+
+use strict;
+use Devscripts::Output;
+use GitLab::API::v4::Constants qw(:all);
+use Moo::Role;
+
+with "Devscripts::Salsa::Repo";
+
+# Set to 0 by update_safe so get_repo() skips its confirmation prompt.
+our $prompt = 1;
+
+# Entry point: validates option combinations (-i, --all, --all-archived
+# vs explicit repo names), then delegates to _update_repo().
+# Returns 0 on success, non-zero on failure.
+sub update_repo {
+    my ($self, @reponames) = @_;
+    if ($ds_yes < 0 and $self->config->command eq 'update_repo') {
+        ds_warn
+          "update_repo can't be launched when -i is set, use update_safe";
+        return 1;
+    }
+    unless (@reponames or $self->config->all or $self->config->all_archived) {
+        ds_warn "Usage $0 update_repo <--all|--all-archived|names>";
+        return 1;
+    }
+    if (@reponames and $self->config->all) {
+        ds_warn "--all with a reponame makes no sense";
+        return 1;
+    }
+    if (@reponames and $self->config->all_archived) {
+        ds_warn "--all-archived with a reponame makes no sense";
+        return 1;
+    }
+    return $self->_update_repo(@reponames);
+}
+
+# Applies the configured settings (visibility, description, hooks) to
+# each repository and optionally renames the default branch
+# (--rename-head). With --no-fail, failures are counted and processing
+# continues; otherwise the first error aborts with return value 1.
+# Returns the number of failed repositories (0 = full success).
+sub _update_repo {
+    my ($self, @reponames) = @_;
+    my $res = 0;
+    # Common options
+    my $configparams = {};
+    # visibility can be modified only by group owners
+    $configparams->{visibility} = 'public'
+      if $self->access_level >= $GITLAB_ACCESS_LEVEL_OWNER;
+    # get repo list using Devscripts::Salsa::Repo
+    my @repos = $self->get_repo($prompt, @reponames);
+    return @repos unless (ref $repos[0]);    # get_repo returns 1 when fails
+    foreach my $repo (@repos) {
+        ds_verbose "Configuring $repo->[1]";
+        my $id  = $repo->[0];
+        my $str = $repo->[1];
+        eval {
+            # apply new parameters
+            $self->api->edit_project(
+                $id,
+                {
+                    %$configparams,
+                    $self->desc($repo->[1]),
+                    $self->desc_multipart($repo->[1]) });
+            # add hooks if needed; hooks are keyed on the bare repo name
+            $str =~ s#^.*/##;
+            $self->add_hooks($id, $str);
+        };
+        if ($@) {
+            $res++;
+            if ($self->config->no_fail) {
+                ds_verbose $@;
+                ds_warn
+"update_repo has failed for $repo->[1]. Use --verbose to see errors\n";
+                next;
+            } else {
+                ds_warn $@;
+                return 1;
+            }
+        } elsif ($self->config->rename_head) {
+            # 1 - creates new branch if --rename-head
+            my $project = $self->api->project($id);
+            if ($project->{default_branch} ne $self->config->dest_branch) {
+                eval {
+                    $self->api->create_branch(
+                        $id,
+                        {
+                            ref    => $self->config->source_branch,
+                            branch => $self->config->dest_branch,
+                        });
+                };
+                # $project doubles as a flag: undef means create_branch
+                # failed, so the old branch must not be deleted below.
+                if ($@) {
+                    ds_debug $@ if ($@);
+                    $project = undef;
+                }
+
+                eval {
+                    $self->api->edit_project($id,
+                        { default_branch => $self->config->dest_branch });
+                    # delete old branch only if "create_branch" succeed
+                    if ($project) {
+                        $self->api->delete_branch($id,
+                            $self->config->source_branch);
+                    }
+                };
+                if ($@) {
+                    $res++;
+                    if ($self->config->no_fail) {
+                        ds_verbose $@;
+                        ds_warn
+"Branch rename has failed for $repo->[1]. Use --verbose to see errors\n";
+                        next;
+                    } else {
+                        ds_warn $@;
+                        return 1;
+                    }
+                }
+            } else {
+                ds_verbose "Head already renamed for $str";
+            }
+        }
+        ds_verbose "Project $str updated";
+    }
+    return $res;
+}
+
+# Returns the current user's access level in the configured group, or
+# $GITLAB_ACCESS_LEVEL_OWNER when no group is set (personal namespace).
+# Falls back to paging through all group members when the direct
+# group_member lookup fails; returns 0 if the user is not a member.
+sub access_level {
+    my ($self) = @_;
+    my $user_id = $self->api->current_user()->{id};
+    if ($self->group_id) {
+        my $tmp = $self->api->group_member($self->group_id, $user_id);
+        unless ($tmp) {
+            my $members
+              = $self->api->paginator('all_group_members', $self->group_id,
+                { query => $user_id });
+            while ($_ = $members->next) {
+                return $_->{access_level} if ($_->{id} eq $user_id);
+            }
+            ds_warn "You're not member of this group";
+            return 0;
+        }
+        return $tmp->{access_level};
+    }
+    return $GITLAB_ACCESS_LEVEL_OWNER;
+}
+
+1;
diff --git a/lib/Devscripts/Salsa/update_safe.pm b/lib/Devscripts/Salsa/update_safe.pm
new file mode 100644
index 0000000..6d32e88
--- /dev/null
+++ b/lib/Devscripts/Salsa/update_safe.pm
@@ -0,0 +1,22 @@
+# Launches check_repo, then update_repo if the user agrees with the changes
+package Devscripts::Salsa::update_safe;
+
+use strict;
+use Devscripts::Output;
+use Moo::Role;
+
+with 'Devscripts::Salsa::check_repo';
+with 'Devscripts::Salsa::update_repo';
+
+# Interactive wrapper around update_repo: reports the repositories that
+# _check_repo() finds misconfigured and, on user confirmation, fixes
+# exactly those via _update_repo(). Returns 0 when nothing needs fixing,
+# the failure count if the user declines, or _update_repo()'s result.
+sub update_safe {
+    my $self = shift;
+    my ($res, $fails) = $self->_check_repo(@_);
+    return 0 unless ($res);
+    return $res
+      if (ds_prompt("$res packages misconfigured, update them ? (Y/n) ")
+        =~ refuse);
+    # The user just agreed; suppress update_repo's own prompt.
+    $Devscripts::Salsa::update_repo::prompt = 0;
+    return $self->_update_repo(@$fails);
+}
+
+1;
diff --git a/lib/Devscripts/Salsa/update_user.pm b/lib/Devscripts/Salsa/update_user.pm
new file mode 100644
index 0000000..f7dfeba
--- /dev/null
+++ b/lib/Devscripts/Salsa/update_user.pm
@@ -0,0 +1,38 @@
+# Updates user role in a group
+package Devscripts::Salsa::update_user;
+
+use strict;
+use Devscripts::Output;
+use Moo::Role;
+
+# Changes the access level of $user in the group given by --group-id.
+# Arguments: $level - role name (e.g. "developer"), $user - username.
+# Returns 0 on success, 1 on usage/configuration error or user refusal.
+sub update_user {
+    my ($self, $level, $user) = @_;
+    unless ($level and $user) {
+        ds_warn "Usage $0 update_user <level> <userid>";
+        return 1;
+    }
+    unless ($self->group_id) {
+        ds_warn "Unable to update user without --group-id";
+        return 1;
+    }
+
+    my $id = $self->username2id($user);
+    my $al = $self->levels_name($level);
+    # NOTE(review): the prompt wording ("accept") appears copied from
+    # add_user; kept as-is to avoid churn in interactive output.
+    return 1
+      if (
+        $ds_yes < 0
+        and ds_prompt(
+"You're going to accept $user as $level in group $self->{group_id}. Continue (Y/n) "
+        ) =~ refuse
+      );
+    $self->api->update_group_member(
+        $self->group_id,
+        $id,
+        {
+            access_level => $al,
+        });
+    # Fixed: the previous message wrongly said the user was "removed"
+    # (copy/paste from del_user); this command updates the user's role.
+    ds_warn "User $user updated in group " . $self->group_id;
+    return 0;
+}
+
+1;
diff --git a/lib/Devscripts/Salsa/whoami.pm b/lib/Devscripts/Salsa/whoami.pm
new file mode 100644
index 0000000..176e591
--- /dev/null
+++ b/lib/Devscripts/Salsa/whoami.pm
@@ -0,0 +1,24 @@
+# Gives information on token owner
+package Devscripts::Salsa::whoami;
+
+use strict;
+use Devscripts::Output;
+use Moo::Role;
+
+# Prints the identity of the user owning the configured API token and
+# primes the username/id caches for later commands. Always returns 0.
+sub whoami {
+    my ($self) = @_;
+    my $current_user = $self->api->current_user;
+    print <<END;
+Id : $current_user->{id}
+Username: $current_user->{username}
+Name : $current_user->{name}
+Email : $current_user->{email}
+State : $current_user->{state}
+END
+    # Cache both directions (id -> username and username -> id)
+    $self->cache->{user}->{ $current_user->{id} } = $current_user->{username};
+    $self->cache->{user_id}->{ $current_user->{username} }
+      = $current_user->{id};
+    return 0;
+}
+
+1;
diff --git a/lib/Devscripts/Set.pm b/lib/Devscripts/Set.pm
new file mode 100644
index 0000000..9ad4e1f
--- /dev/null
+++ b/lib/Devscripts/Set.pm
@@ -0,0 +1,126 @@
+# Copyright Bill Allombert <ballombe@debian.org> 2001.
+# Modifications copyright 2002 Julian Gilbey <jdg@debian.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+package Devscripts::Set;
+
+use strict;
+
+BEGIN {
+    use Exporter ();
+    use vars qw(@EXPORT @ISA %EXPORT_TAGS);
+    # Fixed: ListToSet is documented in the POD below but was missing
+    # from @EXPORT; added for consistency with the documentation.
+    @EXPORT      = qw(ListToSet SetMinus SetInter SetUnion);
+    @ISA         = qw(Exporter);
+    %EXPORT_TAGS = ();
+}
+
+# Several routines to work with arrays whose elements are unique
+# (here called sets)
+
+=head1 NAME
+
+Devscripts::Set - Functions for handling sets.
+
+=head1 SYNOPSIS
+
+use Devscripts::Set;
+
+@set=ListToSet(@list);
+
+@setdiff=SetMinus(\@set1,\@set2);
+
+@setinter=SetInter(\@set1,\@set2);
+
+@setunion=SetUnion(\@set1,\@set2);
+
+=head1 DESCRIPTION
+
+ListToSet: Make a set (array with duplicates removed) from a list of
+items given by an array.
+
+SetMinus, SetInter, SetUnion: Compute the set theoretic difference,
+intersection, union of two sets given as arrays.
+
+=cut
+
+# Transforms a list to a set, removing duplicates
+# input: list
+# output: set
+
+sub ListToSet (@) {
+    my %items;
+
+    # Count occurrences; the hash keys are the unique elements.
+    # (Was "grep ... , @_" in void context - same effect, clearer form.)
+    $items{$_}++ for @_;
+
+    return keys %items;
+}
+
+# Compute the set-theoretic difference of two sets.
+# input: ref to Set 1, ref to Set 2
+# output: set
+
+sub SetMinus ($$) {
+    my ($set1, $set2) = @_;
+    my %items;
+
+    # Elements of set1 get a positive count, elements of set2 cancel it
+    $items{$_}++ for @$set1;
+    $items{$_}-- for @$set2;
+
+    return grep $items{$_} > 0, keys %items;
+}
+
+# Compute the set-theoretic intersection of two sets.
+# input: ref to Set 1, ref to Set 2
+# output: set
+
+sub SetInter ($$) {
+    my ($set1, $set2) = @_;
+    my %items;
+
+    # An element appearing in both sets reaches a count of exactly 2
+    # (inputs are assumed to be true sets, i.e. duplicate-free)
+    $items{$_}++ for @$set1;
+    $items{$_}++ for @$set2;
+
+    return grep $items{$_} == 2, keys %items;
+}
+
+#Compute the set-theoretic union of two sets.
+#input: ref to Set 1, ref to Set 2
+#output: set
+
+sub SetUnion ($$) {
+    my ($set1, $set2) = @_;
+    my %items;
+
+    $items{$_}++ for @$set1;
+    $items{$_}++ for @$set2;
+
+    return grep $items{$_} > 0, keys %items;
+}
+
+1;
+
+=head1 AUTHOR
+
+Bill Allombert <ballombe@debian.org>
+
+=head1 COPYING
+
+Copyright 2001 Bill Allombert <ballombe@debian.org>
+Modifications Copyright 2002 Julian Gilbey <jdg@debian.org>
+dpkg-depcheck is free software, covered by the GNU General Public License, and
+you are welcome to change it and/or distribute copies of it under
+certain conditions. There is absolutely no warranty for dpkg-depcheck.
+
+=cut
diff --git a/lib/Devscripts/Uscan/CatchRedirections.pm b/lib/Devscripts/Uscan/CatchRedirections.pm
new file mode 100644
index 0000000..28f99ca
--- /dev/null
+++ b/lib/Devscripts/Uscan/CatchRedirections.pm
@@ -0,0 +1,27 @@
+# dummy subclass used to store all the redirections for later use
+package Devscripts::Uscan::CatchRedirections;
+
+use base 'LWP::UserAgent';
+
+# NOTE: package-level storage - the recorded redirections are shared by
+# all instances of this class, not tracked per user-agent object.
+my @uscan_redirections;
+
+# Records the URI of every redirect that the parent LWP::UserAgent
+# accepts, then returns the parent's decision (1 accepted, 0 refused).
+sub redirect_ok {
+    my $self = shift;
+    my ($request) = @_;
+    if ($self->SUPER::redirect_ok(@_)) {
+        push @uscan_redirections, $request->uri;
+        return 1;
+    }
+    return 0;
+}
+
+# Returns a reference to the list of recorded redirection URIs.
+sub get_redirections {
+    return \@uscan_redirections;
+}
+
+# Empties the recorded redirection list.
+sub clear_redirections {
+    undef @uscan_redirections;
+    return;
+}
+
+1;
diff --git a/lib/Devscripts/Uscan/Config.pm b/lib/Devscripts/Uscan/Config.pm
new file mode 100644
index 0000000..5ea97e1
--- /dev/null
+++ b/lib/Devscripts/Uscan/Config.pm
@@ -0,0 +1,392 @@
+
+=head1 NAME
+
+Devscripts::Uscan::Config - uscan configuration object
+
+=head1 SYNOPSIS
+
+ use Devscripts::Uscan::Config;
+ my $config = Devscripts::Uscan::Config->new->parse;
+
+=head1 DESCRIPTION
+
+Uscan configuration object. It can scan configuration files
+(B</etc/devscripts.conf> and B<~/.devscripts>) and command line arguments.
+
+=cut
+
+package Devscripts::Uscan::Config;
+
+use strict;
+
+use Devscripts::Uscan::Output;
+use Exporter 'import';
+use Moo;
+
+extends 'Devscripts::Config';
+
+# Highest debian/watch format version understood by this uscan
+our $CURRENT_WATCHFILE_VERSION = 4;
+
+use constant default_user_agent => "Debian uscan"
+  . ($main::uscan_version ? " $main::uscan_version" : '');
+
+our @EXPORT = (qw($CURRENT_WATCHFILE_VERSION));
+
+# I - ACCESSORS
+
+# Options + default values
+
+has bare                     => (is => 'rw');
+has check_dirname_level      => (is => 'rw');
+has check_dirname_regex      => (is => 'rw');
+has compression              => (is => 'rw');
+has copyright_file           => (is => 'rw');
+has destdir                  => (is => 'rw');
+has download                 => (is => 'rw');
+has download_current_version => (is => 'rw');
+has download_debversion      => (is => 'rw');
+has download_version         => (is => 'rw');
+has exclusion                => (is => 'rw');
+has log                      => (is => 'rw');
+has orig                     => (is => 'rw');
+has package                  => (is => 'rw');
+has pasv                     => (is => 'rw');
+has http_header              => (is => 'rw', default => sub { {} });
+
+# repack to .tar.$zsuffix if 1
+has repack     => (is => 'rw');
+has safe       => (is => 'rw');
+has signature  => (is => 'rw');
+has symlink    => (is => 'rw');
+has timeout    => (is => 'rw');
+has user_agent => (is => 'rw');
+has uversion   => (is => 'rw');
+has watchfile  => (is => 'rw');
+
+# II - Options
+
+# Each entry: [Getopt::Long spec, devscripts.conf variable,
+#              validation (regex/'bool'/callback), default]
+use constant keys => [
+    # 2.1 - Simple parameters that can be set in ~/.devscripts and command line
+    [
+        'check-dirname-level=s', 'DEVSCRIPTS_CHECK_DIRNAME_LEVEL',
+        qr/^[012]$/, 1
+    ],
+    [
+        'check-dirname-regex=s', 'DEVSCRIPTS_CHECK_DIRNAME_REGEX',
+        undef, 'PACKAGE(-.+)?'
+    ],
+    ['dehs!', 'USCAN_DEHS_OUTPUT', sub { $dehs = $_[1]; 1 }],
+    [
+        'destdir=s',
+        'USCAN_DESTDIR',
+        sub {
+            if (-d $_[1]) {
+                $_[0]->destdir($_[1]) if (-d $_[1]);
+                return 1;
+            }
+            return (0,
+                "The directory to store downloaded files(\$destdir): $_[1]");
+        },
+        '..'
+    ],
+    ['exclusion!', 'USCAN_EXCLUSION', 'bool', 1],
+    ['timeout=i', 'USCAN_TIMEOUT', qr/^\d+$/, 20],
+    [
+        'user-agent|useragent=s',
+        'USCAN_USER_AGENT',
+        qr/\w/,
+        sub {
+            default_user_agent;
+        }
+    ],
+    ['repack', 'USCAN_REPACK', 'bool'],
+    # 2.2 - Simple command line args
+    ['bare', undef, 'bool', 0],
+    ['compression=s'],
+    ['copyright-file=s'],
+    ['download-current-version', undef, 'bool'],
+    ['download-version=s'],
+    ['download-debversion|dversion=s'],
+    ['log', undef, 'bool'],
+    ['package=s'],
+    ['uversion|upstream-version=s'],
+    ['watchfile=s'],
+    # 2.3 - More complex options
+    # http headers (#955268)
+    ['http-header=s', 'USCAN_HTTP_HEADER', undef, sub { {} }],
+
+    # "download" and its aliases
+    [
+        undef,
+        'USCAN_DOWNLOAD',
+        sub {
+            return (1, 'Bad USCAN_DOWNLOAD value, skipping')
+              unless ($_[1] =~ /^(?:yes|(no))$/i);
+            $_[0]->download(0) if $1;
+            return 1;
+        }
+    ],
+    [
+        'download|d+',
+        undef,
+        sub {
+            $_[1] =~ s/^yes$/1/i;
+            $_[1] =~ s/^no$/0/i;
+            return (0, "Wrong number of -d")
+              unless ($_[1] =~ /^[0123]$/);
+            $_[0]->download($_[1]);
+            return 1;
+        },
+        1
+    ],
+    [
+        'force-download',
+        undef,
+        sub {
+            $_[0]->download(2);
+        }
+    ],
+    ['no-download', undef, sub { $_[0]->download(0); return 1; }],
+    ['overwrite-download', undef, sub { $_[0]->download(3) }],
+
+    # "pasv"
+    [
+        'pasv|passive',
+        'USCAN_PASV',
+        sub {
+            return $_[0]->pasv('default')
+              unless ($_[1] =~ /^(yes|0|1|no)$/);
+            $_[0]->pasv({
+                    yes => 1,
+                    1   => 1,
+                    no  => 0,
+                    0   => 0,
+                }->{$1});
+            return 1;
+        },
+        0
+    ],
+
+    # "safe" and "symlink" and their aliases
+    ['safe|report', 'USCAN_SAFE', 'bool', 0],
+    [
+        'report-status',
+        undef,
+        sub {
+            $_[0]->safe(1);
+            $verbose ||= 1;
+        }
+    ],
+    ['copy', undef, sub { $_[0]->symlink('copy') }],
+    ['rename', undef, sub { $_[0]->symlink('rename') if ($_[1]); 1; }],
+    [
+        'symlink!',
+        'USCAN_SYMLINK',
+        sub {
+            $_[0]->symlink(
+                  $_[1] =~ /^(no|0|rename)$/   ? $1
+                : $_[1] =~ /^(yes|1|symlink)$/ ? 'symlink'
+                :                                'no'
+            );
+            return 1;
+        },
+        'symlink'
+    ],
+    # "signature" and its aliases
+    ['signature!', undef, 'bool', 1],
+    ['skipsignature|skip-signature', undef, sub { $_[0]->signature(-1) }],
+    # "verbose" and its aliases
+    ['debug', undef, sub { $verbose = 2 }],
+    ['extra-debug', undef, sub { $verbose = 3 }],
+    ['no-verbose', undef, sub { $verbose = 0; return 1; }],
+    [
+        'verbose|v+',
+        'USCAN_VERBOSE',
+        sub {
+            $verbose = ($_[1] =~ /^yes$/i ? 1 : $_[1] =~ /^(\d)$/ ? $1 : 0);
+            return 1;
+        }
+    ],
+    # Display version
+    [
+        'version',
+        undef,
+        sub {
+            if ($_[1]) { $_[0]->version; exit 0 }
+        }
+    ]];
+
+# Post-parse consistency rules, applied in order by Devscripts::Config
+use constant rules => [
+    sub {
+        my $self = shift;
+        if ($self->package) {
+            $self->download(0)
+              unless ($self->download > 1);    # compatibility
+            return (0,
+"The --package option requires to set the --watchfile option, too."
+            ) unless defined $self->watchfile;
+        }
+        $self->download(0) if ($self->safe == 1 and $self->download == 1);
+        return 1;
+    },
+    # $signature: -1 = no downloading signature and no verifying signature,
+    # 0 = no downloading signature but verifying signature,
+    # 1 = downloading signature and verifying signature
+    sub {
+        my $self = shift;
+        $self->signature(-1)
+          if $self->download == 0;    # Change default 1 -> -1
+        return 1;
+    },
+    sub {
+        if (defined $_[0]->watchfile and @ARGV) {
+            return (0, "Can't have directory arguments if using --watchfile");
+        }
+        return 1;
+    },
+];
+
+# help methods
+
+# Prints the --help text (option summary) to STDOUT.
+sub usage {
+    my ($self) = @_;
+    print <<"EOF";
+Usage: $progname [options] [dir ...]
+ Process watch files in all .../debian/ subdirs of those listed (or the
+ current directory if none listed) to check for upstream releases.
+Options:
+ --no-conf, --noconf
+ Don\'t read devscripts config files;
+ must be the first option given
+ --no-verbose Don\'t report verbose information.
+ --verbose, -v Report verbose information.
+ --debug, -vv Report verbose information including the downloaded
+ web pages as processed to STDERR for debugging.
+ --extra-debug, -vvv Report also remote content during "search" step
+ --dehs Send DEHS style output (XML-type) to STDOUT, while
+ send all other uscan output to STDERR.
+ --no-dehs Use only traditional uscan output format (default)
+ --download, -d
+ Download the new upstream release (default)
+ --force-download, -dd
+ Download the new upstream release, even if up-to-date
+ (may not overwrite the local file)
+ --overwrite-download, -ddd
+ Download the new upstream release, even if up-to-date
+ (may overwrite the local file)
+ --no-download, --nodownload
+ Don\'t download and report information.
+ Previously downloaded tarballs may be used.
+ Change default to --skip-signature.
+ --signature Download signature and verify (default)
+ --no-signature Don\'t download signature but verify if already downloaded.
+ --skip-signature
+ Don\'t bother download signature nor verify it.
+ --safe, --report
+ avoid running unsafe scripts by skipping both the repacking
+ of the downloaded package and the updating of the new
+ source tree. Change default to --no-download and
+ --skip-signature.
+ --report-status (= --safe --verbose)
+ --download-version VERSION
+ Specify the version which the upstream release must
+ match in order to be considered, rather than using the
+ release with the highest version
+ --download-debversion VERSION
+ Specify the Debian package version to download the
+ corresponding upstream release version. The
+ dversionmangle and uversionmangle rules are
+ considered.
+ --download-current-version
+ Download the currently packaged version
+ --check-dirname-level N
+ Check parent directory name?
+ N=0 never check parent directory name
+ N=1 only when $progname changes directory (default)
+ N=2 always check parent directory name
+ --check-dirname-regex REGEX
+ What constitutes a matching directory name; REGEX is
+ a Perl regular expression; the string \`PACKAGE\' will
+ be replaced by the package name; see manpage for details
+ (default: 'PACKAGE(-.+)?')
+ --destdir Path of directory to which to download.
+ --package PACKAGE
+ Specify the package name rather than examining
+ debian/changelog; must use --upstream-version and
+ --watchfile with this option, no directory traversing
+ will be performed, no actions (even downloading) will be
+ carried out
+ --upstream-version VERSION
+ Specify the current upstream version in use rather than
+ parsing debian/changelog to determine this
+ --watchfile FILE
+ Specify the watch file rather than using debian/watch;
+ no directory traversing will be done in this case
+ --bare Disable all site specific special case codes to perform URL
+ redirections and page content alterations.
+ --no-exclusion Disable automatic exclusion of files mentioned in
+ debian/copyright field Files-Excluded and Files-Excluded-*
+ --pasv Use PASV mode for FTP connections
+ --no-pasv Don\'t use PASV mode for FTP connections (default)
+ --no-symlink Don\'t rename nor repack upstream tarball
+ --timeout N Specifies how much time, in seconds, we give remote
+ servers to respond (default 20 seconds)
+ --user-agent, --useragent
+ Override the default user agent string
+ --log Record md5sum changes of repackaging
+ --help Show this message
+ --version Show version information
+
+Options passed on to mk-origtargz:
+ --symlink Create a correctly named symlink to downloaded file (default)
+ --rename Rename instead of symlinking
+ --copy Copy instead of symlinking
+ --repack Repack downloaded archives to change compression
+ --compression [ gzip | bzip2 | lzma | xz ]
+ When the upstream sources are repacked, use compression COMP
+ for the resulting tarball (default: gzip)
+ --copyright-file FILE
+ Remove files matching the patterns found in FILE
+
+Default settings modified by devscripts configuration files:
+$self->{modified_conf_msg}
+EOF
+}
+
+# Prints the --version banner to STDOUT.
+sub version {
+    print <<"EOF";
+This is $progname, from the Debian devscripts package, version $main::uscan_version
+This code is copyright 1999-2006 by Julian Gilbey and 2018 by Xavier Guimard,
+all rights reserved.
+Original code by Christoph Lameter.
+This program comes with ABSOLUTELY NO WARRANTY.
+You are free to redistribute this code under the terms of the
+GNU General Public License, version 2 or later.
+EOF
+}
+
+1;
+__END__
+=head1 SEE ALSO
+
+L<uscan>, L<Devscripts::Config>
+
+=head1 AUTHOR
+
+B<uscan> was originally written by Christoph Lameter
+E<lt>clameter@debian.orgE<gt> (I believe), modified by Julian Gilbey
+E<lt>jdg@debian.orgE<gt>. HTTP support was added by Piotr Roszatycki
+E<lt>dexter@debian.orgE<gt>. B<uscan> was rewritten in Perl by Julian Gilbey.
+Xavier Guimard E<lt>yadd@debian.orgE<gt> rewrote uscan in object
+oriented Perl.
+
+=head1 COPYRIGHT AND LICENSE
+
+Copyright 2002-2006 by Julian Gilbey <jdg@debian.org>,
+2018 by Xavier Guimard <yadd@debian.org>
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2 of the License, or
+(at your option) any later version.
+
+=cut
diff --git a/lib/Devscripts/Uscan/Ctype/nodejs.pm b/lib/Devscripts/Uscan/Ctype/nodejs.pm
new file mode 100644
index 0000000..6a89063
--- /dev/null
+++ b/lib/Devscripts/Uscan/Ctype/nodejs.pm
@@ -0,0 +1,36 @@
+package Devscripts::Uscan::Ctype::nodejs;
+
+use strict;
+
+use Moo;
+use JSON;
+use Devscripts::Uscan::Output;
+
+# Extracts the current upstream version of a Node.js package from the
+# "version" field of <dir>/package.json.
+
+has dir => (is => 'ro');
+# Path to package.json, derived lazily from dir
+has pkg => (
+    is   => 'rw',
+    lazy => 1,
+    default => sub {
+        $_[0]->{dir} . '/package.json';
+    });
+
+# Returns the "version" value from package.json, or undef when the
+# directory or file is missing/unreadable or the JSON cannot be parsed.
+sub version {
+    my ($self) = @_;
+    return unless $self->dir and -d $self->dir;
+    unless (-r $self->pkg) {
+        uscan_warn "Unable to read $self->{pkg}, skipping current version";
+        return;
+    }
+    my ($version, $content);
+    {
+        local $/ = undef;
+        # Fixed: three-argument open with an explicit failure check (the
+        # file could vanish between the -r test above and this open).
+        open my $f, '<', $self->pkg or do {
+            uscan_warn "Unable to open $self->{pkg}: $!";
+            return;
+        };
+        $content = <$f>;
+        close $f;
+    }
+    eval { $version = decode_json($content)->{version}; };
+    uscan_warn $@ if $@;
+    return $version;
+}
+
+1;
diff --git a/lib/Devscripts/Uscan/Ctype/perl.pm b/lib/Devscripts/Uscan/Ctype/perl.pm
new file mode 100644
index 0000000..ea06cfb
--- /dev/null
+++ b/lib/Devscripts/Uscan/Ctype/perl.pm
@@ -0,0 +1,36 @@
+package Devscripts::Uscan::Ctype::perl;
+
+use strict;
+
+use Moo;
+use JSON;
+use Devscripts::Uscan::Output;
+
+# Extracts the current upstream version of a Perl distribution from the
+# "version" field of <dir>/META.json.
+
+has dir => (is => 'ro');
+# Path to META.json, derived lazily from dir
+has pkg => (
+    is   => 'rw',
+    lazy => 1,
+    default => sub {
+        $_[0]->{dir} . '/META.json';
+    });
+
+# Returns the "version" value from META.json, or undef when the
+# directory or file is missing/unreadable or the JSON cannot be parsed.
+sub version {
+    my ($self) = @_;
+    return unless $self->dir and -d $self->dir;
+    unless (-r $self->pkg) {
+        uscan_warn "Unable to read $self->{pkg}, skipping current version";
+        return;
+    }
+    my ($version, $content);
+    {
+        local $/ = undef;
+        # Fixed: three-argument open with an explicit failure check (the
+        # file could vanish between the -r test above and this open).
+        open my $f, '<', $self->pkg or do {
+            uscan_warn "Unable to open $self->{pkg}: $!";
+            return;
+        };
+        $content = <$f>;
+        close $f;
+    }
+    eval { $version = decode_json($content)->{version}; };
+    uscan_warn $@ if $@;
+    return $version;
+}
+
+1;
diff --git a/lib/Devscripts/Uscan/Downloader.pm b/lib/Devscripts/Uscan/Downloader.pm
new file mode 100644
index 0000000..292884f
--- /dev/null
+++ b/lib/Devscripts/Uscan/Downloader.pm
@@ -0,0 +1,303 @@
+package Devscripts::Uscan::Downloader;
+
+use strict;
+use Cwd qw/cwd abs_path/;
+use Devscripts::Uscan::CatchRedirections;
+use Devscripts::Uscan::Output;
+use Devscripts::Uscan::Utils;
+use Dpkg::IPC;
+use File::DirList;
+use File::Find;
+use File::Temp qw/tempdir/;
+use File::Touch;
+use Moo;
+use URI;
+
+# True when LWP::Protocol::https is available (set in BEGIN below)
+our $haveSSL;
+
+has git_upstream => (is => 'rw');
+
+# Fail early with a helpful message when libwww-perl is missing, and
+# record whether https support is available.
+BEGIN {
+    eval { require LWP::UserAgent; };
+    if ($@) {
+        my $progname = basename($0);
+        if ($@ =~ /^Can\'t locate LWP\/UserAgent\.pm/) {
+            die "$progname: you must have the libwww-perl package installed\n"
+              . "to use this script";
+        } else {
+            die "$progname: problem loading the LWP::UserAgent module:\n $@\n"
+              . "Have you installed the libwww-perl package?";
+        }
+    }
+    eval { require LWP::Protocol::https; };
+    $haveSSL = $@ ? 0 : 1;
+}
+
+has agent =>
+  (is => 'rw', default => sub { "Debian uscan $main::uscan_version" });
+has timeout => (is => 'rw');
+# FTP passive mode: a true value exports FTP_PASSIVE to the environment,
+# a false value removes it.
+has pasv => (
+    is      => 'rw',
+    default => 'default',
+    trigger => sub {
+        my ($self, $nv) = @_;
+        if ($nv) {
+            uscan_verbose "Set passive mode: $self->{pasv}";
+            $ENV{'FTP_PASSIVE'} = $self->pasv;
+        } elsif ($ENV{'FTP_PASSIVE'}) {
+            uscan_verbose "Unset passive mode";
+            delete $ENV{'FTP_PASSIVE'};
+        }
+    });
+has destdir => (is => 'rw');
+
+# 0: no repo, 1: shallow clone, 2: full clone
+has gitrepo_state => (
+    is      => 'rw',
+    default => sub { 0 });
+has git_export_all => (
+    is      => 'rw',
+    default => sub { 0 });
+# Lazily-built LWP user agent that records redirections
+# (CatchRedirections subclass) for later inspection.
+has user_agent => (
+    is   => 'rw',
+    lazy => 1,
+    default => sub {
+        my ($self) = @_;
+        my $user_agent
+          = Devscripts::Uscan::CatchRedirections->new(env_proxy => 1);
+        $user_agent->timeout($self->timeout);
+        $user_agent->agent($self->agent);
+
+        # Strip Referer header for Sourceforge to avoid SF sending back a
+        # "200 OK" with a <meta refresh=...> redirect
+        $user_agent->add_handler(
+            'request_prepare' => sub {
+                my ($request, $ua, $h) = @_;
+                $request->remove_header('Referer');
+            },
+            m_hostname => 'sourceforge.net',
+        );
+        $self->{user_agent} = $user_agent;
+    });
+
+has ssl => (is => 'rw', default => sub { $haveSSL });
+
+# Per-host custom HTTP headers, keyed as "<base-url>@<header-name>"
+has headers => (
+    is      => 'ro',
+    default => sub { {} });
+
+# Downloads $url into $fname via http, ftp, git or svn ($mode; defaults
+# to $optref->mode). $base is the Referer (http) or the repository URL
+# (git); $pkg/$pkg_dir identify the package; $gitrepo_dir is the
+# temporary bare-clone directory. Returns 1 on success, 0 on failure.
+# NOTE(review): the prototype lists 8 scalars but the sub unpacks 9
+# values; prototypes are ignored for method calls anyway.
+sub download ($$$$$$$$) {
+    my (
+        $self,    $url, $fname, $optref, $base,
+        $pkg_dir, $pkg, $mode,  $gitrepo_dir
+    ) = @_;
+    my ($request, $response);
+    $mode ||= $optref->mode;
+    if ($mode eq 'http') {
+        if ($url =~ /^https/ and !$self->ssl) {
+            uscan_die "$progname: you must have the "
+              . "liblwp-protocol-https-perl package installed\n"
+              . "to use https URLs";
+        }
+
+        # substitute HTML entities
+        # Is anything else than "&amp;" required? I doubt it.
+        uscan_verbose "Requesting URL:\n $url";
+        my $headers = HTTP::Headers->new;
+        $headers->header('Accept' => '*/*');
+        $headers->header('Referer' => $base);
+        my $uri_o = URI->new($url);
+        # Apply matching per-host custom headers ("<base>@<header>" keys)
+        foreach my $k (keys %{ $self->headers }) {
+            if ($k =~ /^(.*?)@(.*)$/) {
+                my $baseUrl = $1;
+                my $hdr     = $2;
+                if ($url =~ m#^\Q$baseUrl\E(?:/.*)?$#) {
+                    $headers->header($hdr => $self->headers->{$k});
+                    uscan_verbose "Set per-host custom header $hdr for $url";
+                } else {
+                    uscan_debug "$url does not start with $1";
+                }
+            } else {
+                uscan_warn "Malformed http-header: $k";
+            }
+        }
+        $request  = HTTP::Request->new('GET', $url, $headers);
+        $response = $self->user_agent->request($request, $fname);
+        if (!$response->is_success) {
+            uscan_warn((defined $pkg_dir ? "In directory $pkg_dir, d" : "D")
+                . "ownloading\n $url failed: "
+                . $response->status_line);
+            return 0;
+        }
+    } elsif ($mode eq 'ftp') {
+        uscan_verbose "Requesting URL:\n $url";
+        $request  = HTTP::Request->new('GET', "$url");
+        $response = $self->user_agent->request($request, $fname);
+        if (!$response->is_success) {
+            uscan_warn(
+                (defined $pkg_dir ? "In directory $pkg_dir, d" : "D")
+                . "ownloading\n $url failed: "
+                . $response->status_line);
+            return 0;
+        }
+    } else {    # elsif ($$optref{'mode'} eq 'git')
+        my $destdir = $self->destdir;
+        my $curdir  = cwd();
+        # Derive destination dir, version and compression suffix from
+        # the expected tarball path.
+        $fname =~ m%(.*)/$pkg-([^_/]*)\.tar\.(gz|xz|bz2|lzma|zstd?)%;
+        my $dst     = $1;
+        my $abs_dst = abs_path($dst);
+        my $ver     = $2;
+        my $suffix  = $3;
+        my ($gitrepo, $gitref) = split /[[:space:]]+/, $url, 2;
+        my $clean = sub {
+            uscan_exec_no_fail('rm', '-fr', $gitrepo_dir);
+        };
+        my $clean_and_die = sub {
+            $clean->();
+            uscan_die @_;
+        };
+
+        if ($mode eq 'svn') {
+            my $tempdir   = tempdir(CLEANUP => 1);
+            my $old_umask = umask(oct('022'));
+            uscan_exec('svn', 'export', $url, "$tempdir/$pkg-$ver");
+            umask($old_umask);
+            # Make directory mtimes reproducible: set each directory's
+            # mtime to that of its newest entry (depth-first).
+            find({
+                    wanted => sub {
+                        return if !-d $File::Find::name;
+                        my ($newest) = grep { $_ ne '.' && $_ ne '..' }
+                          map { $_->[13] } @{ File::DirList::list($_, 'M') };
+                        return if !$newest;
+                        my $touch
+                          = File::Touch->new(reference => $_ . '/' . $newest);
+                        $touch->touch($_);
+                    },
+                    bydepth  => 1,
+                    no_chdir => 1,
+                },
+                "$tempdir/$pkg-$ver"
+            );
+            uscan_exec(
+                'tar',          '-C',
+                $tempdir,       '--sort=name',
+                '--owner=root', '--group=root',
+                '-cvf',         "$abs_dst/$pkg-$ver.tar",
+                "$pkg-$ver"
+            );
+        } elsif ($self->git_upstream) {
+            my ($infodir, $attr_file, $attr_bkp);
+            if ($self->git_export_all) {
+                # override any export-subst and export-ignore attributes
+                spawn(
+                    exec      => [qw|git rev-parse --git-path info/|],
+                    to_string => \$infodir,
+                );
+                chomp $infodir;
+                mkdir $infodir unless -e $infodir;
+                spawn(
+                    exec      => [qw|git rev-parse --git-path info/attributes|],
+                    to_string => \$attr_file,
+                );
+                chomp $attr_file;
+                spawn(
+                    exec =>
+                      [qw|git rev-parse --git-path info/attributes-uscan|],
+                    to_string => \$attr_bkp,
+                );
+                chomp $attr_bkp;
+                # Back up any existing attributes file so it can be
+                # restored after the export.
+                rename $attr_file, $attr_bkp if -e $attr_file;
+                my $attr_fh;
+
+                unless (open($attr_fh, '>', $attr_file)) {
+                    rename $attr_bkp, $attr_file if -e $attr_bkp;
+                    uscan_die("could not open $attr_file for writing");
+                }
+                print $attr_fh "* -export-subst\n* -export-ignore\n";
+                close $attr_fh;
+            }
+
+            uscan_exec_no_fail('git', 'archive', '--format=tar',
+                "--prefix=$pkg-$ver/", "--output=$abs_dst/$pkg-$ver.tar",
+                $gitref) == 0
+              or $clean_and_die->("git archive failed");
+
+            if ($self->git_export_all) {
+                # restore attributes
+                if (-e $attr_bkp) {
+                    rename $attr_bkp, $attr_file;
+                } else {
+                    unlink $attr_file;
+                }
+            }
+        } else {
+            # Remote repository: clone once (shallow if possible), reuse
+            # the bare clone on later calls (gitrepo_state tracks this).
+            if ($self->gitrepo_state == 0) {
+                if ($optref->gitmode eq 'shallow') {
+                    my $tag = $gitref;
+                    $tag =~ s#^refs/(?:tags|heads)/##;
+                    uscan_exec('git', 'clone', '--bare', '--depth=1', '-b',
+                        $tag, $base, "$destdir/$gitrepo_dir");
+                    $self->gitrepo_state(1);
+                } else {
+                    uscan_exec('git', 'clone', '--bare', $base,
+                        "$destdir/$gitrepo_dir");
+                    $self->gitrepo_state(2);
+                }
+            }
+            if ($self->git_export_all) {
+                # override any export-subst and export-ignore attributes
+                my ($infodir, $attr_file);
+                spawn(
+                    exec => [
+                        'git', "--git-dir=$destdir/$gitrepo_dir",
+                        'rev-parse', '--git-path', 'info/'
+                    ],
+                    to_string => \$infodir,
+                );
+                chomp $infodir;
+                mkdir $infodir unless -e $infodir;
+                spawn(
+                    exec => [
+                        'git', "--git-dir=$destdir/$gitrepo_dir",
+                        'rev-parse', '--git-path',
+                        'info/attributes'
+                    ],
+                    to_string => \$attr_file,
+                );
+                chomp $attr_file;
+                my $attr_fh;
+                $clean_and_die->("could not open $attr_file for writing")
+                  unless open($attr_fh, '>', $attr_file);
+                print $attr_fh "* -export-subst\n* -export-ignore\n";
+                close $attr_fh;
+            }
+
+            uscan_exec_no_fail(
+                'git', "--git-dir=$destdir/$gitrepo_dir",
+                'archive', '--format=tar',
+                "--prefix=$pkg-$ver/", "--output=$abs_dst/$pkg-$ver.tar",
+                $gitref
+            ) == 0
+              or $clean_and_die->("git archive failed");
+        }
+
+        # Compress the exported tar with the requested suffix
+        chdir "$abs_dst" or $clean_and_die->("Unable to chdir($abs_dst): $!");
+        if ($suffix eq 'gz') {
+            uscan_exec("gzip", "-n", "-9", "$pkg-$ver.tar");
+        } elsif ($suffix eq 'xz') {
+            uscan_exec("xz", "$pkg-$ver.tar");
+        } elsif ($suffix eq 'bz2') {
+            uscan_exec("bzip2", "$pkg-$ver.tar");
+        } elsif ($suffix eq 'lzma') {
+            uscan_exec("lzma", "$pkg-$ver.tar");
+            #} elsif ($suffix =~ /^zstd?$/) {
+            # uscan_exec("zstd", "$pkg-$ver.tar");
+        } else {
+            $clean_and_die->("Unknown suffix file to repack: $suffix");
+        }
+        chdir "$curdir" or $clean_and_die->("Unable to chdir($curdir): $!");
+        $clean->();
+    }
+    return 1;
+}
+
+1;
diff --git a/lib/Devscripts/Uscan/FindFiles.pm b/lib/Devscripts/Uscan/FindFiles.pm
new file mode 100644
index 0000000..3f8f8b3
--- /dev/null
+++ b/lib/Devscripts/Uscan/FindFiles.pm
@@ -0,0 +1,257 @@
+
+=head1 NAME
+
+Devscripts::Uscan::FindFiles - watchfile finder
+
+=head1 SYNOPSIS
+
+ use Devscripts::Uscan::Config;
+ use Devscripts::Uscan::FindFiles;
+
+ # Get config
+ my $config = Devscripts::Uscan::Config->new->parse;
+
+ # Search watchfiles
+ my @wf = find_watch_files($config);
+
+=head1 DESCRIPTION
+
This package exports the B<find_watch_files()> function. This function
searches for Debian watchfiles according to the configuration parameters.
+
+=head1 SEE ALSO
+
+L<uscan>, L<Devscripts::Uscan::WatchFile>, L<Devscripts::Uscan::Config>
+
+=head1 AUTHOR
+
+B<uscan> was originally written by Christoph Lameter
+E<lt>clameter@debian.orgE<gt> (I believe), modified by Julian Gilbey
+E<lt>jdg@debian.orgE<gt>. HTTP support was added by Piotr Roszatycki
+E<lt>dexter@debian.orgE<gt>. B<uscan> was rewritten in Perl by Julian Gilbey.
+Xavier Guimard E<lt>yadd@debian.orgE<gt> rewrote uscan in object
+oriented Perl.
+
+=head1 COPYRIGHT AND LICENSE
+
+Copyright 2002-2006 by Julian Gilbey <jdg@debian.org>,
+2018 by Xavier Guimard <yadd@debian.org>
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2 of the License, or
+(at your option) any later version.
+
+=cut
+
+package Devscripts::Uscan::FindFiles;
+
+use strict;
+use filetest 'access';
+use Cwd qw/cwd/;
+use Exporter 'import';
+use Devscripts::Uscan::Output;
+use Devscripts::Versort;
+use Dpkg::Changelog::Parse qw(changelog_parse);
+use File::Basename;
+
+our @EXPORT = ('find_watch_files');
+
# find_watch_files($config) - locate the watch files to process.
#
# With --watchfile, returns a single entry
#     [dir, package, upstream_version, watchfile]
# using --package/--upstream-version when given, otherwise walking up
# to the nearest readable debian/changelog.
# Without --watchfile, scans the directories in @ARGV (default '.') for
# debian/ directories containing both a watch file and a readable
# changelog, and returns one entry per package:
#     [dir, package, upstream_version, "debian/watch", cwd]
# Duplicate packages under the same parent directory are resolved in
# favour of the highest Debian version (the list is version-sorted).
sub find_watch_files {
    my ($config) = @_;
    my $opwd = cwd();

    # when --watchfile is used
    if (defined $config->watchfile) {
        uscan_verbose "Option --watchfile=$config->{watchfile} used";

        # no directory traversing then, and things are very simple
        if (defined $config->package) {

            # no need to even look for a changelog!
            return (
                ['.', $config->package, $config->uversion, $config->watchfile]
            );
        } else {
            # Check for debian/changelog file
            until (-r 'debian/changelog') {
                chdir '..' or uscan_die "can't chdir ..: $!";
                if (cwd() eq '/') {
                    uscan_die "Are you in the source code tree?\n"
                      . "   Cannot find readable debian/changelog anywhere!";
                }
            }

            my ($package, $debversion, $uversion)
              = scan_changelog($config, $opwd, 1);

            return ([cwd(), $package, $uversion, $config->watchfile]);
        }
    }

    # when --watchfile is not used, scan watch files
    push @ARGV, '.' if !@ARGV;
    {
        # $" is the separator used when interpolating an array into a
        # string; the original set $, (the print field separator), which
        # had no effect on the message below.
        local $" = ',';
        uscan_verbose "Scan watch files in @ARGV";
    }

    # Run find to find the directories. We will handle filenames with spaces
    # correctly, which makes this code a little messier than it would be
    # otherwise.
    my @dirs;
    # Lexical filehandle instead of the original bareword FIND.
    open my $find_fh, '-|', 'find', @ARGV,
      qw{-follow -type d ( -name .git -prune -o -name debian -print ) }
      or uscan_die "Couldn't exec find: $!";

    while (<$find_fh>) {
        chomp;
        push @dirs, $_;
        uscan_debug "Found $_";
    }
    close $find_fh;

    uscan_die "No debian directories found" unless @dirs;

    my @debdirs = ();

    my $origdir = cwd;
    for my $dir (@dirs) {
        $dir =~ s%/debian$%%;

        unless (chdir $origdir) {
            uscan_warn "Couldn't chdir back to $origdir, skipping: $!";
            next;
        }
        unless (chdir $dir) {
            uscan_warn "Couldn't chdir $dir, skipping: $!";
            next;
        }

        uscan_verbose "Check debian/watch and debian/changelog in $dir";

        # Check for debian/watch file
        if (-r 'debian/watch') {
            unless (-r 'debian/changelog') {
                uscan_warn
                  "Problems reading debian/changelog in $dir, skipping";
                next;
            }
            my ($package, $debversion, $uversion)
              = scan_changelog($config, $opwd);
            next unless ($package);

            uscan_verbose
"package=\"$package\" version=\"$uversion\" (no epoch/revision)";
            push @debdirs, [$debversion, $dir, $package, $uversion];
        }
    }

    uscan_warn "No watch file found" unless @debdirs;

    # Was there a --upstream-version option?
    if (defined $config->uversion) {
        if (@debdirs == 1) {
            $debdirs[0][3] = $config->uversion;
        } else {
            uscan_warn
"ignoring --upstream-version as more than one debian/watch file found";
        }
    }

    # Now sort the list of directories, so that we process the most recent
    # directories first, as determined by the package version numbers
    @debdirs = Devscripts::Versort::deb_versort(@debdirs);

    # Now process the watch files in order. If a directory d has
    # subdirectories d/sd1/debian and d/sd2/debian, which each contain watch
    # files corresponding to the same package, then we only process the watch
    # file in the package with the latest version number.
    my %donepkgs;
    my @results;
    for my $debdir (@debdirs) {
        shift @$debdir;    # don't need the Debian version number any longer
        my $dir       = $$debdir[0];
        my $parentdir = dirname($dir);
        my $package   = $$debdir[1];
        my $version   = $$debdir[2];

        if (exists $donepkgs{$parentdir}{$package}) {
            uscan_warn
"Skipping $dir/debian/watch\n  as this package has already been found";
            next;
        }

        unless (chdir $origdir) {
            uscan_warn "Couldn't chdir back to $origdir, skipping: $!";
            next;
        }
        unless (chdir $dir) {
            uscan_warn "Couldn't chdir $dir, skipping: $!";
            next;
        }

        uscan_verbose
"$dir/debian/changelog sets package=\"$package\" version=\"$version\"";
        push @results, [$dir, $package, $version, "debian/watch", cwd];

        # Record the package so that the duplicate check above can fire;
        # the original never populated %donepkgs, making the check dead.
        $donepkgs{$parentdir}{$package} = 1;
    }
    unless (chdir $origdir) {
        uscan_die "Couldn't chdir back to $origdir! $!";
    }
    return @results;
}
+
# scan_changelog($config, $opwd, $die) - extract the package name and
# versions from debian/changelog in the current directory.
#
# Returns ($package, $debian_version, $upstream_version). On failure it
# dies (via uscan_die) when $die is true, otherwise warns "…, skipping"
# and returns undef. Also enforces the --check-dirname-level /
# --check-dirname-regex sanity check against the current directory.
sub scan_changelog {
    my ($config, $opwd, $die) = @_;
    # Error reporter: fatal or warn-and-skip depending on $die.
    my $out
      = $die
      ? sub { uscan_die(@_) }
      : sub { uscan_warn($_[0] . ', skipping'); return undef; };

    # Figure out package info we need
    my $changelog = eval { changelog_parse(); };
    if ($@) {
        return $out->("Problems parsing debian/changelog");
    }

    my ($package, $debversion, $uversion);
    $package = $changelog->{Source};
    return $out->("Problem determining the package name from debian/changelog")
      unless defined $package;
    $debversion = $changelog->{Version};
    return $out->("Problem determining the version from debian/changelog")
      unless defined $debversion;
    uscan_verbose
"package=\"$package\" version=\"$debversion\" (as seen in debian/changelog)";

    # Check the directory is properly named for safety
    # Level 2 always checks; level 1 only when we changed directory away
    # from where uscan was started.
    if ($config->check_dirname_level == 2
        or ($config->check_dirname_level == 1 and cwd() ne $opwd)) {
        my $good_dirname;
        my $re = $config->check_dirname_regex;
        $re =~ s/PACKAGE/\Q$package\E/g;
        # A regex containing '/' is matched against the full path,
        # otherwise only against the last path component.
        if ($re =~ m%/%) {
            $good_dirname = (cwd() =~ m%^$re$%);
        } else {
            $good_dirname = (basename(cwd()) =~ m%^$re$%);
        }
        return $out->("The directory name "
              . basename(cwd())
              . " doesn't match the requirement of\n"
              . "   --check-dirname-level=$config->{check_dirname_level} --check-dirname-regex=$re .\n"
              . "   Set --check-dirname-level=0 to disable this sanity check feature."
        ) unless $good_dirname;
    }

    # Get current upstream version number
    if (defined $config->uversion) {
        $uversion = $config->uversion;
    } else {
        $uversion = $debversion;
        $uversion =~ s/-[^-]+$//;    # revision
        $uversion =~ s/^\d+://;      # epoch
    }
    return ($package, $debversion, $uversion);
}
+1;
diff --git a/lib/Devscripts/Uscan/Keyring.pm b/lib/Devscripts/Uscan/Keyring.pm
new file mode 100644
index 0000000..0af6088
--- /dev/null
+++ b/lib/Devscripts/Uscan/Keyring.pm
@@ -0,0 +1,168 @@
+package Devscripts::Uscan::Keyring;
+
+use strict;
+use Devscripts::Uscan::Output;
+use Devscripts::Uscan::Utils;
+use Dpkg::IPC;
+use File::Copy qw/copy move/;
+use File::Which;
+use File::Path qw/make_path remove_tree/;
+use File::Temp qw/tempfile tempdir/;
+use List::Util qw/first/;
+
# Constructor: locate the upstream OpenPGP keyring and prepare it for
# use with gpgv.
#
# Search order: debian/upstream/signing-key.asc, then the deprecated
# binary keyrings debian/upstream/signing-key.pgp and
# debian/upstream-signing-key.pgp. A binary keyring is converted to the
# policy-compliant armored form (side effect: writes
# debian/upstream/signing-key.asc and renames the binary original to
# *.backup). The armored keyring is then dearmored into a throw-away
# temporary GNUPGHOME as trustedkeys.gpg, since gpgv needs a binary
# keyring.
#
# Returns undef when no keyring is found; otherwise a blessed hash with
# keys: keyring, gpghome, gpgv, gpg.
sub new {
    my ($class) = @_;
    my $keyring;
    my $havegpgv = first { which $_ } qw(gpgv2 gpgv);
    my $havegpg = first { which $_ } qw(gpg2 gpg);
    uscan_die("Please install gpgv or gpgv2.") unless defined $havegpgv;
    uscan_die("Please install gnupg or gnupg2.") unless defined $havegpg;

    # upstream/signing-key.pgp and upstream-signing-key.pgp are deprecated
    # but supported
    if (-r "debian/upstream/signing-key.asc") {
        $keyring = "debian/upstream/signing-key.asc";
    } else {
        my $binkeyring = first { -r $_ } qw(
          debian/upstream/signing-key.pgp
          debian/upstream-signing-key.pgp
        );
        if (defined $binkeyring) {
            make_path('debian/upstream', { mode => 0700, verbose => 'true' });

            # convert to the policy complying armored key
            uscan_verbose(
                "Found upstream binary signing keyring: $binkeyring");

            # Need to convert to an armored key
            $keyring = "debian/upstream/signing-key.asc";
            uscan_warn "Found deprecated binary keyring ($binkeyring). "
              . "Please save it in armored format in $keyring. For example:\n"
              . "   gpg --output $keyring --enarmor $binkeyring";
            spawn(
                exec => [
                    $havegpg,
                    '--homedir' => "/dev/null",
                    '--no-options', '-q', '--batch', '--no-default-keyring',
                    '--output' => $keyring,
                    '--enarmor', $binkeyring
                ],
                wait_child => 1
            );
            uscan_warn("Generated upstream signing keyring: $keyring");
            move $binkeyring, "$binkeyring.backup";
            uscan_verbose(
                "Renamed upstream binary signing keyring: $binkeyring.backup");
        }
    }

    # Need to convert an armored key to binary for use by gpgv
    my $gpghome;
    if (defined $keyring) {
        uscan_verbose("Found upstream signing keyring: $keyring");
        if ($keyring =~ m/\.asc$/) {    # always true
            $gpghome = tempdir(CLEANUP => 1);
            my $newkeyring = "$gpghome/trustedkeys.gpg";
            spawn(
                exec => [
                    $havegpg,
                    '--homedir' => $gpghome,
                    '--no-options', '-q', '--batch', '--no-default-keyring',
                    '--output' => $newkeyring,
                    '--dearmor', $keyring
                ],
                wait_child => 1
            );
            $keyring = $newkeyring;
        }
    }

    # Return undef if no keyring was found
    else {
        return undef;
    }
    my $self = bless {
        keyring => $keyring,
        gpghome => $gpghome,
        gpgv => $havegpgv,
        gpg => $havegpg,
    }, $class;
    return $self;
}
+
# verify($sigfile, $newfile) - check a download that embeds its own
# signature. Runs gpgv over $newfile against the prepared keyring and
# writes the extracted payload to $sigfile (gpgv's -o option). Dies via
# uscan_die when verification fails.
sub verify {
    my ($self, $sigfile, $newfile) = @_;
    uscan_verbose(
        "Verifying OpenPGP self signature of $newfile and extract $sigfile");
    my $status = uscan_exec_no_fail(
        $self->{gpgv},
        '--homedir' => $self->{gpghome},
        '--keyring' => $self->{keyring},
        '-o'        => "$sigfile",
        "$newfile"
    );
    uscan_die("OpenPGP signature did not verify.") if $status >> 8 != 0;
}
+
# verifyv($sigfile, $base) - check a detached signature $sigfile against
# the file $base with gpgv and the prepared keyring. Dies via uscan_die
# when verification fails.
sub verifyv {
    my ($self, $sigfile, $base) = @_;
    uscan_verbose("Verifying OpenPGP signature $sigfile for $base");
    my $status = uscan_exec_no_fail(
        $self->{gpgv},
        '--homedir' => '/dev/null',
        '--keyring' => $self->{keyring},
        $sigfile, $base
    );
    uscan_die("OpenPGP signature did not verify.") if $status >> 8 != 0;
}
+
# verify_git($gitdir, $tag, $git_upstream) - verify the OpenPGP
# signature embedded in a signed git tag.
#
# Resolves $tag to its object, splits the tag object into the signed
# payload and the armored signature, writes both to a scratch directory
# and runs gpgv over the pair with the upstream keyring. Dies via
# uscan_die on a missing tag, an unsigned tag, or a bad signature.
sub verify_git {
    my ($self, $gitdir, $tag, $git_upstream) = @_;
    my $commit;
    # In --git-upstream mode we are already inside the repository, so no
    # --git-dir is needed.
    my @dir = $git_upstream ? () : ('--git-dir', $gitdir);
    spawn(
        exec      => ['git', @dir, 'show-ref', $tag],
        to_string => \$commit
    );
    uscan_die "git tag not found" unless ($commit);
    $commit =~ s/\s.*$//;
    chomp $commit;
    my $file;
    spawn(
        exec      => ['git', @dir, 'cat-file', '-p', $commit],
        to_string => \$file
    );
    my $dir;
    spawn(exec => ['mktemp', '-d'], to_string => \$dir);
    chomp $dir;

    # A signed tag object is the payload followed by an armored PGP
    # signature block.
    unless ($file =~ /^(.*?\n)(\-+\s*BEGIN PGP SIGNATURE\s*\-+.*)$/s) {
        uscan_die "Tag $tag is not signed";
    }

    # Lexical filehandles with 3-arg open and checked closes (the
    # original used bareword handles with 2-arg open and unchecked
    # closes).
    open my $txt_fh, '>', "$dir/txt" or die $!;
    open my $sig_fh, '>', "$dir/sig" or die $!;
    print $txt_fh $1;
    print $sig_fh $2;
    close $txt_fh or die $!;
    close $sig_fh or die $!;

    unless (
        uscan_exec_no_fail(
            $self->{gpgv},
            '--homedir' => $self->{gpghome},
            '--keyring' => $self->{keyring},
            "$dir/sig", "$dir/txt"
        ) >> 8 == 0
    ) {
        uscan_die("OpenPGP signature did not verify.");
    }
    remove_tree($dir);
}
+
+1;
diff --git a/lib/Devscripts/Uscan/Output.pm b/lib/Devscripts/Uscan/Output.pm
new file mode 100644
index 0000000..77126cf
--- /dev/null
+++ b/lib/Devscripts/Uscan/Output.pm
@@ -0,0 +1,129 @@
+package Devscripts::Uscan::Output;
+
+use strict;
+use Devscripts::Output;
+use Exporter 'import';
+use File::Basename;
+
+our @EXPORT = (
+ @Devscripts::Output::EXPORT, qw(
+ uscan_msg uscan_verbose dehs_verbose uscan_warn uscan_debug uscan_msg_raw
+ uscan_extra_debug uscan_die dehs_output $dehs $verbose $dehs_tags
+ $dehs_start_output $dehs_end_output $found
+ ));
+
+# ACCESSORS
+our ($dehs, $dehs_tags, $dehs_start_output, $dehs_end_output, $found)
+ = (0, {}, 0, 0);
+
+our $progname = basename($0);
+
# Print $msg verbatim. It goes to STDERR when the warning flag is set or
# when DEHS mode is active (STDOUT is then reserved for the XML stream);
# otherwise it goes to STDOUT.
sub printwarn_raw {
    my ($msg, $warning) = @_;
    my $to_stderr = $warning || $dehs;
    if ($to_stderr) {
        print STDERR "$msg";
    } else {
        print "$msg";
    }
}
+
# Print $msg with exactly one trailing newline, routed like
# printwarn_raw (STDERR when $w or DEHS mode, STDOUT otherwise).
sub printwarn {
    my ($msg, $to_stderr) = @_;
    chomp $msg;    # normalise to a single trailing newline
    printwarn_raw($msg . "\n", $to_stderr);
}
+
# Print $_[0] exactly as given (no newline normalisation), routed by
# printwarn_raw.
sub uscan_msg_raw {
    printwarn_raw($_[0]);
}
+
# Print $_[0] with exactly one trailing newline (printwarn chomps
# first).
sub uscan_msg {
    printwarn($_[0]);
}
+
# Verbose-level message; ds_verbose decides whether/where to print.
# $dehs is passed so output can be kept off the XML stream.
sub uscan_verbose {
    ds_verbose($_[0], $dehs);
}
+
# Debug-level message, delegated to Devscripts::Output::ds_debug.
sub uscan_debug {
    ds_debug($_[0], $dehs);
}
+
# Extra-debug-level message, delegated to ds_extra_debug.
sub uscan_extra_debug {
    ds_extra_debug($_[0], $dehs);
}
+
# Verbose message that is also queued (unconditionally) under the DEHS
# 'messages' tag so it appears in the XML output.
sub dehs_verbose ($) {
    my $msg = $_[0];
    push @{ $dehs_tags->{'messages'} }, "$msg\n";
    uscan_verbose($msg);
}
+
# Warn with a "$progname warn:" prefix, always to STDERR (second
# printwarn argument is true). In DEHS mode the raw message is also
# queued under the 'warnings' tag. who_called() appends file/line
# context.
sub uscan_warn ($) {
    my $msg = $_[0];
    push @{ $dehs_tags->{'warnings'} }, $msg if $dehs;
    printwarn("$progname warn: $msg" . &Devscripts::Output::who_called, 1);
}
+
# Report a fatal error. In DEHS mode all previously queued tags are
# REPLACED by a single 'errors' tag and the XML stream is closed before
# reporting. Despite the name, this only actually dies when
# $Devscripts::Output::die_on_error is set; otherwise the message is
# printed to STDERR and the sub returns, so callers must not assume it
# never comes back.
sub uscan_die ($) {
    my $msg = $_[0];
    if ($dehs) {
        $dehs_tags = { 'errors' => "$msg" };
        $dehs_end_output = 1;
        dehs_output();
    }
    $msg = "$progname die: $msg" . &Devscripts::Output::who_called;
    if ($Devscripts::Output::die_on_error) {
        die $msg;
    }
    printwarn($msg, 1);
}
+
# dehs_output() - flush the accumulated DEHS tags as XML on STDOUT.
#
# No-op unless DEHS mode ($dehs) is active. Prints the opening <dehs>
# element once per run, each known tag (scalar or array valued) with
# XML-escaped content, any per-component blocks, and the closing
# </dehs> when $dehs_end_output is set. Clears $dehs_tags afterwards so
# repeated calls do not duplicate output.
sub dehs_output () {
    return unless $dehs;

    if (!$dehs_start_output) {
        print "<dehs>\n";
        $dehs_start_output = 1;
    }

    for my $tag (
        qw(package debian-uversion debian-mangled-uversion
        upstream-version upstream-url decoded-checksum
        status target target-path messages warnings errors)
    ) {
        if (exists $dehs_tags->{$tag}) {
            if (ref $dehs_tags->{$tag} eq "ARRAY") {
                foreach my $entry (@{ $dehs_tags->{$tag} }) {
                    # Escape '&' FIRST: the original escaped it last,
                    # which re-escaped the entities just produced and
                    # turned '<' into '&amp;lt;'.
                    $entry =~ s/&/&amp;/g;
                    $entry =~ s/</&lt;/g;
                    $entry =~ s/>/&gt;/g;
                    print "<$tag>$entry</$tag>\n";
                }
            } else {
                $dehs_tags->{$tag} =~ s/&/&amp;/g;
                $dehs_tags->{$tag} =~ s/</&lt;/g;
                $dehs_tags->{$tag} =~ s/>/&gt;/g;
                print "<$tag>$dehs_tags->{$tag}</$tag>\n";
            }
        }
    }
    # NOTE(review): component names/values are printed unescaped below;
    # confirm they can never contain XML metacharacters.
    foreach my $cmp (@{ $dehs_tags->{'component-name'} }) {
        print qq'<component id="$cmp">\n';
        foreach my $tag (
            qw(debian-uversion debian-mangled-uversion
            upstream-version upstream-url target target-path)
        ) {
            my $v = shift @{ $dehs_tags->{"component-$tag"} };
            print " <component-$tag>$v</component-$tag>\n" if $v;
        }
        print "</component>\n";
    }
    if ($dehs_end_output) {
        print "</dehs>\n";
    }

    # Don't repeat output
    $dehs_tags = {};
}
+1;
diff --git a/lib/Devscripts/Uscan/Utils.pm b/lib/Devscripts/Uscan/Utils.pm
new file mode 100644
index 0000000..e93f240
--- /dev/null
+++ b/lib/Devscripts/Uscan/Utils.pm
@@ -0,0 +1,475 @@
+package Devscripts::Uscan::Utils;
+
+use strict;
+use Devscripts::Uscan::Output;
+use Devscripts::Utils;
+use Exporter 'import';
+
+our @EXPORT = (
+ qw(fix_href recursive_regex_dir newest_dir get_compression
+ get_suffix get_priority quoted_regex_parse safe_replace mangle
+ uscan_exec uscan_exec_no_fail)
+);
+
+#######################################################################
+# {{{ code 5: utility functions (download)
+#######################################################################
# fix_href($href) - normalise a scraped URL.
#
# Removes embedded newlines and strips leading/trailing whitespace, per
# the HTML5 rule that link URLs are trimmed of surrounding space.
# Returns the cleaned URL.
sub fix_href ($) {
    my ($url) = @_;

    # Embedded newlines are never valid in a URL
    # (code moved from outside fix_href).
    $url =~ tr/\n//d;

    # Trim surrounding whitespace:
    # https://www.w3.org/TR/html5/links.html#links-created-by-a-and-area-elements
    $url =~ s/\A\s+//;
    $url =~ s/\s+\z//;

    return $url;
}
+
# recursive_regex_dir($line, $base, $dirversionmangle, $watchfile,
#                     $lineptr, $download_version)
#
# Walk the directory part of $base, resolving every path component that
# contains a capture group '(...)' to the newest matching remote
# directory (via newest_dir). Returns the fully resolved URL, or '' on
# failure (callers treat '' as "give up on this watch line").
sub recursive_regex_dir ($$$$$$) {

    # If return '', parent code to cause return 1
    my ($line, $base, $dirversionmangle, $watchfile, $lineptr,
        $download_version)
      = @_;

    # NOTE(review): this match is unchecked; if $base had no
    # scheme://host prefix, $1/$2 would be stale from an earlier match —
    # confirm callers always pass a full URL.
    $base =~ m%^(\w+://[^/]+)/(.*)$%;
    my $site = $1;
    my @dirs = ();
    if (defined $2) {
        # Split keeping the '/' separators as list elements, so that
        # rejoining below reconstructs the original path.
        @dirs = split /(\/)/, $2;
    }
    my $dir = '/';

    foreach my $dirpattern (@dirs) {
        if ($dirpattern =~ /\(.*\)/) {
            uscan_verbose "dir=>$dir dirpattern=>$dirpattern";
            my $newest_dir = newest_dir($line, $site, $dir, $dirpattern,
                $dirversionmangle, $watchfile, $lineptr, $download_version);
            uscan_verbose "newest_dir => '$newest_dir'";
            if ($newest_dir ne '') {
                $dir .= "$newest_dir";
            } else {
                uscan_debug "No \$newest_dir";
                return '';
            }
        } else {
            $dir .= "$dirpattern";
        }
    }
    return $site . $dir;
}
+
+# very similar to code above
# newest_dir($line, $site, $dir, $pattern, $dirversionmangle,
#            $watchfile, $lineptr, $download_version)
#
# Ask the scheme-specific backend (http or ftp module, loaded lazily)
# for the newest remote subdirectory of $site$dir matching $pattern.
# Returns '' on error so the grand-parent caller can give up the line.
sub newest_dir ($$$$$$$$) {

    # return string $newdir as success
    # return string '' if error, to cause grand parent code to return 1
    my ($line, $site, $dir, $pattern, $dirversionmangle, $watchfile,
        $lineptr, $download_version)
      = @_;
    my ($newdir);
    uscan_verbose "Requesting URL:\n   $site$dir";
    if ($site =~ m%^http(s)?://%) {
        require Devscripts::Uscan::http;
        # $1 tells the backend whether the scheme was https.
        $newdir = Devscripts::Uscan::http::http_newdir($1, @_);
    } elsif ($site =~ m%^ftp://%) {
        require Devscripts::Uscan::ftp;
        $newdir = Devscripts::Uscan::ftp::ftp_newdir(@_);
    } else {
        # Neither HTTP nor FTP site
        uscan_warn "neither HTTP nor FTP site, impossible case for newdir().";
        $newdir = '';
    }
    return $newdir;
}
+#######################################################################
+# }}} code 5: utility functions (download)
+#######################################################################
+
+#######################################################################
+# {{{ code 6: utility functions (compression)
+#######################################################################
+# Get legal values for compression
# get_compression($name) - canonicalise a compression method name.
#
# Accepts the liberal set of aliases (gz/gzip, bz2/bzip2, zst/zstd, …)
# and returns the name used by Dpkg::Compression. Calls uscan_die for
# anything unrecognised.
sub get_compression ($) {
    my ($method) = @_;

    # be liberal in what you accept...
    my %canonical_of = (
        gz    => 'gzip',
        gzip  => 'gzip',
        bz2   => 'bzip2',
        bzip2 => 'bzip2',
        lzma  => 'lzma',
        xz    => 'xz',
        zip   => 'zip',
        zst   => 'zst',
        zstd  => 'zst',
    );

    my $result;
    if (exists $canonical_of{$method}) {
        $result = $canonical_of{$method};
    } else {
        uscan_die "$progname: invalid compression, $method given.";
    }
    return $result;
}
+
+# Get legal values for compression suffix
# get_suffix($name) - canonicalise a compression name to its file
# suffix (gz, bz2, lzma, xz, zip, zst). The special value 'default'
# defers to mk-origtargz's configured default compression. Calls
# uscan_die for anything unrecognised.
sub get_suffix ($) {
    my ($method) = @_;

    # be liberal in what you accept...
    my %suffix_of = (
        gz    => 'gz',
        gzip  => 'gz',
        bz2   => 'bz2',
        bzip2 => 'bz2',
        lzma  => 'lzma',
        xz    => 'xz',
        zip   => 'zip',
        zst   => 'zst',
        zstd  => 'zst',
    );

    my $result;
    if (exists $suffix_of{$method}) {
        $result = $suffix_of{$method};
    } elsif ($method eq 'default') {
        # Lazy-load to avoid a compile-time dependency cycle.
        require Devscripts::MkOrigtargz::Config;
        return &Devscripts::MkOrigtargz::Config::default_compression;
    } else {
        uscan_die "$progname: invalid suffix, $method given.";
    }
    return $result;
}
+
+# Get compression priority
# get_priority($href) - rank a download link by its tarball compression
# (0 = unknown, gz=1, bz2=2, lzma=3, xz=4). When a link matches several
# patterns the highest-ranked one wins, mirroring the original if-chain.
sub get_priority ($) {
    my ($link) = @_;

    my @ranking = (
        [qr/\.tar\.gz/i,   1],
        [qr/\.tar\.bz2/i,  2],
        [qr/\.tar\.lzma/i, 3],
        # zstd support is disabled, as in the original (would be 4)
        [qr/\.tar\.xz/i,   4],
    );

    my $priority = 0;
    for my $rank (@ranking) {
        $priority = $rank->[1] if $link =~ $rank->[0];
    }
    return $priority;
}
+#######################################################################
+# }}} code 6: utility functions (compression)
+#######################################################################
+
+#######################################################################
+# {{{ code 7: utility functions (regex)
+#######################################################################
# quoted_regex_parse($pattern) - parse an s/tr/y mangling rule written
# with bracketing delimiters ({} [] () <>), e.g. "s{foo}{bar}g".
#
# Returns ($parsed_ok, $regexp, $replacement, $flags). $parsed_ok is
# false when the rule is malformed (unbalanced brackets, trailing
# garbage between or after the bracketed parts, or a missing
# replacement part).
sub quoted_regex_parse($) {
    my $pattern = shift;
    # Map each opening delimiter to its closing counterpart.
    my %closers = ('{', '}', '[', ']', '(', ')', '<', '>');

    # NOTE(review): this match is unchecked; callers are expected to
    # pass a rule starting with s/tr/y (safe_replace verifies that).
    $pattern =~ /^(s|tr|y)(.)(.*)$/;
    my ($sep, $rest) = ($2, $3 || '');
    my $closer = $closers{$sep};

    my $parsed_ok = 1;
    my $regexp = '';
    my $replacement = '';
    my $flags = '';
    # $open counts bracket nesting; it starts at 1 because the opening
    # delimiter was already consumed by the match above.
    my $open = 1;
    my $last_was_escape = 0;
    my $in_replacement = 0;

    for my $char (split //, $rest) {
        if ($char eq $sep and !$last_was_escape) {
            $open++;
            if ($open == 1) {
                if ($in_replacement) {

                    # Separator after end of replacement
                    uscan_warn "Extra \"$sep\" after end of replacement.";
                    $parsed_ok = 0;
                    last;
                } else {
                    $in_replacement = 1;
                }
            } else {
                if ($open > 1) {
                    if ($in_replacement) {
                        $replacement .= $char;
                    } else {
                        $regexp .= $char;
                    }
                }
            }
        } elsif ($char eq $closer and !$last_was_escape) {
            $open--;
            if ($open > 0) {
                if ($in_replacement) {
                    $replacement .= $char;
                } else {
                    $regexp .= $char;
                }
            } elsif ($open < 0) {
                uscan_warn "Extra \"$closer\" after end of replacement.";
                $parsed_ok = 0;
                last;
            }
        } else {
            if ($in_replacement) {
                if ($open) {
                    $replacement .= $char;
                } else {
                    # Past the closing bracket: characters are flags.
                    $flags .= $char;
                }
            } else {
                if ($open) {
                    $regexp .= $char;
                } elsif ($char !~ m/\s/) {
                    uscan_warn
                      "Non-whitespace between <...> and <...> (or similars).";
                    $parsed_ok = 0;
                    last;
                }

                # skip if blanks between <...> and <...> (or similars)
            }
        }

        # Don't treat \\ as an escape
        $last_was_escape = ($char eq '\\' and !$last_was_escape);
    }

    # Also catches rules that never opened a replacement part or whose
    # brackets are still unbalanced at end of input.
    unless ($in_replacement and $open == 0) {
        uscan_warn "Empty replacement string.";
        $parsed_ok = 0;
    }

    return ($parsed_ok, $regexp, $replacement, $flags);
}
+
# safe_replace($in, $pat) - apply a user-supplied s///, tr/// or y///
# mangling rule to the string referenced by $in, refusing constructs
# that would be unsafe to evaluate.
#
# Returns 1 on success (including "pattern did not match") and 0 when
# the rule is malformed or uses disallowed flags/constructs. On success
# $$in is modified in place. Substitution is re-implemented manually
# (match, then splice the replacement in with substr) so that the
# user's replacement text is never string-eval'ed.
sub safe_replace($$) {
    my ($in, $pat) = @_;
    eval "uscan_debug \"safe_replace input=\\\"\$\$in\\\"\\n\"";
    $pat =~ s/^\s*(.*?)\s*$/$1/;

    $pat =~ /^(s|tr|y)(.)/;
    my ($op, $sep) = ($1, $2 || '');
    my $esc = "\Q$sep\E";
    my ($parsed_ok, $regexp, $replacement, $flags);

    # Bracketing delimiters need the dedicated parser; plain delimiters
    # are handled by the regex in the elsif below.
    if ($sep eq '{' or $sep eq '(' or $sep eq '[' or $sep eq '<') {
        ($parsed_ok, $regexp, $replacement, $flags) = quoted_regex_parse($pat);

        unless ($parsed_ok) {
            uscan_warn "stop mangling: rule=\"$pat\"\n"
              . "  mangling rule with <...>, (...), {...} failed.";
            return 0;
        }
    } elsif ($pat
        !~ /^(?:s|tr|y)$esc((?:\\.|[^\\$esc])*)$esc((?:\\.|[^\\$esc])*)$esc([a-z]*)$/
    ) {
        $sep = "/" if $sep eq '';
        uscan_warn "stop mangling: rule=\"$pat\"\n"
          . "  rule doesn't match \"(s|tr|y)$sep.*$sep.*$sep\[a-z\]*\" (or similar).";
        return 0;
    } else {
        ($regexp, $replacement, $flags) = ($1, $2, $3);
    }

    uscan_debug
"safe_replace with regexp=\"$regexp\", replacement=\"$replacement\", and flags=\"$flags\"";
    my $safeflags = $flags;
    if ($op eq 'tr' or $op eq 'y') {
        # tr/y: only the c, d, s flags are permitted; both sides are
        # reduced to \xNN escapes so no metacharacters reach the eval.
        $safeflags =~ tr/cds//cd;
        if ($safeflags ne $flags) {
            uscan_warn "stop mangling: rule=\"$pat\"\n"
              . "  flags must consist of \"cds\" only.";
            return 0;
        }

        $regexp =~ s/\\(.)/$1/g;
        $replacement =~ s/\\(.)/$1/g;

        # '-' is kept literal so character ranges still work.
        $regexp =~ s/([^-])/'\\x' . unpack 'H*', $1/ge;
        $replacement =~ s/([^-])/'\\x' . unpack 'H*', $1/ge;

        eval "\$\$in =~ tr<$regexp><$replacement>$flags;";

        if ($@) {
            uscan_warn "stop mangling: rule=\"$pat\"\n"
              . "  mangling \"tr\" or \"y\" rule execution failed.";
            return 0;
        } else {
            return 1;
        }
    } else {
        # s///: only the g, i, x flags are permitted.
        $safeflags =~ tr/gix//cd;
        if ($safeflags ne $flags) {
            uscan_warn "stop mangling: rule=\"$pat\"\n"
              . "  flags must consist of \"gix\" only.";
            return 0;
        }

        my $global = ($flags =~ s/g//);
        $flags = "(?$flags)" if length $flags;

        my $slashg;
        if ($regexp =~ /(?<!\\)(\\\\)*\\G/) {
            $slashg = 1;

            # if it's not initial, it is too dangerous
            if ($regexp =~ /^.*[^\\](\\\\)*\\G/) {
                uscan_warn "stop mangling: rule=\"$pat\"\n"
                  . "  dangerous use of \\G with regexp=\"$regexp\".";
                return 0;
            }
        }

        # Behave like Perl and treat e.g. "\." in replacement as "."
        # We allow the case escape characters to remain and
        # process them later
        $replacement =~ s/(^|[^\\])\\([^luLUE])/$1$2/g;

        # Unescape escaped separator characters
        $replacement =~ s/\\\Q$sep\E/$sep/g;

        # If bracketing quotes were used, also unescape the
        # closing version
        ### {{ ### (FOOL EDITOR for non-quoted kets)
        $replacement =~ s/\\\Q}\E/}/g if $sep eq '{';
        $replacement =~ s/\\\Q]\E/]/g if $sep eq '[';
        $replacement =~ s/\\\Q)\E/)/g if $sep eq '(';
        $replacement =~ s/\\\Q>\E/>/g if $sep eq '<';

        # The replacement below will modify $replacement so keep
        # a copy. We'll need to restore it to the current value if
        # the global flag was set on the input pattern.
        my $orig_replacement = $replacement;

        my ($first, $last, $pos, $zerowidth, $matched, @captures) = (0, -1, 0);
        while (1) {
            eval {
                # handle errors due to unsafe constructs in $regexp
                no re 'eval';

                # restore position
                pos($$in) = $pos if $pos;

                if ($zerowidth) {

                    # previous match was a zero-width match, simulate it to set
                    # the internal flag that avoids the infinite loop
                    $$in =~ /()/g;
                }

                # Need to use /g to make it use and save pos()
                $matched = ($$in =~ /$flags$regexp/g);

                if ($matched) {

                    # save position and size of the match
                    my $oldpos = $pos;
                    $pos = pos($$in);
                    ($first, $last) = ($-[0], $+[0]);

                    if ($slashg) {

                        # \G in the match, weird things can happen
                        $zerowidth = ($pos == $oldpos);

                        # For example, matching without a match
                        $matched = 0
                          if ( not defined $first
                            or not defined $last);
                    } else {
                        $zerowidth = ($last - $first == 0);
                    }
                    for my $i (0 .. $#-) {
                        $captures[$i] = substr $$in, $-[$i], $+[$i] - $-[$i];
                    }
                }
            };
            if ($@) {
                uscan_warn "stop mangling: rule=\"$pat\"\n"
                  . "  mangling \"s\" rule execution failed.";
                return 0;
            }

            # No match; leave the original string untouched but return
            # success as there was nothing wrong with the pattern
            return 1 unless $matched;

            # Replace $X
            $replacement
              =~ s/[\$\\](\d)/defined $captures[$1] ? $captures[$1] : ''/ge;
            $replacement
              =~ s/\$\{(\d)\}/defined $captures[$1] ? $captures[$1] : ''/ge;
            $replacement =~ s/\$&/$captures[0]/g;

            # Make \l etc escapes work
            $replacement =~ s/\\l(.)/lc $1/e;
            $replacement =~ s/\\L(.*?)(\\E|\z)/lc $1/e;
            $replacement =~ s/\\u(.)/uc $1/e;
            $replacement =~ s/\\U(.*?)(\\E|\z)/uc $1/e;

            # Actually do the replacement
            substr $$in, $first, $last - $first, $replacement;

            # Update position
            $pos += length($replacement) - ($last - $first);

            if ($global) {
                $replacement = $orig_replacement;
            } else {
                last;
            }
        }

        return 1;
    }
}
+
+# call this as
+# if mangle($watchfile, \$line, 'uversionmangle:',
+# \@{$options{'uversionmangle'}}, \$version) {
+# return 1;
+# }
# mangle($watchfile, $lineptr, $name, $rulesptr, $verptr)
#
# Apply each mangling rule in @$rulesptr to the string referenced by
# $verptr via safe_replace(). Returns 0 on success; returns 1 (and
# warns) as soon as one rule is rejected as unsafe or malformed, which
# tells the caller to skip the watch line.
sub mangle($$$$$) {
    my ($watchfile, $lineptr, $name, $rulesptr, $verptr) = @_;
    for my $pat (@$rulesptr) {
        unless (safe_replace($verptr, $pat)) {
            uscan_warn "In $watchfile, potentially"
              . " unsafe or malformed $name"
              . " pattern:\n  '$pat'"
              . " found. Skipping watchline\n"
              . "  $$lineptr";
            return 1;
        }
        uscan_debug "After $name $$verptr";
    }
    return 0;
}
+
# Re-export the generic Devscripts::Utils command runners under
# uscan-specific names.
*uscan_exec_no_fail = \&ds_exec_no_fail;

*uscan_exec = \&ds_exec;
+
+#######################################################################
+# }}} code 7: utility functions (regex)
+#######################################################################
+
+1;
diff --git a/lib/Devscripts/Uscan/WatchFile.pm b/lib/Devscripts/Uscan/WatchFile.pm
new file mode 100644
index 0000000..06ab61d
--- /dev/null
+++ b/lib/Devscripts/Uscan/WatchFile.pm
@@ -0,0 +1,517 @@
+
+=head1 NAME
+
+Devscripts::Uscan::WatchFile - watchfile object for L<uscan>
+
+=head1 SYNOPSIS
+
+ use Devscripts::Uscan::Config;
+ use Devscripts::Uscan::WatchFile;
+
+ my $config = Devscripts::Uscan::Config->new({
+ # Uscan config parameters. Example:
+ destdir => '..',
+ });
+
+ # You can use Devscripts::Uscan::FindFiles to find watchfiles
+
+ my $wf = Devscripts::Uscan::WatchFile->new({
+ config => $config,
+ package => $package,
+ pkg_dir => $pkg_dir,
+ pkg_version => $version,
+ watchfile => $watchfile,
+ });
+ return $wf->status if ( $wf->status );
+
+ # Do the job
+ return $wf->process_lines;
+
+=head1 DESCRIPTION
+
+Uscan class to parse watchfiles.
+
+=head1 METHODS
+
+=head2 new() I<(Constructor)>
+
+Parses the watch file and creates one L<Devscripts::Uscan::WatchLine> object
+for each line.
+
+=head3 Required parameters
+
+=over
+
+=item config: L<Devscripts::Uscan::Config> object
+
+=item package: Debian package name
+
+=item pkg_dir: Working directory
+
+=item pkg_version: Current Debian package version
+
+=back
+
+=head2 Main accessors
+
+=over
+
+=item watchlines: ref to the array that contains watchlines objects
+
+=item watch_version: format version of the watchfile
+
+=back
+
+=head2 process_lines()
+
+Method that launches Devscripts::Uscan::WatchLine::process() on each watchline.
+
+=head1 SEE ALSO
+
+L<uscan>, L<Devscripts::Uscan::WatchLine>, L<Devscripts::Uscan::Config>,
+L<Devscripts::Uscan::FindFiles>
+
+=head1 AUTHOR
+
+B<uscan> was originally written by Christoph Lameter
+E<lt>clameter@debian.orgE<gt> (I believe), modified by Julian Gilbey
+E<lt>jdg@debian.orgE<gt>. HTTP support was added by Piotr Roszatycki
+E<lt>dexter@debian.orgE<gt>. B<uscan> was rewritten in Perl by Julian Gilbey.
+Xavier Guimard E<lt>yadd@debian.orgE<gt> rewrote uscan in object
+oriented Perl.
+
+=head1 COPYRIGHT AND LICENSE
+
+Copyright 2002-2006 by Julian Gilbey <jdg@debian.org>,
+2018 by Xavier Guimard <yadd@debian.org>
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2 of the License, or
+(at your option) any later version.
+
+=cut
+
+package Devscripts::Uscan::WatchFile;
+
+use strict;
+use Devscripts::Uscan::Downloader;
+use Devscripts::Uscan::Output;
+use Devscripts::Uscan::WatchLine;
+use Dpkg::Version;
+use File::Copy qw/copy move/;
+use List::Util qw/first/;
+use Moo;
+
+# Regex fragments substituted for the @ANY_VERSION@, @ARCHIVE_EXT@ and
+# @DEB_EXT@ tokens in watch lines (see the substitutions in BUILD).
+use constant {
+    ANY_VERSION => '(?:[-_]?v?(\d[\-+\.:\~\da-zA-Z]*))',
+    ARCHIVE_EXT =>
+      '(?i)(?:\.(?:tar\.xz|tar\.bz2|tar\.gz|tar\.zstd?|zip|tgz|tbz|txz))',
+    DEB_EXT => '(?:[\+~](debian|dfsg|ds|deb)(\.)?(\d+)?$)',
+};
+# Archive extension followed by a detached-signature extension; used
+# for the @SIGNATURE_EXT@ token.
+use constant SIGNATURE_EXT => ARCHIVE_EXT . '(?:\.(?:asc|pgp|gpg|sig|sign))';
+
+# Required new() parameters
+has config => (is => 'rw', required => 1);
+has package => (is => 'ro', required => 1); # Debian package
+has pkg_dir => (is => 'ro', required => 1);
+has pkg_version => (is => 'ro', required => 1);
+# bare/download default to the corresponding Config values but are rw
+# so a watch line can override them.
+has bare => (
+    is => 'rw',
+    lazy => 1,
+    default => sub { $_[0]->config->bare });
+has download => (
+    is => 'rw',
+    lazy => 1,
+    default => sub { $_[0]->config->download });
+# Shared Downloader object, built lazily from the Config settings.
+has downloader => (
+    is => 'ro',
+    lazy => 1,
+    default => sub {
+        Devscripts::Uscan::Downloader->new({
+            timeout => $_[0]->config->timeout,
+            agent => $_[0]->config->user_agent,
+            pasv => $_[0]->config->pasv,
+            destdir => $_[0]->config->destdir,
+            headers => $_[0]->config->http_header,
+        });
+    },
+);
+# NOTE(review): required together with lazy+default is unusual; the
+# default already satisfies "required" under Moo, so one of the two is
+# probably redundant — confirm intent.
+has signature => (
+    is => 'rw',
+    required => 1,
+    lazy => 1,
+    default => sub { $_[0]->config->signature });
+has watchfile => (is => 'ro', required => 1); # usually debian/watch
+
+# Internal attributes
+has group => (is => 'rw', default => sub { [] }); # indices of group/checksum lines
+has origcount => (is => 'rw');
+has origtars => (is => 'rw', default => sub { [] });
+has status => (is => 'rw', default => sub { 0 });
+has watch_version => (is => 'rw');
+has watchlines => (is => 'rw', default => sub { [] });
+
+# Values shared between lines
+has shared => (
+    is => 'rw',
+    lazy => 1,
+    default => \&new_shared,
+);
+
+# Build a fresh per-group shared-state hash. Only bare, download and
+# signature are seeded from this object's accessors; every other slot
+# starts empty and is filled in while the watch lines are processed.
+sub new_shared {
+    my ($self) = @_;
+    my %state = map { $_ => undef } qw(
+        common_newversion common_mangled_newversion
+        download_version origcount
+        previous_download_available previous_newversion
+        previous_newfile_base previous_sigfile_base
+        uscanlog
+    );
+    $state{components} = [];
+    $state{origtars}   = [];
+    $state{bare}       = $self->bare;
+    $state{download}   = $self->download;
+    $state{signature}  = $self->signature;
+    return \%state;
+}
+# GPG keyring wrapper used for signature verification.
+# NOTE(review): Devscripts::Uscan::Keyring is not use'd in this file;
+# it is only loaded transitively (WatchLine.pm loads it). An explicit
+# use statement would make the dependency robust — confirm.
+has keyring => (
+    is => 'ro',
+    default => sub { Devscripts::Uscan::Keyring->new });
+
+# Constructor hook: read the watch file, reassemble backslash-continued
+# lines, detect the "version=" header, expand @PACKAGE@/@ANY_VERSION@/
+# @ARCHIVE_EXT@/@SIGNATURE_EXT@/@DEB_EXT@ tokens, and build one
+# Devscripts::Uscan::WatchLine object per logical line.
+# Populates watchlines, group (indices of group/checksum lines) and
+# watch_version; sets status(1) on problems.
+sub BUILD {
+    my ($self, $args) = @_;
+    my $watch_version = 0;
+    my $nextline;
+    $dehs_tags = {};
+
+    uscan_verbose "Process watch file at: $args->{watchfile}\n"
+      . "    package = $args->{package}\n"
+      . "    version = $args->{pkg_version}\n"
+      . "    pkg_dir = $args->{pkg_dir}";
+
+    $self->origcount(0);    # reset to 0 for each watch file
+    # Three-argument open with a lexical handle (the original two-arg
+    # bareword form allowed mode injection via the file name).
+    my $watch_fh;
+    unless (open $watch_fh, '<', $args->{watchfile}) {
+        uscan_warn "could not open $args->{watchfile}: $!";
+        return 1;
+    }
+
+    my $lineNumber = 0;
+    while (<$watch_fh>) {
+        next if /^\s*\#/;    # skip comment lines
+        next if /^\s*$/;     # skip blank lines
+        s/^\s*//;
+
+      CHOMP:
+
+        # Reassemble lines split using \
+        chomp;
+        if (s/(?<!\\)\\$//) {
+            if (eof($watch_fh)) {
+                uscan_warn
+                  "$args->{watchfile} ended with \\; skipping last line";
+                $self->status(1);
+                last;
+            }
+            if ($watch_version > 3) {
+
+                # drop leading \s only if version 4
+                $nextline = <$watch_fh>;
+                $nextline =~ s/^\s*//;
+                $_ .= $nextline;
+            } else {
+                $_ .= <$watch_fh>;
+            }
+            goto CHOMP;
+        }
+
+        # "version" must be the first field
+        if (!$watch_version) {
+
+            # Looking for "version" field.
+            if (/^version\s*=\s*(\d+)(\s|$)/) {    # Found
+                $watch_version = $1;
+
+                # Note that version=1 watchfiles have no "version" field so
+                # authorized values are >= 2 and <= CURRENT_WATCHFILE_VERSION
+                if (   $watch_version < 2
+                    or $watch_version
+                    > $Devscripts::Uscan::Config::CURRENT_WATCHFILE_VERSION) {
+                    # "version" field found but has no authorized value
+                    uscan_warn
+"$args->{watchfile} version number is unrecognised; skipping watch file";
+                    last;
+                }
+
+                # Next line
+                next;
+            }
+
+            # version=1 is deprecated
+            else {
+                $watch_version = 1;
+            }
+        }
+        if ($watch_version < 3) {
+            uscan_warn
+"$args->{watchfile} is an obsolete version $watch_version watch file;\n"
+              . "   please upgrade to a higher version\n"
+              . "   (see uscan(1) for details).";
+        }
+
+        # "version" is fixed, parsing lines now
+
+        # Are there any warnings from this part to give if we're using dehs?
+        dehs_output if ($dehs);
+
+        # Handle shell \\ -> \
+        s/\\\\/\\/g if $watch_version == 1;
+
+        # Handle @PACKAGE@ @ANY_VERSION@ @ARCHIVE_EXT@ substitutions
+        s/\@PACKAGE\@/$args->{package}/g;
+        s/\@ANY_VERSION\@/ANY_VERSION/ge;
+        s/\@ARCHIVE_EXT\@/ARCHIVE_EXT/ge;
+        s/\@SIGNATURE_EXT\@/SIGNATURE_EXT/ge;
+        s/\@DEB_EXT\@/DEB_EXT/ge;
+
+        my $line = Devscripts::Uscan::WatchLine->new({
+            # Shared between lines
+            config     => $self->config,
+            downloader => $self->downloader,
+            shared     => $self->shared,
+            keyring    => $self->keyring,
+
+            # Other parameters
+            line          => $_,
+            pkg           => $self->package,
+            pkg_dir       => $self->pkg_dir,
+            pkg_version   => $self->pkg_version,
+            watch_version => $watch_version,
+            watchfile     => $self->watchfile,
+        });
+        # Remember group/checksum line indices for process_group()
+        push @{ $self->group }, $lineNumber
+          if ($line->type and $line->type =~ /^(?:group|checksum)$/);
+        push @{ $self->watchlines }, $line;
+        $lineNumber++;
+    }
+
+    # BUG FIX: was "$$args->{watchfile}", which dereferences the hashref
+    # a second time and interpolates garbage into the warning message.
+    close $watch_fh
+      or $self->status(1),
+      uscan_warn "problems reading $args->{watchfile}: $!";
+    $self->watch_version($watch_version);
+}
+
+# Run every watch line in sequence, or hand off to process_group()
+# when any group/checksum line was recorded. The last non-zero line
+# status wins; returns the accumulated status.
+sub process_lines {
+    my $self = shift;
+
+    # Grouped watch files are processed as a whole
+    return $self->process_group if @{ $self->group };
+
+    for my $watchline (@{ $self->watchlines }) {
+        # search newfile and newversion
+        my $result = $watchline->process;
+        $self->status($result) if $result;
+    }
+    return $self->{status};
+}
+
+# Process a watch file whose lines form a "group"/"checksum" set:
+# each component's version is combined into a single upstream version
+# (parts joined with "+~", checksum lines summed into a "cs" part),
+# files are downloaded/repacked, then renamed to carry the combined
+# version. Returns 0 on success, the accumulated status on failure.
+sub process_group {
+    my ($self) = @_;
+    # NOTE(review): the saved download_version is never restored in
+    # this sub although config->download_version is overwritten below;
+    # confirm whether a restore was intended.
+    my $saveDconfig = $self->config->download_version;
+    # Build version
+    my @cur_versions = split /\+~/, $self->pkg_version;
+    my $checksum = 0;
+    my $newChecksum = 0;
+    # A trailing "cs<digits>" component is the previous checksum part
+    if ( $cur_versions[$#cur_versions]
+        and $cur_versions[$#cur_versions] =~ s/^cs//) {
+        $checksum = pop @cur_versions;
+    }
+    my (@new_versions, @last_debian_mangled_uversions, @last_versions);
+    my $download = 0;
+    my $last_shared = $self->shared;
+    my $last_comp_version;
+    my @dversion;
+    my @ck_versions;
+    # Isolate component and following lines
+    if (my $v = $self->config->download_version) {
+        @dversion = map { s/\+.*$//; /^cs/ ? () : $_ } split /\+~/, $v;
+    }
+    # Give each group/checksum line a fresh shared state and its own
+    # component version; following non-group lines reuse it.
+    foreach my $line (@{ $self->watchlines }) {
+        if ( $line->type and $line->type eq 'group'
+            or $line->type eq 'checksum') {
+            $last_shared = $self->new_shared;
+            $last_comp_version = shift @cur_versions if $line->type eq 'group';
+        }
+        if ($line->type and $line->type eq 'group') {
+            $line->{groupDversion} = shift @dversion;
+        }
+        $line->shared($last_shared);
+        $line->pkg_version($last_comp_version || 0);
+    }
+    # Check if download is needed
+    # NOTE(review): $line->type may be undef for plain lines here;
+    # string eq on undef only warns, but a definedness guard (as used
+    # in the loop above) would be cleaner.
+    foreach my $line (@{ $self->watchlines }) {
+        next unless ($line->type eq 'group' or $line->type eq 'checksum');
+        # Stop on error
+        $self->config->download_version($line->{groupDversion})
+          if $line->{groupDversion};
+        $self->config->download_version(undef) if $line->type eq 'checksum';
+        if (   $line->parse
+            or $line->search
+            or $line->get_upstream_url
+            or $line->get_newfile_base
+            or ($line->type eq 'group' and $line->cmp_versions)
+            or ($line->ctype and $line->cmp_versions)) {
+            $self->{status} += $line->status;
+            return $self->{status};
+        }
+        $download = $line->shared->{download}
+          if $line->shared->{download} > $download
+          and ($line->type eq 'group' or $line->ctype);
+    }
+    # Sum the checksum-line versions into a single "cs" number
+    foreach my $line (@{ $self->watchlines }) {
+        next unless $line->type eq 'checksum';
+        $newChecksum
+          = $self->sum($newChecksum, $line->search_result->{newversion});
+        push @ck_versions, $line->search_result->{newversion};
+    }
+    # Re-run version comparison for checksum lines against the summed
+    # checksum, temporarily swapping in the combined newversion
+    foreach my $line (@{ $self->watchlines }) {
+        next unless ($line->type eq 'checksum');
+        $line->parse_result->{mangled_lastversion} = $checksum;
+        my $tmp = $line->search_result->{newversion};
+        $line->search_result->{newversion} = $newChecksum;
+        unless ($line->ctype) {
+            if ($line->cmp_versions) {
+                $self->{status} += $line->status;
+                return $self->{status};
+            }
+            $download = $line->shared->{download}
+              if $line->shared->{download} > $download;
+        }
+        $line->search_result->{newversion} = $tmp;
+        if ($line->component) {
+            pop @{ $dehs_tags->{'component-upstream-version'} };
+            push @{ $dehs_tags->{'component-upstream-version'} }, $tmp;
+        }
+    }
+    # Download and repack every line; collect the per-group versions
+    foreach my $line (@{ $self->watchlines }) {
+        # Set same $download for all
+        $line->shared->{download} = $download;
+        # Non "group" lines where not initialized
+        unless ($line->type eq 'group' or $line->type eq 'checksum') {
+            if (   $line->parse
+                or $line->search
+                or $line->get_upstream_url
+                or $line->get_newfile_base
+                or $line->cmp_versions) {
+                $self->{status} += $line->status;
+                return $self->{status};
+            }
+        }
+        if ($line->download_file_and_sig) {
+            $self->{status} += $line->status;
+            return $self->{status};
+        }
+        if ($line->mkorigtargz) {
+            $self->{status} += $line->status;
+            return $self->{status};
+        }
+        if ($line->type eq 'group') {
+            push @new_versions, $line->shared->{common_mangled_newversion}
+              || $line->shared->{common_newversion}
+              || ();
+            push @last_versions, $line->parse_result->{lastversion};
+            push @last_debian_mangled_uversions,
+              $line->parse_result->{mangled_lastversion};
+        }
+    }
+    # Assemble the combined version and the dehs status tags
+    my $new_version = join '+~', @new_versions;
+    if ($newChecksum) {
+        $new_version .= "+~cs$newChecksum";
+    }
+    if ($checksum) {
+        push @last_versions, "cs$newChecksum";
+        push @last_debian_mangled_uversions, "cs$checksum";
+    }
+    $dehs_tags->{'upstream-version'} = $new_version;
+    $dehs_tags->{'debian-uversion'} = join('+~', @last_versions)
+      if (grep { $_ } @last_versions);
+    $dehs_tags->{'debian-mangled-uversion'} = join '+~',
+      @last_debian_mangled_uversions
+      if (grep { $_ } @last_debian_mangled_uversions);
+    # Compare with an artificial epoch/revision so Dpkg::Version
+    # accepts the combined strings
+    my $mangled_ver
+      = Dpkg::Version->new(
+        "1:" . $dehs_tags->{'debian-mangled-uversion'} . "-0",
+        check => 0);
+    my $upstream_ver = Dpkg::Version->new("1:$new_version-0", check => 0);
+    if ($mangled_ver == $upstream_ver) {
+        $dehs_tags->{'status'} = "up to date";
+    } elsif ($mangled_ver > $upstream_ver) {
+        $dehs_tags->{'status'} = "only older package available";
+    } else {
+        $dehs_tags->{'status'} = "newer package available";
+    }
+    # Rename downloaded files so they carry the combined version
+    foreach my $line (@{ $self->watchlines }) {
+        my $path = $line->destfile or next;
+        my $ver = $line->shared->{common_mangled_newversion};
+        $path =~ s/\Q$ver\E/$new_version/;
+        uscan_warn "rename $line->{destfile} to $path\n";
+        # NOTE(review): rename return value is unchecked here (and for
+        # the .asc/.sig renames below) — failures go unnoticed.
+        rename $line->{destfile}, $path;
+        if ($dehs_tags->{"target-path"} eq $line->{destfile}) {
+            $dehs_tags->{"target-path"} = $path;
+            $dehs_tags->{target} =~ s/\Q$ver\E/$new_version/;
+        } else {
+            for (
+                my $i = 0 ;
+                $i < @{ $dehs_tags->{"component-target-path"} } ;
+                $i++
+            ) {
+                if ($dehs_tags->{"component-target-path"}->[$i] eq
+                    $line->{destfile}) {
+                    $dehs_tags->{"component-target-path"}->[$i] = $path;
+                    $dehs_tags->{"component-target"}->[$i]
+                      =~ s/\Q$ver\E/$new_version/
+                      or die $ver;
+                }
+            }
+        }
+        if ($line->signature_available) {
+            rename "$line->{destfile}.asc", "$path.asc";
+            rename "$line->{destfile}.sig", "$path.sig";
+        }
+    }
+    if (@ck_versions) {
+        my $v = join '+~', @ck_versions;
+        if ($dehs) {
+            $dehs_tags->{'decoded-checksum'} = $v;
+        } else {
+            uscan_verbose 'Checksum ref: ' . join('+~', @ck_versions) . "\n";
+        }
+    }
+    return 0;
+}
+
+# Positionally sum dotted, all-numeric version strings: the i-th
+# component of the result is the sum of the i-th numeric components of
+# all input versions. Dies (uscan_die) on any non-numeric component.
+# Used to build the "cs" checksum part of a grouped version.
+#
+# NOTE(review): @str elements are only ever set to '' and never to any
+# other value, so the trailing @str concatenation/push below appears
+# vestigial — confirm before simplifying.
+sub sum {
+    my ($self, @versions) = @_;
+    my (@res, @str);
+    foreach my $v (@versions) {
+        # version_split_digits() presumably yields digit runs and
+        # separator chars; the '.' separators are dropped here.
+        my @tmp = grep { $_ ne '.' } version_split_digits($v);
+        for (my $i = 0 ; $i < @tmp ; $i++) {
+            $str[$i] //= '';
+            $res[$i] //= 0;
+            if ($tmp[$i] =~ /^\d+$/) {
+                $res[$i] += $tmp[$i];
+            } else {
+                uscan_die
+"Checksum supports only digits in versions, $tmp[$i] is not accepted";
+            }
+        }
+    }
+    for (my $i = 0 ; $i < @res ; $i++) {
+        my $tmp = shift @str;
+        $res[$i] .= $tmp if $tmp ne '';
+    }
+    push @res, @str;
+    return join '.', @res;
+}
+
+1;
diff --git a/lib/Devscripts/Uscan/WatchLine.pm b/lib/Devscripts/Uscan/WatchLine.pm
new file mode 100644
index 0000000..b707be7
--- /dev/null
+++ b/lib/Devscripts/Uscan/WatchLine.pm
@@ -0,0 +1,1850 @@
+
+=pod
+
+=head1 NAME
+
+Devscripts::Uscan::WatchLine - watch line object for L<uscan>
+
+=head1 DESCRIPTION
+
+Uscan class to parse and process a single line of a watch file.
+
+=head1 MAIN METHODS
+
+=cut
+
+package Devscripts::Uscan::WatchLine;
+
+use strict;
+use Cwd qw/abs_path/;
+use Devscripts::Uscan::Keyring;
+use Devscripts::Uscan::Output;
+use Devscripts::Uscan::Utils;
+use Dpkg::IPC;
+use Dpkg::Version;
+use File::Basename;
+use File::Copy;
+use File::Spec::Functions qw/catfile/;
+use HTTP::Headers;
+use Moo;
+use Text::ParseWords;
+
+#################
+### ACCESSORS ###
+#################
+
+=head2 new() I<(Constructor)>
+
+=head3 Required parameters
+
+=over
+
+=item B<shared>: ref to hash containing line options shared between lines. See
+L<Devscripts::Uscan::WatchFile> code to see required keys.
+
+=item B<keyring>: L<Devscripts::Uscan::Keyring> object
+
+=item B<config>: L<Devscripts::Uscan::Config> object
+
+=item B<downloader>: L<Devscripts::Uscan::Downloader> object
+
+=item B<line>: search line (assembled in one line)
+
+=item B<pkg>: Debian package name
+
+=item B<pkg_dir>: Debian package source directory
+
+=item B<pkg_version>: Debian package version
+
+=item B<watchfile>: Current watchfile
+
+=item B<watch_version>: Version of current watchfile
+
+=back
+
+=cut
+
+# Declare the constructor-required attributes in one pass (all rw).
+foreach (
+
+    # Shared attributes stored in WatchFile object (ref to WatchFile value)
+    'shared', 'keyring', 'config',
+
+    # Other
+    'downloader', # Devscripts::Uscan::Downloader object
+    'line', # watch line string (concatenated line over the tailing \ )
+    'pkg', # source package name found in debian/changelog
+    'pkg_dir', # usually .
+    'pkg_version', # last source package version
+    # found in debian/changelog
+    'watchfile', # usually debian/watch
+    'watch_version', # usually 4 (or 3)
+) {
+    has $_ => (is => 'rw', required => 1);
+}
+
+# Options seeded from the Config object (command line / config file),
+# overridable per watch line. Note the hash access ($_[0]->config->{x})
+# rather than accessor calls — relies on Config being a hash-based
+# Moo object.
+has repack => (
+    is => 'rw',
+    lazy => 1,
+    default => sub { $_[0]->config->{repack} },
+);
+
+has safe => (
+    is => 'rw',
+    lazy => 1,
+    default => sub { $_[0]->config->{safe} },
+);
+
+has symlink => (
+    is => 'rw',
+    lazy => 1,
+    default => sub { $_[0]->config->{symlink} },
+);
+
+# How the found version relates to the current one; parse() switches
+# this to 'ignore'/'same'/'previous' for the matching pseudo-versions.
+has versionmode => (
+    is => 'rw',
+    lazy => 1,
+    default => sub { 'newer' },
+);
+
+has destfile => (is => 'rw');
+has sigfile => (is => 'rw');
+
+# 2 - Line options read/write attributes
+
+# Attributes whose name matches /mangle/ default to an empty rule list.
+# NOTE(review): 'oversionmanglepagemangle' looks like two option names
+# fused together (oversionmangle + pagemangle, both already listed);
+# nothing visible here uses it — confirm against upstream.
+foreach (qw(
+    component ctype hrefdecode repacksuffix unzipopt searchmode
+    dirversionmangle downloadurlmangle dversionmangle filenamemangle pagemangle
+    oversionmangle oversionmanglepagemangle pgpsigurlmangle uversionmangle
+    versionmangle
+    )
+) {
+    has $_ => (
+        is => 'rw',
+        (/mangle/ ? (default => sub { [] }) : ()));
+}
+
+# Command-line --compression wins over the line option (see parse()).
+has compression => (
+    is => 'rw',
+    lazy => 1,
+    default => sub {
+        $_[0]->config->compression
+          ? get_compression($_[0]->config->compression)
+          : undef;
+    },
+);
+has versionless => (is => 'rw');
+
+# 4 - Internal attributes
+has style => (is => 'rw', default => sub { 'new' });
+has status => (is => 'rw', default => sub { 0 });
+foreach (
+    qw(badversion
+    signature_available must_download)
+) {
+    has $_ => (is => 'rw', default => sub { 0 });
+}
+foreach (qw(mangled_version)) {
+    has $_ => (is => 'rw');
+}
+foreach (qw(sites basedirs patterns)) {
+    has $_ => (is => 'rw', default => sub { [] });
+}
+
+# 5 - Results
+foreach (qw(parse_result search_result)) {
+    has $_ => (is => 'rw', default => sub { {} });
+}
+foreach (qw(force_repack type upstream_url newfile_base)) {
+    has $_ => (is => 'rw');
+}
+
+# 3.1 - Attributes initialized with default value, modified by line content
+# (parse() overwrites these when the corresponding opts=... keyword —
+# date, gitexport, gitmode, mode, pgpmode, pretty, ... — is present)
+has date => (
+    is => 'rw',
+    default => sub { '%Y%m%d' },
+);
+has decompress => (
+    is => 'rw',
+    default => sub { 0 },
+);
+has gitexport => (
+    is => 'rw',
+    default => sub { 'default' },
+);
+has gitmode => (
+    is => 'rw',
+    default => sub { 'shallow' },
+);
+# Download mode; parse() refines 'LWP' into 'http' or 'ftp' from the URL
+has mode => (
+    is => 'rw',
+    default => sub { 'LWP' },
+);
+has pgpmode => (
+    is => 'rw',
+    default => sub { 'default' },
+);
+has pretty => (
+    is => 'rw',
+    default => sub { '0.0~git%cd.%h' },
+);
+
+# 3.2 - Self build attributes
+
+has gitrepo_dir => ( # Working repository used only within uscan.
+    is => 'ro',
+    lazy => 1,
+    builder => sub {
+        # Per-process name ($$) so concurrent uscan runs don't collide
+        $_[0]->{component}
+          ? $_[0]->{pkg} . "-temporary.$$." . $_[0]->{component} . '.git'
+          : $_[0]->{pkg} . "-temporary.$$.git";
+    });
+has headers => (
+    is => 'ro',
+    default => sub {
+        my $h = HTTP::Headers->new;
+        $h->header(
+            'X-uscan-features' => 'enhanced-matching',
+            'Accept' => '*/*'
+        );
+        return $h;
+    },
+);
+
+# Placeholder "smallest" version, used as lastversion for the
+# 'ignore'/'same' version modes (see parse()).
+my $minversion = '';
+
+###############
+# Main method #
+###############
+
+=head2 process()
+
+Launches all needed methods in this order: parse(), search(),
+get_upstream_url(), get_newfile_base(), cmp_versions(),
+download_file_and_sig(), mkorigtargz(), clean()
+
+If one method returns a non 0 value, it stops and return this error code.
+
+=cut
+
+# Run the full pipeline for one watch line:
+#   parse -> search -> get_upstream_url -> get_newfile_base ->
+#   cmp_versions -> download_file_and_sig -> mkorigtargz -> clean
+# The first step returning a true (error) value aborts the chain —
+# exactly like the original or-cascade. Returns the line status.
+sub process {
+    my ($self) = @_;
+
+    foreach my $step (
+        qw(parse search get_upstream_url get_newfile_base
+        cmp_versions download_file_and_sig mkorigtargz clean)
+    ) {
+        last if $self->$step;
+    }
+    return $self->status;
+}
+
+#########
+# STEPS #
+#########
+
+=head2 Steps
+
+=cut
+
+# I - parse
+
+=head3 parse()
+
+Parse the line and return 0 if nothing bad happen. It populates
+C<$self-E<gt>parse_result> accessor with a hash that contains the
+following keys:
+
+=over
+
+=item base
+=item filepattern
+=item lastversion
+=item action
+=item site
+=item basedir
+=item mangled_lastversion
+=item pattern
+
+=back
+
+=cut
+
+# watch_version=1: Lines have up to 5 parameters which are:
+#
+# $1 = Remote site
+# $2 = Directory on site
+# $3 = Pattern to match, with (...) around version number part
+# $4 = Last version we have (or 'debian' for the current Debian version)
+# $5 = Actions to take on successful retrieval
+#
+# watch_version=2:
+#
+# For ftp sites:
+# ftp://site.name/dir/path/pattern-(.+)\.tar\.gz [version [action]]
+#
+# For http sites:
+# http://site.name/dir/path/pattern-(.+)\.tar\.gz [version [action]]
+#
+# watch_version=3 and 4: See details in POD.
+#
+# For ftp sites:
+# ftp://site.name/dir/path pattern-(.+)\.tar\.gz [version [action]]
+#
+# For http sites:
+# http://site.name/dir/path pattern-(.+)\.tar\.gz [version [action]]
+#
+# For git sites:
+# http://site.name/dir/path/project.git refs/tags/v([\d\.]+) [version [action]]
+# or
+# http://site.name/dir/path/project.git HEAD [version [action]]
+#
+# For svn sites:
+# http://site.name/dir/path/project/tags v([\d\.]+)\/ [version [action]]
+# or
+# http://site.name/dir/path/project/trunk HEAD [version [action]]
+#
+# watch_version=3 and 4: See POD for details.
+#
+# Lines can be prefixed with opts=<opts> but can be folded for readability.
+#
+# Then the patterns matched will be checked to find the one with the
+# greatest version number (as determined by the (...) group), using the
+# Debian version number comparison algorithm described below.
+
+# Constructor hook: for watch file version 4+, peek at the third
+# whitespace-separated field of the line (lastversion) and store it as
+# the line "type" ("group", "checksum", a literal version, ...). The
+# full parse happens later in parse().
+# NOTE(review): the 3-parameter form (filepattern embedded in the URL,
+# handled in parse()) is not special-cased here, so type may be taken
+# from the wrong field for such lines — confirm.
+sub BUILD {
+    my ($self, $args) = @_;
+    if ($self->watch_version > 3) {
+        my $line = $self->line;
+        # Strip an opts=... prefix (quoted or bare), as parse() does
+        if ($line =~ s/^opt(?:ion)?s\s*=\s*//) {
+            unless ($line =~ s/^".*?"(?:\s+|$)//) {
+                $line =~ s/^[^"\s]\S*(?:\s+|$)//;
+            }
+        }
+        # Only the third field is needed; base/filepattern/action were
+        # previously captured into unused lexicals.
+        my (undef, undef, $lastversion) = split /\s+/, $line, 4;
+        $self->type($lastversion);
+    }
+    return $self;
+}
+
+sub parse {
+ my ($self) = @_;
+ uscan_debug "parse line $self->{line}";
+
+ # Need to clear remembered redirection URLs so we don't try to build URLs
+ # from previous watch files or watch lines
+ $self->downloader->user_agent->clear_redirections;
+
+ my $watchfile = $self->watchfile;
+ my ($action, $base, $basedir, $filepattern, $lastversion, $pattern, $site);
+ $dehs_tags->{package} = $self->pkg;
+
+ # Start parsing the watch line
+ if ($self->watch_version == 1) {
+ my ($dir);
+ ($site, $dir, $filepattern, $lastversion, $action) = split ' ',
+ $self->line, 5;
+ if ( !$lastversion
+ or $site =~ /\(.*\)/
+ or $dir =~ /\(.*\)/) {
+ uscan_warn <<EOF;
+there appears to be a version 2 format line in
+the version 1 watch file $watchfile;
+Have you forgotten a 'version=2' line at the start, perhaps?
+Skipping the line: $self->{line}
+EOF
+ return $self->status(1);
+ }
+ if ($site !~ m%\w+://%) {
+ $site = "ftp://$site";
+ if ($filepattern !~ /\(.*\)/) {
+
+ # watch_version=1 and old style watch file;
+ # pattern uses ? and * shell wildcards; everything from the
+ # first to last of these metachars is the pattern to match on
+ $filepattern =~ s/(\?|\*)/($1/;
+ $filepattern =~ s/(\?|\*)([^\?\*]*)$/$1)$2/;
+ $filepattern =~ s/\./\\./g;
+ $filepattern =~ s/\?/./g;
+ $filepattern =~ s/\*/.*/g;
+ $self->style('old');
+ uscan_warn
+ "Using very old style of filename pattern in $watchfile\n"
+ . " (this might lead to incorrect results): $3";
+ }
+ }
+
+ # Merge site and dir
+ $base = "$site/$dir/";
+ $base =~ s%(?<!:)//%/%g;
+ $base =~ m%^(\w+://[^/]+)%;
+ $site = $1;
+ $pattern = $filepattern;
+
+ # Check $filepattern is OK
+ if ($filepattern !~ /\(.*\)/) {
+ uscan_warn "Filename pattern missing version delimiters ()\n"
+ . " in $watchfile, skipping:\n $self->{line}";
+ return $self->status(1);
+ }
+ } else {
+ # version 2/3/4 watch file
+ if ($self->{line} =~ s/^opt(?:ion)?s\s*=\s*//) {
+ my $opts;
+ if ($self->{line} =~ s/^"(.*?)"(?:\s+|$)//) {
+ $opts = $1;
+ } elsif ($self->{line} =~ s/^([^"\s]\S*)(?:\s+|$)//) {
+ $opts = $1;
+ } else {
+ uscan_warn
+"malformed opts=... in watch file, skipping line:\n$self->{line}";
+ return $self->status(1);
+ }
+
+ # $opts string extracted from the argument of opts=
+ uscan_verbose "opts: $opts";
+
+ # $self->line watch line string without opts=... part
+ uscan_verbose "line: $self->{line}";
+
+ # user-agent strings has ,;: in it so special handling
+ if ( $opts =~ /^\s*user-agent\s*=\s*(.+?)\s*$/
+ or $opts =~ /^\s*useragent\s*=\s*(.+?)\s*$/) {
+ my $user_agent_string = $1;
+ $user_agent_string = $self->config->user_agent
+ if $self->config->user_agent ne
+ &Devscripts::Uscan::Config::default_user_agent;
+ $self->downloader->user_agent->agent($user_agent_string);
+ uscan_verbose "User-agent: $user_agent_string";
+ $opts = '';
+ }
+ my @opts = split /,/, $opts;
+ foreach my $opt (@opts) {
+ next unless ($opt =~ /\S/);
+ uscan_verbose "Parsing $opt";
+ if ($opt =~ /^\s*pasv\s*$/ or $opt =~ /^\s*passive\s*$/) {
+ $self->downloader->pasv(1);
+ } elsif ($opt =~ /^\s*active\s*$/
+ or $opt =~ /^\s*nopasv\s*$/
+ or $opt =~ /^s*nopassive\s*$/) {
+ $self->downloader->pasv(0);
+ }
+
+ # Line option "compression" is ignored if "--compression"
+ # was set in command-line
+ elsif ($opt =~ /^\s*compression\s*=\s*(.+?)\s*$/
+ and not $self->compression) {
+ $self->compression(get_compression($1));
+ } elsif ($opt =~ /^\s*bare\s*$/) {
+
+ # persistent $bare
+ ${ $self->shared->{bare} } = 1;
+ }
+
+ # Boolean line parameter
+ #
+ # $ regexp-assemble <<EOF
+ # decompress
+ # repack
+ # EOF
+ elsif ($opt =~ /^\s*(decompress|repack)\s*$/) {
+ $self->$1(1);
+ }
+
+ # Line parameter with a value
+ #
+ # $ regexp-assemble <<EOF
+ # component
+ # ctype
+ # date
+ # gitexport
+ # gitmode
+ # hrefdecode
+ # mode
+ # pgpmode
+ # pretty
+ # repacksuffix
+ # searchmode
+ # unzipopt
+ # EOF
+ elsif ($opt
+ =~ /^\s*((?:(?:(?:(?:search)?m|hrefdec)od|dat)e|c(?:omponent|type)|git(?:export|mode)|p(?:gpmode|retty)|repacksuffix|unzipopt))\s*=\s*(.+?)\s*$/
+ ) {
+ $self->$1($2);
+ } elsif ($opt =~ /^\s*versionmangle\s*=\s*(.+?)\s*$/) {
+ $self->uversionmangle([split /;/, $1]);
+ $self->dversionmangle([split /;/, $1]);
+ } elsif ($opt =~ /^\s*pgpsigurlmangle\s*=\s*(.+?)\s*$/) {
+ $self->pgpsigurlmangle([split /;/, $1]);
+ $self->pgpmode('mangle');
+ } elsif ($opt =~ /^\s*dversionmangle\s*=\s*(.+?)\s*$/) {
+
+ $self->dversionmangle([
+ map {
+
+ # If dversionmangle is "auto", replace it by
+ # DEB_EXT removal
+ $_ eq 'auto'
+ ? ('s/'
+ . &Devscripts::Uscan::WatchFile::DEB_EXT
+ . '//')
+ : ($_)
+ } split /;/,
+ $1
+ ]);
+ }
+
+ # Handle other *mangle:
+ #
+ # $ regexp-assemble <<EOF
+ # pagemangle
+ # dirversionmangle
+ # uversionmangle
+ # downloadurlmangle
+ # filenamemangle
+ # oversionmangle
+ # EOF
+ elsif ($opt
+ =~ /^\s*((?:d(?:ownloadurl|irversion)|(?:filenam|pag)e|[ou]version)mangle)\s*=\s*(.+?)\s*$/
+ ) {
+ $self->$1([split /;/, $2]);
+ } else {
+ uscan_warn "unrecognized option $opt";
+ }
+ }
+
+ # $self->line watch line string when no opts=...
+ uscan_verbose "line: $self->{line}";
+ }
+
+ if ($self->line eq '') {
+ uscan_verbose "watch line only with opts=\"...\" and no URL";
+ return $self->status(1);
+ }
+
+ # 4 parameter watch line
+ ($base, $filepattern, $lastversion, $action) = split /\s+/,
+ $self->line, 4;
+
+ # 3 parameter watch line (override)
+ if ($base =~ s%/([^/]*\([^/]*\)[^/]*)$%/%) {
+
+ # Last component of $base has a pair of parentheses, so no
+ # separate filepattern field; we remove the filepattern from the
+ # end of $base and rescan the rest of the line
+ $filepattern = $1;
+ (undef, $lastversion, $action) = split /\s+/, $self->line, 3;
+ }
+
+ # Always define "" if not defined
+ $lastversion //= '';
+ $action //= '';
+ if ($self->mode eq 'LWP') {
+ if ($base =~ m%^https?://%) {
+ $self->mode('http');
+ } elsif ($base =~ m%^ftp://%) {
+ $self->mode('ftp');
+ } else {
+ uscan_warn "unknown protocol for LWP: $base";
+ return $self->status(1);
+ }
+ }
+
+ # compression is persistent
+ $self->compression('default') unless ($self->compression);
+
+ # Set $lastversion to the numeric last version
+ # Update $self->versionmode (its default "newer")
+ if (!length($lastversion)
+ or $lastversion =~ /^(group|checksum|debian)$/) {
+ if (!defined $self->pkg_version) {
+ uscan_warn "Unable to determine the current version\n"
+ . " in $watchfile, skipping:\n $self->{line}";
+ return $self->status(1);
+ }
+ $lastversion = $self->pkg_version;
+ } elsif ($lastversion eq 'ignore') {
+ $self->versionmode('ignore');
+ $lastversion = $minversion;
+ } elsif ($lastversion eq 'same') {
+ $self->versionmode('same');
+ $lastversion = $minversion;
+ } elsif ($lastversion =~ m/^prev/) {
+ $self->versionmode('previous');
+
+ # set $lastversion = $previous_newversion later
+ }
+
+ # Check $filepattern has ( ...)
+ if ($filepattern !~ /\([^?].*\)/) {
+ if (($self->mode eq 'git' or $self->mode eq 'svn')
+ and $filepattern eq 'HEAD') {
+ $self->versionless(1);
+ } elsif ($self->mode eq 'git'
+ and $filepattern =~ m&^heads/&) {
+ $self->versionless(1);
+ } elsif ($self->mode eq 'http'
+ and @{ $self->filenamemangle }) {
+ $self->versionless(1);
+ } else {
+ uscan_warn
+ "Tag pattern missing version delimiters () in $watchfile"
+ . ", skipping:\n $self->{line}";
+ return $self->status(1);
+ }
+ }
+
+ # Check validity of options
+ if ($self->mode eq 'ftp'
+ and @{ $self->downloadurlmangle }) {
+ uscan_warn "downloadurlmangle option invalid for ftp sites,\n"
+ . " ignoring downloadurlmangle in $watchfile:\n"
+ . " $self->{line}";
+ return $self->status(1);
+ }
+ if ($self->mode ne 'git' and $self->gitexport ne 'default') {
+ uscan_warn "gitexport option is valid only in git mode,\n"
+ . " ignoring gitexport in $watchfile:\n"
+ . " $self->{line}";
+ return $self->status(1);
+ }
+
+ # Limit use of opts="repacksuffix" to the single upstream package
+ if ($self->repacksuffix and @{ $self->shared->{components} }) {
+ uscan_warn
+"repacksuffix is not compatible with the multiple upstream tarballs;\n"
+ . " use oversionmangle";
+ return $self->status(1);
+ }
+
+ # Allow 2 char shorthands for opts="pgpmode=..." and check
+ if ($self->pgpmode =~ m/^au/) {
+ $self->pgpmode('auto');
+ if (@{ $self->pgpsigurlmangle }) {
+ uscan_warn "Ignore pgpsigurlmangle because pgpmode=auto";
+ $self->pgpsigurlmangle([]);
+ }
+ } elsif ($self->pgpmode =~ m/^ma/) {
+ $self->pgpmode('mangle');
+ if (not @{ $self->pgpsigurlmangle }) {
+ uscan_warn "Missing pgpsigurlmangle. Setting pgpmode=default";
+ $self->pgpmode('default');
+ }
+ } elsif ($self->pgpmode =~ m/^no/) {
+ $self->pgpmode('none');
+ } elsif ($self->pgpmode =~ m/^ne/) {
+ $self->pgpmode('next');
+ } elsif ($self->pgpmode =~ m/^pr/) {
+ $self->pgpmode('previous');
+ $self->versionmode('previous'); # no other value allowed
+ # set $lastversion = $previous_newversion later
+ } elsif ($self->pgpmode =~ m/^se/) {
+ $self->pgpmode('self');
+ } elsif ($self->pgpmode =~ m/^git/) {
+ $self->pgpmode('gittag');
+ } else {
+ $self->pgpmode('default');
+ }
+
+ # For mode=svn, make pgpmode=none the default
+ if ($self->mode eq 'svn') {
+ if ($self->pgpmode eq 'default') {
+ $self->pgpmode('none');
+ } elsif ($self->pgpmode ne 'none') {
+ uscan_die "Only pgpmode=none can be used with mode=svn.\n";
+ }
+ }
+
+ # If PGP used, check required programs and generate files
+ if (@{ $self->pgpsigurlmangle }) {
+ my $pgpsigurlmanglestring = join(";", @{ $self->pgpsigurlmangle });
+ uscan_debug "\$self->{'pgpmode'}=$self->{'pgpmode'}, "
+ . "\$self->{'pgpsigurlmangle'}=$pgpsigurlmanglestring";
+ } else {
+ uscan_debug "\$self->{'pgpmode'}=$self->{'pgpmode'}, "
+ . "\$self->{'pgpsigurlmangle'}=undef";
+ }
+
+ # Check component for duplication and set $orig to the proper
+ # extension string
+ if ($self->pgpmode ne 'previous') {
+ if ($self->component) {
+ if (grep { $_ eq $self->component }
+ @{ $self->shared->{components} }) {
+ uscan_warn "duplicate component name: $self->{component}";
+ return $self->status(1);
+ }
+ push @{ $self->shared->{components} }, $self->component;
+ } else {
+ $self->shared->{origcount}++;
+ if ($self->shared->{origcount} > 1) {
+ uscan_warn "more than one main upstream tarballs listed.";
+
+ # reset variables
+ @{ $self->shared->{components} } = ();
+ $self->{shared}->{common_newversion} = undef;
+ $self->{shared}->{common_mangled_newversion} = undef;
+ $self->{shared}->{previous_newversion} = undef;
+ $self->{shared}->{previous_newfile_base} = undef;
+ $self->{shared}->{previous_sigfile_base} = undef;
+ $self->{shared}->{previous_download_available} = undef;
+ $self->{shared}->{uscanlog} = undef;
+ }
+ }
+ }
+
+ # Allow 2 char shorthands for opts="gitmode=..." and check
+ if ($self->gitmode =~ m/^sh/) {
+ $self->gitmode('shallow');
+ } elsif ($self->gitmode =~ m/^fu/) {
+ $self->gitmode('full');
+ } else {
+ uscan_warn
+ "Override strange manual gitmode '$self->gitmode --> 'shallow'";
+ $self->gitmode('shallow');
+ }
+
+ # Handle sf.net addresses specially
+ if (!$self->shared->{bare} and $base =~ m%^https?://sf\.net/%) {
+ uscan_verbose "sf.net redirection to qa.debian.org/watch/sf.php";
+ $base =~ s%^https?://sf\.net/%https://qa.debian.org/watch/sf.php/%;
+ $filepattern .= '(?:\?.*)?';
+ }
+
+ # Handle pypi.python.org addresses specially
+ if ( !$self->shared->{bare}
+ and $base =~ m%^https?://pypi\.python\.org/packages/source/%) {
+ uscan_verbose "pypi.python.org redirection to pypi.debian.net";
+ $base
+ =~ s%^https?://pypi\.python\.org/packages/source/./%https://pypi.debian.net/%;
+ }
+
+ # Handle pkg-ruby-extras gemwatch addresses specially
+ if ($base
+ =~ m%^https?://pkg-ruby-extras\.alioth\.debian\.org/cgi-bin/gemwatch%
+ ) {
+ uscan_warn
+"redirecting DEPRECATED pkg-ruby-extras.alioth.debian.org/cgi-bin/gemwatch"
+ . " to gemwatch.debian.net";
+ $base
+ =~ s%^https?://pkg-ruby-extras\.alioth\.debian\.org/cgi-bin/gemwatch%https://gemwatch.debian.net%;
+ }
+
+ }
+
+ if ($self->ctype) {
+ my $version;
+ my $mod = "Devscripts::Uscan::Ctype::$self->{ctype}";
+ eval "require $mod";
+ if ($@) {
+ uscan_warn "unknown ctype $self->{ctype}";
+ uscan_debug $@;
+ return $self->status(1);
+ }
+ my $dir = $self->component || '.';
+ my $ctypeTransform = $mod->new({ dir => $dir });
+ if ($version = $ctypeTransform->version) {
+ $lastversion = $version;
+ uscan_verbose "Found version $version for component $dir";
+ $self->versionmode('newer');
+ }
+ }
+
+ # End parsing the watch line for all version=1/2/3/4
+ # all options('...') variables have been set
+
+ # Override the last version with --download-debversion
+ if ($self->config->download_debversion) {
+ $lastversion = $self->config->download_debversion;
+ $lastversion =~ s/-[^-]+$//; # revision
+ $lastversion =~ s/^\d+://; # epoch
+ uscan_verbose
+"specified --download-debversion to set the last version: $lastversion";
+ } elsif ($self->versionmode eq 'previous') {
+ $lastversion = $self->shared->{previous_newversion};
+ # $lastversion is set only if something was downloaded before
+ if ($lastversion) {
+ uscan_verbose "Previous version downloaded: $lastversion";
+ } else {
+ uscan_verbose "Previous version not set, skipping";
+ }
+ } else {
+ uscan_verbose
+"Last orig.tar.* tarball version (from debian/changelog): $lastversion";
+ }
+
+ # And mangle it if requested
+ my $mangled_lastversion = $lastversion;
+ if (
+ mangle(
+ $watchfile, \$self->line,
+ 'dversionmangle:', \@{ $self->dversionmangle },
+ \$mangled_lastversion
+ )
+ ) {
+ return $self->status(1);
+ }
+
+ # Set $download_version etc. if already known
+ if ($self->versionmode eq 'ignore' and $self->config->download_version) {
+ uscan_verbose 'Ignore --download_version for component with "ignore"';
+ } elsif ($self->config->download_version) {
+ $self->shared->{download_version} = $self->config->download_version;
+ $self->shared->{download} = 2
+ if $self->shared->{download} == 1; # Change default 1 -> 2
+ $self->badversion(1);
+ uscan_verbose "Download the --download-version specified version: "
+ . "$self->{shared}->{download_version}";
+ } elsif ($self->config->download_debversion) {
+ $self->shared->{download_version} = $mangled_lastversion;
+ $self->shared->{download} = 2
+ if $self->shared->{download} == 1; # Change default 1 -> 2
+ $self->badversion(1);
+ uscan_verbose "Download the --download-debversion specified version "
+ . "(dversionmangled): $self->{shared}->{download_version}";
+ } elsif ($self->config->download_current_version) {
+ $self->shared->{download_version} = $mangled_lastversion;
+ $self->shared->{download} = 2
+ if $self->shared->{download} == 1; # Change default 1 -> 2
+ $self->badversion(1);
+ uscan_verbose
+ "Download the --download-current-version specified version: "
+ . "$self->{shared}->{download_version}";
+ } elsif ($self->versionmode eq 'same') {
+ unless (defined $self->shared->{common_newversion}) {
+ uscan_warn
+"Unable to set versionmode=prev for the line without opts=pgpmode=prev\n"
+ . " in $watchfile, skipping:\n"
+ . " $self->{line}";
+ return $self->status(1);
+ }
+ $self->shared->{download_version} = $self->shared->{common_newversion};
+ $self->shared->{download} = 2
+ if $self->shared->{download} == 1; # Change default 1 -> 2
+ $self->badversion(1);
+ uscan_verbose "Download secondary tarball with the matching version: "
+ . "$self->{shared}->{download_version}";
+ } elsif ($self->versionmode eq 'previous') {
+ unless ($self->pgpmode eq 'previous'
+ and defined $self->shared->{previous_newversion}) {
+ if ($self->shared->{download}) {
+ uscan_warn
+"Unable to set versionmode=prev for the line without opts=pgpmode=prev\n"
+ . " in $watchfile, skipping:\n $self->{line}";
+ } else {
+ uscan_verbose
+ "Nothing was downloaded before, skipping pgp check";
+ uscan_verbose " line " . $self->line;
+ }
+ return $self->status(1);
+ }
+ $self->shared->{download_version}
+ = $self->shared->{previous_newversion};
+ $self->shared->{download} = 2
+ if $self->shared->{download} == 1; # Change default 1 -> 2
+ $self->badversion(1);
+ uscan_verbose
+ "Download the signature file with the previous tarball's version:"
+ . " $self->{shared}->{download_version}";
+ } else {
+ # $options{'versionmode'} should be debian or ignore
+ if (defined $self->shared->{download_version}) {
+ uscan_die
+ "\$download_version defined after dversionmangle ... strange";
+ } else {
+ uscan_verbose "Last orig.tar.* tarball version (dversionmangled):"
+ . " $mangled_lastversion";
+ }
+ }
+
+ if ($self->watch_version != 1) {
+ if ($self->mode eq 'http' or $self->mode eq 'ftp') {
+ if ($base =~ m%^(\w+://[^/]+)%) {
+ $site = $1;
+ } else {
+ uscan_warn "Can't determine protocol and site in\n"
+ . " $watchfile, skipping:\n"
+ . " $self->{line}";
+ return $self->status(1);
+ }
+
+ # Find the path with the greatest version number matching the regex
+ $base
+ = recursive_regex_dir($self, $base,
+ $self->dirversionmangle, $watchfile, \$self->line,
+ $self->shared->{download_version});
+ if ($base eq '') {
+ return $self->status(1);
+ }
+
+ # We're going to make the pattern
+ # (?:(?:http://site.name)?/dir/path/)?base_pattern
+ # It's fine even for ftp sites
+ $basedir = $base;
+ $basedir =~ s%^\w+://[^/]+/%/%;
+ $basedir =~ s%/[^/]*(?:[#?].*)?$%/%;
+ $pattern
+ = "(?:(?:$site)?" . quotemeta($basedir) . ")?$filepattern";
+ } else {
+ # git tag match is simple
+ $site = $base; # dummy
+ $basedir = ''; # dummy
+ $pattern = $filepattern;
+ }
+ }
+
+ push @{ $self->sites }, $site;
+ push @{ $self->basedirs }, $basedir;
+ push @{ $self->patterns }, $pattern;
+
+ my $match = '';
+
+# Start Checking $site and look for $filepattern which is newer than $lastversion
+ uscan_debug "watch file has:\n"
+ . " \$base = $base\n"
+ . " \$filepattern = $filepattern\n"
+ . " \$lastversion = $lastversion\n"
+ . " \$action = $action\n"
+ . " mode = $self->{mode}\n"
+ . " pgpmode = $self->{pgpmode}\n"
+ . " versionmode = $self->{versionmode}\n"
+ . " \$site = $site\n"
+ . " \$basedir = $basedir";
+
+ $self->parse_result({
+ base => $base,
+ filepattern => $filepattern,
+ lastversion => $lastversion,
+ action => $action,
+ site => $site,
+ basedir => $basedir,
+ mangled_lastversion => $mangled_lastversion,
+ pattern => $pattern,
+ });
+
+# What is the most recent file, based on the filenames?
+# We first have to find the candidates, then we sort them using
+# Devscripts::Versort::upstream_versort (if it is real upstream version string) or
+# Devscripts::Versort::versort (if it is suffixed upstream version string)
+ return $self->status;
+}
+
+# II - search
+
+=head3 search()
+
+Search new file link and new version on the remote site using either:
+
=over

=item L<Devscripts::Uscan::http>::http_search()

=item L<Devscripts::Uscan::ftp>::ftp_search()

=item L<Devscripts::Uscan::git>::git_search()

=item L<Devscripts::Uscan::svn>::svn_search()

=back
+
+It populates B<$self-E<gt>search_result> hash ref with the following keys:
+
=over

=item B<newversion>: version number to be used for the downloaded file

=item B<newfile>: URL/tag pointing to the file to be downloaded

=back
+
+=cut
+
# Query the remote site (via the mode-specific backend selected by
# _do()) for the newest matching file and version, and record them in
# $self->search_result.  For version=1 ("old" style) watch files the
# version number is extracted heuristically from the file name.
# Returns $self->status (non-zero on failure).
sub search {
    my $self = shift;
    uscan_debug "line: search()";

    my ($version_found, $file_found) = $self->_do('search');
    return $self->status(1)
      unless ($version_found and $file_found);
    return $self->status if $self->status;

    uscan_verbose "Looking at \$base = $self->{parse_result}->{base} with\n"
      . " \$filepattern = $self->{parse_result}->{filepattern} found\n"
      . " \$newfile = $file_found\n"
      . " \$newversion = $version_found\n"
      . " \$lastversion = $self->{parse_result}->{mangled_lastversion}";
    $self->search_result({
        newversion => $version_found,
        newfile    => $file_found,
    });

    # Watch files written before the (...) capture-group syntax existed
    # carry no version delimiter, so fall back to the old heuristics:
    # take a purely numeric dotted sequence out of the file name.
    if ($self->style eq 'old') {
        if ($version_found =~ /^\D*(\d+\.(?:\d+\.)*\d+)\D*$/) {
            $self->search_result->{newversion} = $1;
        } else {
            uscan_warn <<"EOF";
$progname warning: In $self->{watchfile}, couldn\'t determine a
 pure numeric version number from the file name for watch line
 $self->{line}
 and file name $file_found
 Please use a new style watch file instead!
EOF
            $self->status(1);
        }
    }
    return $self->status;
}
+
+# III - get_upstream_url
+
+=head3 get_upstream_url()
+
+Transform newfile/newversion into upstream url using either:
+
=over

=item L<Devscripts::Uscan::http>::http_upstream_url()

=item L<Devscripts::Uscan::ftp>::ftp_upstream_url()

=item L<Devscripts::Uscan::git>::git_upstream_url()

=item L<Devscripts::Uscan::svn>::svn_upstream_url()

=back
+
+Result is stored in B<$self-E<gt>upstream_url> accessor.
+
+=cut
+
# Turn the search result into a concrete upstream download URL (or
# URL+tag) via the mode-specific backend, storing it in
# $self->upstream_url.  Before dispatching, normalize the mode: an
# http(s) site implies mode=http unless git/svn was requested, and an
# unset mode defaults to ftp.  Returns $self->status.
sub get_upstream_url {
    my $self = shift;
    uscan_debug "line: get_upstream_url()";

    my $mode = $self->mode;
    if (    $self->parse_result->{site} =~ m%^https?://%
        and not $mode eq 'git'
        and not $mode eq 'svn') {
        $self->mode('http');
    } elsif (not $mode) {
        $self->mode('ftp');
    }

    $self->upstream_url($self->_do('upstream_url'));
    return $self->status if $self->status;
    uscan_verbose "Upstream URL(+tag) to download is identified as"
      . " $self->{upstream_url}";
    return $self->status;
}
+
+# IV - get_newfile_base
+
+=head3 get_newfile_base()
+
+Calculates the filename (filenamemangled) for downloaded file using either:
+
=over

=item L<Devscripts::Uscan::http>::http_newfile_base()

=item L<Devscripts::Uscan::ftp>::ftp_newfile_base()

=item L<Devscripts::Uscan::git>::git_newfile_base()

=item L<Devscripts::Uscan::svn>::svn_newfile_base()

=back
+
+Result is stored in B<$self-E<gt>newfile_base> accessor.
+
+=cut
+
# Compute the local (filenamemangled) file name for the download via
# the mode-specific backend and store it in $self->newfile_base.
# Returns $self->status (non-zero on backend failure).
sub get_newfile_base {
    my $self = shift;
    uscan_debug "line: get_newfile_base()";
    $self->newfile_base($self->_do('newfile_base'));
    if ($self->status) {
        return $self->status;
    }
    uscan_verbose
      "Filename (filenamemangled) for downloaded file: $self->{newfile_base}";
    return $self->status;
}
+
+# V - cmp_versions
+
+=head3 cmp_versions()
+
+Compare available and local versions.
+
+=cut
+
# Compare the newest upstream version found by search() against the
# (dversionmangled) local version, record the outcome in the global
# $dehs_tags structure for DEHS output, and adjust
# $self->shared->{download} (0 = skip, >1 = forced).  Always returns 0.
sub cmp_versions {
    my ($self) = @_;
    uscan_debug "line: cmp_versions()";
    my $name = $self->component || $self->pkg;
    my $mangled_lastversion = $self->parse_result->{mangled_lastversion};
    # The first (main) tarball's version becomes the shared reference
    # version for any following component tarballs of a MUT package.
    unless (defined $self->shared->{common_newversion}) {
        $self->shared->{common_newversion}
          = $self->search_result->{newversion};
    }

    # //= so the main tarball's values win; component values go to lists.
    $dehs_tags->{'debian-uversion'} //= $self->parse_result->{lastversion};
    $dehs_tags->{'debian-mangled-uversion'} //= $mangled_lastversion;
    $dehs_tags->{'upstream-version'} //= $self->search_result->{newversion};
    $dehs_tags->{'upstream-url'} //= $self->upstream_url;
    $dehs_tags->{'component-name'} //= [];
    $dehs_tags->{'component-upstream-version'} //= [];
    if ($self->component) {
        push @{ $dehs_tags->{'component-name'} }, $self->component;
        push @{ $dehs_tags->{'component-debian-uversion'} },
          $self->parse_result->{lastversion};
        push @{ $dehs_tags->{'component-debian-mangled-uversion'} },
          $mangled_lastversion;
        push @{ $dehs_tags->{'component-upstream-version'} },
          $self->search_result->{newversion};
        push @{ $dehs_tags->{'component-upstream-url'} }, $self->upstream_url;
    }

    # Compare via Dpkg::Version; a fixed epoch ("1:") and revision
    # ("-0") are prepended/appended so both sides parse as full Debian
    # version strings (check => 0 tolerates odd upstream versions).
    my $mangled_ver
      = Dpkg::Version->new("1:${mangled_lastversion}-0", check => 0);
    my $upstream_ver
      = Dpkg::Version->new("1:$self->{search_result}->{newversion}-0",
        check => 0);
    my $compver;
    if ($mangled_ver == $upstream_ver) {
        $compver = 'same';
    } elsif ($mangled_ver > $upstream_ver) {
        $compver = 'older';
    } else {
        $compver = 'newer';
    }

    # Version dependent $download adjustment
    if (defined $self->shared->{download_version}
        and not $self->versionmode eq 'ignore') {

        # Pretend to find a newer upstream version to exit without error
        uscan_msg "Newest version of $name on remote site is "
          . "$self->{search_result}->{newversion}, "
          . "specified download version is $self->{shared}->{download_version}";
        # NOTE(review): bare $found here vs. $main::found everywhere else
        # in this sub -- confirm both name the same counter.
        $found++ unless ($self->versionmode =~ /(?:same|ignore)/);
    } elsif ($self->versionmode eq 'newer') {
        if ($compver eq 'newer') {
            uscan_msg "Newest version of $name on remote site is "
              . "$self->{search_result}->{newversion}, "
              . "local version is $self->{parse_result}->{mangled_lastversion}\n"
              . (
                $mangled_lastversion eq $self->parse_result->{lastversion}
                ? ""
                : " (mangled local version is $mangled_lastversion)\n"
              );

            # There's a newer upstream version available, which may already
            # be on our system or may not be
            uscan_msg " => Newer package available from:\n"
              . " => $self->{upstream_url}";
            $dehs_tags->{'status'} //= "newer package available";
            $main::found++;
        } elsif ($compver eq 'same') {
            uscan_verbose "Newest version of $name on remote site is "
              . $self->search_result->{newversion}
              . ", local version is $self->{parse_result}->{mangled_lastversion}\n"
              . (
                $mangled_lastversion eq $self->parse_result->{lastversion}
                ? ""
                : " (mangled local version is $mangled_lastversion)\n"
              );
            uscan_verbose " => Package is up to date from:\n"
              . " => $self->{upstream_url}";
            $dehs_tags->{'status'} //= "up to date";
            if ($self->shared->{download} > 1) {

                # 2=force-download or 3=overwrite-download
                uscan_verbose " => Forcing download as requested";
                $main::found++;
            } else {
                # 0=no-download or 1=download
                $self->shared->{download} = 0;
            }
        } else {    # $compver eq 'older'
            uscan_verbose "Newest version of $name on remote site is "
              . $self->search_result->{newversion}
              . ", local version is $self->{parse_result}->{mangled_lastversion}\n"
              . (
                $mangled_lastversion eq $self->parse_result->{lastversion}
                ? ""
                : " (mangled local version is $mangled_lastversion)\n"
              );
            uscan_verbose " => Only older package available from:\n"
              . " => $self->{upstream_url}";
            $dehs_tags->{'status'} //= "only older package available";
            if ($self->shared->{download} > 1) {
                uscan_verbose " => Forcing download as requested";
                $main::found++;
            } else {
                $self->shared->{download} = 0;
            }
        }
    } elsif ($self->versionmode eq 'ignore') {
        uscan_msg "Newest version of $name on remote site is "
          . $self->search_result->{newversion}
          . ", ignore local version";
        $dehs_tags->{'status'} //= "package available";
    } else {    # same/previous -- secondary-tarball or signature-file
        uscan_die "strange ... <version> stanza = same/previous "
          . "should have defined \$download_version";
    }
    return 0;
}
+
+# VI - download_file_and_sig
+
+=head3 download_file_and_sig()
+
+Download file and, if available and needed, signature files.
+
+=cut
+
# Registry of target file names already fetched in this run, used to
# detect watch lines whose filenamemangle produces colliding names.
my %already_downloaded;

# Download the upstream tarball (unless downloading is disabled or the
# file is already present) and, when pgpmode requires it, the detached
# OpenPGP signature; then verify the signature.  Also decompresses the
# downloaded archive when opts=decompress is set and handles the
# pgpmode=next/previous hand-over between consecutive watch lines.
# Returns $self->status (non-zero on failure), or 0 on early exit.
sub download_file_and_sig {
    my ($self) = @_;
    uscan_debug "line: download_file_and_sig()";
    my $skip_git_vrfy;

    # If we're not downloading or performing signature verification, we can
    # stop here
    if (!$self->shared->{download} || $self->shared->{signature} == -1) {
        return 0;
    }

    # configure downloader
    $self->downloader->git_export_all($self->gitexport eq 'all');

    # 6.1 download tarball
    my $download_available = 0;
    my $upstream_base = basename($self->upstream_url);
    $self->signature_available(0);
    my $sigfile;
    my $sigfile_base = $self->newfile_base;
    uscan_die
"Already downloaded a file named $self->{newfile_base}: use filenamemangle to avoid this"
      if ($already_downloaded{ $self->{newfile_base} });
    $already_downloaded{ $self->{newfile_base} } = 1;

    if ($self->pgpmode ne 'previous') {

        # try download package
        if ($self->shared->{download} == 3
            and -e "$self->{config}->{destdir}/$self->{newfile_base}") {
            uscan_verbose
              "Downloading and overwriting existing file: $self->{newfile_base}";
            $download_available = $self->downloader->download(
                $self->upstream_url,
                "$self->{config}->{destdir}/$self->{newfile_base}",
                $self,
                $self->parse_result->{base},
                $self->pkg_dir,
                $self->pkg,
                $self->mode
            );
            if ($download_available) {
                dehs_verbose
                  "Successfully downloaded package: $self->{newfile_base}\n";
            } else {
                dehs_verbose
                  "Failed to download upstream package: $upstream_base\n";
            }
        } elsif (-e "$self->{config}->{destdir}/$self->{newfile_base}") {
            # Reuse the already-present file; no gpg check of git tags then.
            $download_available = 1;
            dehs_verbose
              "Not downloading, using existing file: $self->{newfile_base}\n";
            $skip_git_vrfy = 1;
        } elsif ($self->shared->{download} > 0) {
            uscan_verbose "Downloading upstream package: $upstream_base";
            $download_available = $self->downloader->download(
                $self->upstream_url,
                "$self->{config}->{destdir}/$self->{newfile_base}",
                $self,
                $self->parse_result->{base},
                $self->pkg_dir,
                $self->pkg,
                $self->mode,
                $self->gitrepo_dir,
            );
            if ($download_available) {
                dehs_verbose
                  "Successfully downloaded upstream package: $upstream_base\n";
                if (@{ $self->filenamemangle }) {
                    dehs_verbose
                      "Renamed upstream package to: $self->{newfile_base}\n";
                }
            } else {
                dehs_verbose
                  "Failed to download upstream package: $upstream_base\n";
            }
        } else {    # $download = 0,
            $download_available = 0;
            dehs_verbose "Not downloading upstream package: $upstream_base\n";
        }
    }
    if ($self->pgpmode eq 'self') {
        $sigfile_base =~ s/^(.*?)\.[^\.]+$/$1/;    # drop .gpg, .asc, ...
        if ($self->shared->{signature} == -1) {
            uscan_warn("SKIP Checking OpenPGP signature (by request).\n");
            $download_available
              = -1;    # can't proceed with self-signature archive
            $self->signature_available(0);
        } elsif (!$self->keyring) {
            uscan_die("FAIL Checking OpenPGP signature (no keyring).\n");
        } elsif ($download_available == 0) {
            uscan_warn
"FAIL Checking OpenPGP signature (no signed upstream tarball downloaded).";
            return $self->status(1);
        } else {
            $self->keyring->verify(
                "$self->{config}->{destdir}/$sigfile_base",
                "$self->{config}->{destdir}/$self->{newfile_base}"
            );

# XXX FIXME XXX extract signature as detached signature to $self->{config}->{destdir}/$sigfile
            $sigfile = $self->{newfile_base};    # XXX FIXME XXX place holder
            $self->{newfile_base} = $sigfile_base;
            $self->signature_available(3);
        }
    }
    if ($self->pgpmode ne 'previous') {

        # Decompress archive if requested and applicable
        if ($download_available == 1 and $self->{'decompress'}) {
            # $suffix_gz keeps its leading dot (".gz", ".xz", ...); an
            # unrecognized name leaves it empty and hits the die below.
            my $suffix_gz = $sigfile_base;
            $suffix_gz =~ s/.*?(\.gz|\.xz|\.bz2|\.lzma|\.zstd?)?$/$1/;
            if ($suffix_gz eq '.gz') {
                if (-x '/bin/gunzip') {
                    uscan_exec('/bin/gunzip', "--keep",
                        "$self->{config}->{destdir}/$sigfile_base");
                    $sigfile_base =~ s/(.*?)\.gz/$1/;
                } else {
                    uscan_warn("Please install gzip.\n");
                    return $self->status(1);
                }
            } elsif ($suffix_gz eq '.xz') {
                if (-x '/usr/bin/unxz') {
                    uscan_exec('/usr/bin/unxz', "--keep",
                        "$self->{config}->{destdir}/$sigfile_base");
                    $sigfile_base =~ s/(.*?)\.xz/$1/;
                } else {
                    uscan_warn("Please install xz-utils.\n");
                    return $self->status(1);
                }
            } elsif ($suffix_gz eq '.bz2') {
                if (-x '/bin/bunzip2') {
                    uscan_exec('/bin/bunzip2', "--keep",
                        "$self->{config}->{destdir}/$sigfile_base");
                    $sigfile_base =~ s/(.*?)\.bz2/$1/;
                } else {
                    uscan_warn("Please install bzip2.\n");
                    return $self->status(1);
                }
            } elsif ($suffix_gz eq '.lzma') {
                if (-x '/usr/bin/unlzma') {
                    uscan_exec('/usr/bin/unlzma', "--keep",
                        "$self->{config}->{destdir}/$sigfile_base");
                    $sigfile_base =~ s/(.*?)\.lzma/$1/;
                } else {
                    uscan_warn "Please install xz-utils or lzma.";
                    return $self->status(1);
                }
            # FIX: the suffix includes the leading dot, so the old
            # /^zstd?$/ test could never match and .zst/.zstd archives
            # always died as "Unknown type file to decompress".
            } elsif ($suffix_gz =~ /^\.zstd?$/) {
                if (-x '/usr/bin/unzstd') {
                    uscan_exec('/usr/bin/unzstd', "--keep",
                        "$self->{config}->{destdir}/$sigfile_base");
                    # Anchored so ".zstd" is stripped whole (the old
                    # unanchored s/\.zst// left a stray trailing "d").
                    $sigfile_base =~ s/(.*?)\.zstd?$/$1/;
                } else {
                    uscan_warn("Please install zstd.\n");
                    return $self->status(1);
                }
            } else {
                uscan_die "Unknown type file to decompress: $sigfile_base";
            }
        }
    }

    # 6.2 download signature
    my $pgpsig_url;
    my $suffix_sig;
    if (($self->pgpmode eq 'default' or $self->pgpmode eq 'auto')
        and $self->shared->{signature} == 1) {
        uscan_verbose
          "Start checking for common possible upstream OpenPGP signature files";
        # Probe with HEAD requests for the usual detached-signature names.
        foreach $suffix_sig (qw(asc gpg pgp sig sign)) {
            my $sigrequest = HTTP::Request->new(
                'HEAD' => "$self->{upstream_url}.$suffix_sig");
            my $sigresponse
              = $self->downloader->user_agent->request($sigrequest);
            if ($sigresponse->is_success()) {
                if ($self->pgpmode eq 'default') {
                    uscan_warn "Possible OpenPGP signature found at:\n"
                      . " $self->{upstream_url}.$suffix_sig\n"
                      . " * Add opts=pgpsigurlmangle=s/\$/.$suffix_sig/ or "
                      . "opts=pgpmode=auto to debian/watch\n"
                      . " * Add debian/upstream/signing-key.asc.\n"
                      . " See uscan(1) for more details";
                    $self->pgpmode('none');
                } else {    # auto
                    $self->pgpmode('mangle');
                    $self->pgpsigurlmangle(['s/$/.' . $suffix_sig . '/',]);
                }
                last;
            }
        }
        uscan_verbose
          "End checking for common possible upstream OpenPGP signature files";
        $self->signature_available(0);
    }
    if ($self->pgpmode eq 'mangle') {
        $pgpsig_url = $self->upstream_url;
        if (
            mangle(
                $self->watchfile, \$self->line,
                'pgpsigurlmangle:', \@{ $self->pgpsigurlmangle },
                \$pgpsig_url
            )
        ) {
            return $self->status(1);
        }
        if (!$suffix_sig) {
            $suffix_sig = $pgpsig_url;
            $suffix_sig =~ s/^.*\.//;
            if ($suffix_sig and $suffix_sig !~ m/^[a-zA-Z]+$/)
            {    # strange suffix
                $suffix_sig = "pgp";
            }
            uscan_debug "Add $suffix_sig suffix based on $pgpsig_url.";
        }
        $sigfile = "$sigfile_base.$suffix_sig";
        if ($self->shared->{signature} == 1) {
            uscan_verbose "Downloading OpenPGP signature from:\n"
              . " $pgpsig_url (pgpsigurlmangled)\n as $sigfile";
            $self->signature_available(
                $self->downloader->download(
                    $pgpsig_url, "$self->{config}->{destdir}/$sigfile",
                    $self, $self->parse_result->{base},
                    $self->pkg_dir, $self->pkg,
                    $self->mode
                ));
        } else {    # -1, 0
            uscan_verbose "Not downloading OpenPGP signature from:\n"
              . " $pgpsig_url (pgpsigurlmangled)\n as $sigfile";
            $self->signature_available(
                (-e "$self->{config}->{destdir}/$sigfile") ? 1 : 0);
        }
    } elsif ($self->pgpmode eq 'previous') {
        # This line downloads only the signature; the tarball and its
        # metadata come from the state saved by the preceding
        # pgpmode=next watch line.
        $pgpsig_url = $self->upstream_url;
        $sigfile    = $self->newfile_base;
        if ($self->shared->{signature} == 1) {
            uscan_verbose "Downloading OpenPGP signature from:\n"
              . " $pgpsig_url (pgpmode=previous)\n as $sigfile";
            $self->signature_available(
                $self->downloader->download(
                    $pgpsig_url, "$self->{config}->{destdir}/$sigfile",
                    $self, $self->parse_result->{base},
                    $self->pkg_dir, $self->pkg,
                    $self->mode
                ));
        } else {    # -1, 0
            uscan_verbose "Not downloading OpenPGP signature from:\n"
              . " $pgpsig_url (pgpmode=previous)\n as $sigfile";
            $self->signature_available(
                (-e "$self->{config}->{destdir}/$sigfile") ? 1 : 0);
        }
        $download_available = $self->shared->{previous_download_available};
        $self->{newfile_base} = $self->shared->{previous_newfile_base};
        $sigfile_base = $self->shared->{previous_sigfile_base};
        uscan_verbose
          "Use $self->{newfile_base} as upstream package (pgpmode=previous)";
    }
    $self->sigfile("$self->{config}->{destdir}/$sigfile") if ($sigfile);

    # 6.3 verify signature
    #
    # 6.3.1 pgpmode
    if ($self->pgpmode eq 'mangle' or $self->pgpmode eq 'previous') {
        if ($self->shared->{signature} == -1) {
            uscan_verbose("SKIP Checking OpenPGP signature (by request).\n");
        } elsif (!$self->keyring) {
            uscan_die("FAIL Checking OpenPGP signature (no keyring).\n");
        } elsif ($download_available == 0) {
            uscan_warn
"FAIL Checking OpenPGP signature (no upstream tarball downloaded).";
            return $self->status(1);
        } elsif ($self->signature_available == 0) {
            uscan_die(
"FAIL Checking OpenPGP signature (no signature file downloaded).\n"
            );
        } else {
            if ($self->shared->{signature} == 0) {
                uscan_verbose "Use the existing file: $sigfile";
            }
            $self->keyring->verifyv(
                "$self->{config}->{destdir}/$sigfile",
                "$self->{config}->{destdir}/$sigfile_base"
            );
        }
        $self->shared->{previous_newfile_base}        = undef;
        $self->shared->{previous_sigfile_base}        = undef;
        $self->shared->{previous_newversion}          = undef;
        $self->shared->{previous_download_available}  = undef;
    } elsif ($self->pgpmode eq 'none' or $self->pgpmode eq 'default') {
        uscan_verbose "Missing OpenPGP signature.";
        $self->shared->{previous_newfile_base}        = undef;
        $self->shared->{previous_sigfile_base}        = undef;
        $self->shared->{previous_newversion}          = undef;
        $self->shared->{previous_download_available}  = undef;
    } elsif ($self->pgpmode eq 'next') {
        # Save state so the following pgpmode=previous line can verify
        # this tarball against the signature it downloads.
        uscan_verbose
          "Defer checking OpenPGP signature to the next watch line";
        $self->shared->{previous_newfile_base} = $self->newfile_base;
        $self->shared->{previous_sigfile_base} = $sigfile_base;
        $self->shared->{previous_newversion}
          = $self->search_result->{newversion};
        $self->shared->{previous_download_available} = $download_available;
        uscan_verbose "previous_newfile_base = $self->{newfile_base}";
        uscan_verbose "previous_sigfile_base = $sigfile_base";
        uscan_verbose
          "previous_newversion = $self->{search_result}->{newversion}";
        uscan_verbose "previous_download_available = $download_available";
    } elsif ($self->pgpmode eq 'self') {
        $self->shared->{previous_newfile_base}        = undef;
        $self->shared->{previous_sigfile_base}        = undef;
        $self->shared->{previous_newversion}          = undef;
        $self->shared->{previous_download_available}  = undef;
    } elsif ($self->pgpmode eq 'auto') {
        uscan_verbose "Don't check OpenPGP signature";
    } elsif ($self->pgpmode eq 'gittag') {
        if ($skip_git_vrfy) {
            uscan_warn "File already downloaded, skipping gpg verification";
        } elsif (!$self->keyring) {
            uscan_warn "No keyring file, skipping gpg verification";
            return $self->status(1);
        } else {
            my ($gitrepo, $gitref) = split /[[:space:]]+/, $self->upstream_url;
            $self->keyring->verify_git(
                "$self->{downloader}->{destdir}/"
                  . $self->pkg
                  . "-temporary.$$.git",
                $gitref, $self->downloader->git_upstream
            );
        }
    } else {
        uscan_warn "strange ... unknown pgpmode = $self->{pgpmode}";
        return $self->status(1);
    }
    my $mangled_newversion = $self->search_result->{newversion};
    if (
        mangle(
            $self->watchfile, \$self->line,
            'oversionmangle:', \@{ $self->oversionmangle },
            \$mangled_newversion
        )
    ) {
        return $self->status(1);
    }

    if (!$self->shared->{common_mangled_newversion}) {

        # $mangled_newversion = version used for the new orig.tar.gz (a.k.a oversion)
        uscan_verbose
"New orig.tar.* tarball version (oversionmangled): $mangled_newversion";

        # MUT package always use the same $common_mangled_newversion
        # MUT disables repacksuffix so it is safe to have this before mk-origtargz
        $self->shared->{common_mangled_newversion} = $mangled_newversion;
    }
    if ($self->pgpmode eq 'next') {
        uscan_verbose "Read the next watch line (pgpmode=next)";
        return 0;
    }
    if ($self->safe) {
        uscan_verbose "SKIP generation of orig.tar.* "
          . "and running of script/uupdate (--safe)";
        return 0;
    }
    if ($download_available == 0) {
        uscan_warn "No upstream tarball downloaded."
          . " No further processing with mk_origtargz ...";
        return $self->status(1);
    }
    if ($download_available == -1) {
        uscan_warn "No upstream tarball unpacked from self signature file."
          . " No further processing with mk_origtargz ...";
        return $self->status(1);
    }
    if ($self->signature_available == 1 and $self->decompress) {
        $self->signature_available(2);
    }
    $self->search_result->{sigfile} = $sigfile;
    $self->must_download(1);
    return $self->status;
}
+
+# VII - mkorigtargz
+
+=head3 mkorigtargz()
+
+Call L<mk_origtargz> to build source tarball.
+
+=cut
+
+sub mkorigtargz {
+ my ($self) = @_;
+ uscan_debug "line: mkorigtargz()";
+ return 0 unless ($self->must_download);
+ my $mk_origtargz_out;
+ my $path = "$self->{config}->{destdir}/$self->{newfile_base}";
+ my $target = $self->newfile_base;
+ unless ($self->symlink eq "no" or $self->symlink eq "0") {
+ require Devscripts::MkOrigtargz;
+ if ($Devscripts::MkOrigtargz::found_comp) {
+ uscan_verbose
+ "Forcing compression to $Devscripts::MkOrigtargz::found_comp";
+ $self->repack(1);
+ }
+ @ARGV = ();
+ push @ARGV, "--package", $self->pkg;
+ push @ARGV, "--version", $self->shared->{common_mangled_newversion};
+ push @ARGV, '--repack-suffix', $self->repacksuffix
+ if $self->repacksuffix;
+ push @ARGV, "--rename" if $self->symlink eq "rename";
+ push @ARGV, "--copy" if $self->symlink eq "copy";
+ push @ARGV, "--signature", $self->signature_available
+ if ($self->signature_available != 0);
+ push @ARGV, "--signature-file",
+ "$self->{config}->{destdir}/$self->{search_result}->{sigfile}"
+ if ($self->signature_available != 0);
+ push @ARGV, "--repack" if $self->repack;
+ push @ARGV, "--force-repack" if $self->force_repack;
+ push @ARGV, "--component", $self->component
+ if $self->component;
+ push @ARGV, "--compression",
+ $Devscripts::MkOrigtargz::found_comp || $self->compression;
+ push @ARGV, "--directory", $self->config->destdir;
+ push @ARGV, "--copyright-file", "debian/copyright"
+ if ($self->config->exclusion && -e "debian/copyright");
+ push @ARGV, "--copyright-file", $self->config->copyright_file
+ if ($self->config->exclusion && $self->config->copyright_file);
+ push @ARGV, "--unzipopt", $self->unzipopt
+ if $self->unzipopt;
+ push @ARGV, $path;
+ my $tmp = $Devscripts::Output::die_on_error;
+
+ uscan_verbose "Launch mk-origtargz with options:\n "
+ . join(" ", @ARGV);
+ my $mk = Devscripts::MkOrigtargz->new;
+ $mk->do;
+ uscan_die "mk-origtargz failed" if ($mk->status);
+
+ $path = $mk->destfile_nice;
+ $target = basename($path);
+ $self->shared->{common_mangled_newversion} = $1
+ if $target =~ m/[^_]+_(.+)\.orig(?:-.+)?\.tar\.(?:gz|bz2|lzma|xz)$/;
+ uscan_verbose "New orig.tar.* tarball version (after mk-origtargz): "
+ . "$self->{shared}->{common_mangled_newversion}";
+ }
+ push @{ $self->shared->{origtars} }, $target;
+
+ if ($self->config->log) {
+
+ # Check pkg-ver.tar.gz and pkg_ver.orig.tar.gz
+ if (!$self->shared->{uscanlog}) {
+ $self->shared->{uscanlog}
+ = "$self->{config}->{destdir}/$self->{pkg}_$self->{shared}->{common_mangled_newversion}.uscan.log";
+ if (-e "$self->{shared}->{uscanlog}.old") {
+ unlink "$self->{shared}->{uscanlog}.old"
+ or uscan_die "Can\'t remove old backup log "
+ . "$self->{shared}->{uscanlog}.old: $!";
+ uscan_warn "Old backup uscan log found. "
+ . "Remove: $self->{shared}->{uscanlog}.old";
+ }
+ if (-e $self->shared->uscanlog) {
+ move($self->shared->uscanlog,
+ "$self->{shared}->{uscanlog}.old");
+ uscan_warn "Old uscan log found. "
+ . "Moved to: $self->{shared}->{uscanlog}.old";
+ }
+ open(USCANLOG, ">> $self->{shared}->{uscanlog}")
+ or uscan_die "$progname: could not open "
+ . "$self->{shared}->{uscanlog} for append: $!";
+ print USCANLOG "# uscan log\n";
+ } else {
+ open(USCANLOG, ">> $self->{shared}->{uscanlog}")
+ or uscan_die "$progname: could not open "
+ . "$self->{shared}->{uscanlog} for append: $!";
+ }
+ if ($self->symlink ne "rename") {
+ my $umd5sum = Digest::MD5->new;
+ my $omd5sum = Digest::MD5->new;
+ open(my $ufh, '<',
+ "$self->{config}->{destdir}/$self->{newfile_base}")
+ or uscan_die "Can't open '"
+ . "$self->{config}->{destdir}/$self->{newfile_base}" . "': $!";
+ open(my $ofh, '<', "$self->{config}->{destdir}/${target}")
+ or uscan_die
+ "Can't open '$self->{config}->{destdir}/${target}': $!";
+ $umd5sum->addfile($ufh);
+ $omd5sum->addfile($ofh);
+ close($ufh);
+ close($ofh);
+ my $umd5hex = $umd5sum->hexdigest;
+ my $omd5hex = $omd5sum->hexdigest;
+
+ if ($umd5hex eq $omd5hex) {
+ print USCANLOG
+ "# == $self->{newfile_base}\t-->\t${target}\t(same)\n";
+ } else {
+ print USCANLOG
+ "# !! $self->{newfile_base}\t-->\t${target}\t(changed)\n";
+ }
+ print USCANLOG "$umd5hex $self->{newfile_base}\n";
+ print USCANLOG "$omd5hex ${target}\n";
+ }
+ close USCANLOG
+ or uscan_die
+ "$progname: could not close $self->{shared}->{uscanlog} $!";
+ }
+
+ dehs_verbose "$mk_origtargz_out\n" if $mk_origtargz_out;
+ if ($self->component) {
+ push @{ $dehs_tags->{"component-target"} }, $target;
+ push @{ $dehs_tags->{"component-target-path"} }, $path;
+ } else {
+ $dehs_tags->{target} = $target;
+ $dehs_tags->{'target-path'} = $path;
+ }
+
+#######################################################################
+ # code 3.10: call uupdate
+#######################################################################
+ # Do whatever the user wishes to do
+ if ($self->parse_result->{action}) {
+ my @cmd = shellwords($self->parse_result->{action});
+
+ # script invocation changed in $watch_version=4
+ if ($self->watch_version > 3) {
+ if ($cmd[0] eq "uupdate") {
+ push @cmd, "-f";
+ if ($verbose) {
+ push @cmd, "--verbose";
+ }
+ if ($self->badversion) {
+ push @cmd, "-b";
+ }
+ }
+ push @cmd, "--upstream-version",
+ $self->shared->{common_mangled_newversion};
+ if (abs_path($self->{config}->{destdir}) ne abs_path("..")) {
+ foreach my $origtar (@{ $self->shared->{origtars} }) {
+ copy(catfile($self->{config}->{destdir}, $origtar),
+ catfile("..", $origtar));
+ }
+ }
+ } elsif ($self->watch_version > 1) {
+
+ # Any symlink requests are already handled by uscan
+ if ($cmd[0] eq "uupdate") {
+ push @cmd, "--no-symlink";
+ if ($verbose) {
+ push @cmd, "--verbose";
+ }
+ if ($self->badversion) {
+ push @cmd, "-b";
+ }
+ }
+ push @cmd, "--upstream-version",
+ $self->shared->{common_mangled_newversion}, $path;
+ } else {
+ push @cmd, $path, $self->shared->{common_mangled_newversion};
+ }
+ my $actioncmd = join(" ", @cmd);
+ my $actioncmdmsg;
+ spawn(exec => \@cmd, wait_child => 1, to_string => \$actioncmdmsg);
+ local $, = ' ';
+ dehs_verbose "Executing user specified script:\n @cmd\n"
+ . $actioncmdmsg;
+ }
+ $self->destfile($path);
+
+ return 0;
+}
+
+# VIII - clean
+
+=head3 clean()
+
+Clean temporary files using either:
+
+=over
+
+=item L<Devscripts::Uscan::http>::http_clean()
+=item L<Devscripts::Uscan::ftp>::ftp_clean()
+=item L<Devscripts::Uscan::git>::git_clean()
+=item L<Devscripts::Uscan::svn>::svn_clean()
+
+=back
+
+=cut
+
+sub clean {
+ my ($self) = @_;
+ $self->_do('clean');
+}
+
+# Internal sub to call sub modules (git, http,...)
+sub _do {
+ my ($self, $sub) = @_;
+ my $mode = $self->mode;
+ $mode =~ s/git-dumb/git/;
+ $sub = $mode . "_$sub";
+ with("Devscripts::Uscan::$mode") unless ($self->can($sub));
+ if ($@) {
+ uscan_warn "Unknown '$mode' mode set in $self->{watchfile} ($@)";
+ $self->status(1);
+ }
+ return $self->$sub;
+}
+
+1;
+
+=head1 SEE ALSO
+
+L<uscan>, L<Devscripts::Uscan::WatchFile>, L<Devscripts::Uscan::Config>
+
+=head1 AUTHOR
+
+B<uscan> was originally written by Christoph Lameter
+E<lt>clameter@debian.orgE<gt> (I believe), modified by Julian Gilbey
+E<lt>jdg@debian.orgE<gt>. HTTP support was added by Piotr Roszatycki
+E<lt>dexter@debian.orgE<gt>. B<uscan> was rewritten in Perl by Julian Gilbey.
+Xavier Guimard E<lt>yadd@debian.orgE<gt> rewrote uscan in object
+oriented Perl.
+
+=head1 COPYRIGHT AND LICENSE
+
+Copyright 2002-2006 by Julian Gilbey <jdg@debian.org>,
+2018 by Xavier Guimard <yadd@debian.org>
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2 of the License, or
+(at your option) any later version.
+
+=cut
diff --git a/lib/Devscripts/Uscan/_vcs.pm b/lib/Devscripts/Uscan/_vcs.pm
new file mode 100644
index 0000000..b66f560
--- /dev/null
+++ b/lib/Devscripts/Uscan/_vcs.pm
@@ -0,0 +1,93 @@
+# Common sub shared between git and svn
+package Devscripts::Uscan::_vcs;
+
+use strict;
+use Devscripts::Uscan::Output;
+use Devscripts::Uscan::Utils;
+use Exporter 'import';
+use File::Basename;
+
+our @EXPORT = ('get_refs');
+
+our $progname = basename($0);
+
+sub _vcs_newfile_base {
+ my ($self) = @_;
+ my $zsuffix = get_suffix($self->compression);
+ my $newfile_base
+ = "$self->{pkg}-$self->{search_result}->{newversion}.tar.$zsuffix";
+ return $newfile_base;
+}
+
+sub get_refs {
+ my ($self, $command, $ref_pattern, $package) = @_;
+ my @command = @$command;
+ my ($newfile, $newversion);
+ {
+ local $, = ' ';
+ uscan_verbose "Execute: @command";
+ }
+ open(REFS, "-|", @command)
+ || uscan_die "$progname: you must have the $package package installed";
+ my @refs;
+ my $ref;
+ my $version;
+ while (<REFS>) {
+ chomp;
+ uscan_debug "$_";
+ if ($_ =~ $ref_pattern) {
+ $ref = $1;
+ foreach my $_pattern (@{ $self->patterns }) {
+ $version = join(".",
+ map { $_ if defined($_) } $ref =~ m&^$_pattern$&);
+ if (
+ mangle(
+ $self->watchfile, \$self->line,
+ 'uversionmangle:', \@{ $self->uversionmangle },
+ \$version
+ )
+ ) {
+ return undef;
+ }
+ push @refs, [$version, $ref];
+ }
+ }
+ }
+ if (@refs) {
+ @refs = Devscripts::Versort::upstream_versort(@refs);
+ my $msg = "Found the following matching refs:\n";
+ foreach my $ref (@refs) {
+ $msg .= " $$ref[1] ($$ref[0])\n";
+ }
+ uscan_verbose "$msg";
+ if ($self->shared->{download_version}
+ and not $self->versionmode eq 'ignore') {
+
+# extract the ones whose $version, matched in the loop above, equals $download_version
+ my @vrefs
+ = grep { $$_[0] eq $self->shared->{download_version} } @refs;
+ if (@vrefs) {
+ ($newversion, $newfile) = @{ $vrefs[0] };
+ } else {
+ uscan_warn
+ "$progname warning: In $self->{watchfile} no matching"
+ . " refs for version "
+ . $self->shared->{download_version}
+ . " in watch line\n "
+ . $self->{line};
+ return undef;
+ }
+
+ } else {
+ ($newversion, $newfile) = @{ $refs[0] };
+ }
+ } else {
+ uscan_warn "$progname warning: In $self->{watchfile},\n"
+ . " no matching refs for watch line\n"
+ . " $self->{line}";
+ return undef;
+ }
+ return ($newversion, $newfile);
+}
+
+1;
diff --git a/lib/Devscripts/Uscan/_xtp.pm b/lib/Devscripts/Uscan/_xtp.pm
new file mode 100644
index 0000000..092cb52
--- /dev/null
+++ b/lib/Devscripts/Uscan/_xtp.pm
@@ -0,0 +1,90 @@
+# Common sub shared between http and ftp
+package Devscripts::Uscan::_xtp;
+
+use strict;
+use File::Basename;
+use Exporter 'import';
+use Devscripts::Uscan::Output;
+use Devscripts::Uscan::Utils;
+
+our @EXPORT = ('partial_version');
+
+sub _xtp_newfile_base {
+ my ($self) = @_;
+ my $newfile_base;
+ if (@{ $self->filenamemangle }) {
+
+ # HTTP or FTP site (with filenamemangle)
+ if ($self->versionless) {
+ $newfile_base = $self->upstream_url;
+ } else {
+ $newfile_base = $self->search_result->{newfile};
+ }
+ my $cmp = $newfile_base;
+ uscan_verbose "Matching target for filenamemangle: $newfile_base";
+ if (
+ mangle(
+ $self->watchfile, \$self->line,
+ 'filenamemangle:', \@{ $self->filenamemangle },
+ \$newfile_base
+ )
+ ) {
+ $self->status(1);
+ return undef;
+ }
+ if ($newfile_base =~ m/^(?:https?|ftp):/) {
+ $newfile_base = basename($newfile_base);
+ }
+ if ($cmp eq $newfile_base) {
+ uscan_die "filenamemangle failed for $cmp";
+ }
+ unless ($self->search_result->{newversion}) {
+
+            # uversionmangled version is '', make best effort to set it
+ $newfile_base
+ =~ m/^.+?[-_]?(\d[\-+\.:\~\da-zA-Z]*)(?:\.tar\.(gz|bz2|xz|zstd?)|\.zip)$/i;
+ $self->search_result->{newversion} = $1;
+ unless ($self->search_result->{newversion}) {
+ uscan_warn
+"Fix filenamemangle to produce a filename with the correct version";
+ $self->status(1);
+ return undef;
+ }
+ uscan_verbose
+"Newest upstream tarball version from the filenamemangled filename: $self->{search_result}->{newversion}";
+ }
+ } else {
+ # HTTP or FTP site (without filenamemangle)
+ $newfile_base = basename($self->search_result->{newfile});
+ if ($self->mode eq 'http') {
+
+ # Remove HTTP header trash
+            $newfile_base =~ s/[\?#].*$//;    # e.g. PyPI appends ?query/#fragment
+ # just in case this leaves us with nothing
+ if ($newfile_base eq '') {
+ uscan_warn
+"No good upstream filename found after removing tailing ?... and #....\n Use filenamemangle to fix this.";
+ $self->status(1);
+ return undef;
+ }
+ }
+ }
+ return $newfile_base;
+}
+
+sub partial_version {
+ my ($download_version) = @_;
+ my ($d1, $d2, $d3);
+ if (defined $download_version) {
+ uscan_verbose "download version requested: $download_version";
+ if ($download_version
+ =~ m/^([-~\+\w]+)(\.[-~\+\w]+)?(\.[-~\+\w]+)?(\.[-~\+\w]+)?$/) {
+ $d1 = "$1" if defined $1;
+ $d2 = "$1$2" if defined $2;
+ $d3 = "$1$2$3" if defined $3;
+ }
+ }
+ return ($d1, $d2, $d3);
+}
+
+1;
diff --git a/lib/Devscripts/Uscan/ftp.pm b/lib/Devscripts/Uscan/ftp.pm
new file mode 100644
index 0000000..5a24d8a
--- /dev/null
+++ b/lib/Devscripts/Uscan/ftp.pm
@@ -0,0 +1,280 @@
+package Devscripts::Uscan::ftp;
+
+use strict;
+use Cwd qw/abs_path/;
+use Devscripts::Uscan::Output;
+use Devscripts::Uscan::Utils;
+use Devscripts::Uscan::_xtp;
+use Moo::Role;
+
+#######################################################################
+# search $newfile $newversion (ftp mode)
+#######################################################################
+sub ftp_search {
+ my ($self) = @_;
+
+ # FTP site
+ uscan_verbose "Requesting URL:\n $self->{parse_result}->{base}";
+ my $request = HTTP::Request->new('GET', $self->parse_result->{base});
+ my $response = $self->downloader->user_agent->request($request);
+ if (!$response->is_success) {
+ uscan_warn
+"In watch file $self->{watchfile}, reading FTP directory\n $self->{parse_result}->{base} failed: "
+ . $response->status_line . "";
+ return undef;
+ }
+
+ my $content = $response->content;
+ uscan_extra_debug
+ "received content:\n$content\n[End of received content] by FTP";
+
+ # FTP directory listings either look like:
+ # info info ... info filename [ -> linkname]
+ # or they're HTMLised (if they've been through an HTTP proxy)
+ # so we may have to look for <a href="filename"> type patterns
+ uscan_verbose "matching pattern $self->{parse_result}->{pattern}";
+ my (@files);
+
+ # We separate out HTMLised listings from standard listings, so
+ # that we can target our search correctly
+ if ($content =~ /<\s*a\s+[^>]*href/i) {
+ uscan_verbose "HTMLized FTP listing by the HTTP proxy";
+ while ($content
+ =~ m/(?:<\s*a\s+[^>]*href\s*=\s*\")((?-i)$self->{parse_result}->{pattern})\"/gi
+ ) {
+ my $file = fix_href($1);
+ my $mangled_version
+ = join(".", $file =~ m/^$self->{parse_result}->{pattern}$/);
+ if (
+ mangle(
+ $self->watchfile, \$self->line,
+ 'uversionmangle:', \@{ $self->uversionmangle },
+ \$mangled_version
+ )
+ ) {
+ return undef;
+ }
+ my $match = '';
+ if (defined $self->shared->{download_version}
+ and not $self->versionmode eq 'ignore') {
+ if ($mangled_version eq $self->shared->{download_version}) {
+ $match = "matched with the download version";
+ }
+ }
+ my $priority = $mangled_version . '-' . get_priority($file);
+ push @files, [$priority, $mangled_version, $file, $match];
+ }
+ } else {
+ uscan_verbose "Standard FTP listing.";
+
+ # they all look like:
+ # info info ... info filename [ -> linkname]
+ for my $ln (split(/\n/, $content)) {
+ $ln =~ s/^d.*$//; # FTP listing of directory, '' skipped
+ $ln =~ s/\s+->\s+\S+$//; # FTP listing for link destination
+ $ln =~ s/^.*\s(\S+)$/$1/; # filename only
+ if ($ln and $ln =~ m/^($self->{parse_result}->{filepattern})$/) {
+ my $file = $1;
+ my $mangled_version = join(".",
+ $file =~ m/^$self->{parse_result}->{filepattern}$/);
+ if (
+ mangle(
+ $self->watchfile, \$self->line,
+ 'uversionmangle:', \@{ $self->uversionmangle },
+ \$mangled_version
+ )
+ ) {
+ return undef;
+ }
+ my $match = '';
+ if (defined $self->shared->{download_version}) {
+ if ($mangled_version eq $self->shared->{download_version})
+ {
+ $match = "matched with the download version";
+ }
+ }
+ my $priority = $mangled_version . '-' . get_priority($file);
+ push @files, [$priority, $mangled_version, $file, $match];
+ }
+ }
+ }
+ if (@files) {
+ @files = Devscripts::Versort::versort(@files);
+ my $msg
+ = "Found the following matching files on the web page (newest first):\n";
+ foreach my $file (@files) {
+ $msg .= " $$file[2] ($$file[1]) index=$$file[0] $$file[3]\n";
+ }
+ uscan_verbose $msg;
+ }
+ my ($newversion, $newfile);
+ if (defined $self->shared->{download_version}) {
+
+        # extract the ones for which $match was set in the loop above
+ my @vfiles = grep { $$_[3] } @files;
+ if (@vfiles) {
+ (undef, $newversion, $newfile, undef) = @{ $vfiles[0] };
+ } else {
+ uscan_warn
+"In $self->{watchfile} no matching files for version $self->{shared}->{download_version}"
+ . " in watch line\n $self->{line}";
+ return undef;
+ }
+ } else {
+ if (@files) {
+ (undef, $newversion, $newfile, undef) = @{ $files[0] };
+ } else {
+ uscan_warn
+"In $self->{watchfile} no matching files for watch line\n $self->{line}";
+ return undef;
+ }
+ }
+ return ($newversion, $newfile);
+}
+
+sub ftp_upstream_url {
+ my ($self) = @_;
+ return $self->parse_result->{base} . $self->search_result->{newfile};
+}
+
+*ftp_newfile_base = \&Devscripts::Uscan::_xtp::_xtp_newfile_base;
+
+sub ftp_newdir {
+ my ($line, $site, $dir, $pattern, $dirversionmangle, $watchfile,
+ $lineptr, $download_version)
+ = @_;
+ my $downloader = $line->downloader;
+
+ my ($request, $response, $newdir);
+ my ($download_version_short1, $download_version_short2,
+ $download_version_short3)
+ = partial_version($download_version);
+ my $base = $site . $dir;
+ $request = HTTP::Request->new('GET', $base);
+ $response = $downloader->user_agent->request($request);
+ if (!$response->is_success) {
+ uscan_warn
+ "In watch file $watchfile, reading webpage\n $base failed: "
+ . $response->status_line;
+ return '';
+ }
+
+ my $content = $response->content;
+ uscan_extra_debug
+ "received content:\n$content\n[End of received content] by FTP";
+
+ # FTP directory listings either look like:
+ # info info ... info filename [ -> linkname]
+ # or they're HTMLised (if they've been through an HTTP proxy)
+ # so we may have to look for <a href="filename"> type patterns
+ uscan_verbose "matching pattern $pattern";
+ my (@dirs);
+ my $match = '';
+
+ # We separate out HTMLised listings from standard listings, so
+ # that we can target our search correctly
+ if ($content =~ /<\s*a\s+[^>]*href/i) {
+ uscan_verbose "HTMLized FTP listing by the HTTP proxy";
+ while (
+ $content =~ m/(?:<\s*a\s+[^>]*href\s*=\s*\")((?-i)$pattern)\"/gi) {
+ my $dir = $1;
+ uscan_verbose "Matching target for dirversionmangle: $dir";
+ my $mangled_version = join(".", $dir =~ m/^$pattern$/);
+ if (
+ mangle(
+ $watchfile, $lineptr,
+ 'dirversionmangle:', \@{$dirversionmangle},
+ \$mangled_version
+ )
+ ) {
+ return 1;
+ }
+ $match = '';
+ if (defined $download_version
+ and $mangled_version eq $download_version) {
+ $match = "matched with the download version";
+ }
+ if (defined $download_version_short3
+ and $mangled_version eq $download_version_short3) {
+ $match = "matched with the download version (partial 3)";
+ }
+ if (defined $download_version_short2
+ and $mangled_version eq $download_version_short2) {
+ $match = "matched with the download version (partial 2)";
+ }
+ if (defined $download_version_short1
+ and $mangled_version eq $download_version_short1) {
+ $match = "matched with the download version (partial 1)";
+ }
+ push @dirs, [$mangled_version, $dir, $match];
+ }
+ } else {
+ # they all look like:
+ # info info ... info filename [ -> linkname]
+ uscan_verbose "Standard FTP listing.";
+ foreach my $ln (split(/\n/, $content)) {
+ $ln =~ s/^-.*$//; # FTP listing of file, '' skipped
+ $ln =~ s/\s+->\s+\S+$//; # FTP listing for link destination
+ $ln =~ s/^.*\s(\S+)$/$1/; # filename only
+ if ($ln =~ m/^($pattern)(\s+->\s+\S+)?$/) {
+ my $dir = $1;
+ uscan_verbose "Matching target for dirversionmangle: $dir";
+ my $mangled_version = join(".", $dir =~ m/^$pattern$/);
+ if (
+ mangle(
+ $watchfile, $lineptr,
+ 'dirversionmangle:', \@{$dirversionmangle},
+ \$mangled_version
+ )
+ ) {
+ return 1;
+ }
+ $match = '';
+ if (defined $download_version
+ and $mangled_version eq $download_version) {
+ $match = "matched with the download version";
+ }
+ if (defined $download_version_short3
+ and $mangled_version eq $download_version_short3) {
+ $match = "matched with the download version (partial 3)";
+ }
+ if (defined $download_version_short2
+ and $mangled_version eq $download_version_short2) {
+ $match = "matched with the download version (partial 2)";
+ }
+ if (defined $download_version_short1
+ and $mangled_version eq $download_version_short1) {
+ $match = "matched with the download version (partial 1)";
+ }
+ push @dirs, [$mangled_version, $dir, $match];
+ }
+ }
+ }
+
+    # extract the ones for which $match was set in the loop above
+ my @vdirs = grep { $$_[2] } @dirs;
+ if (@vdirs) {
+ @vdirs = Devscripts::Versort::upstream_versort(@vdirs);
+ $newdir = $vdirs[0][1];
+ }
+ if (@dirs) {
+ @dirs = Devscripts::Versort::upstream_versort(@dirs);
+ my $msg
+ = "Found the following matching FTP directories (newest first):\n";
+ foreach my $dir (@dirs) {
+ $msg .= " $$dir[1] ($$dir[0]) $$dir[2]\n";
+ }
+ uscan_verbose $msg;
+ $newdir //= $dirs[0][1];
+ } else {
+ uscan_warn
+ "In $watchfile no matching dirs for pattern\n $base$pattern";
+ $newdir = '';
+ }
+ return $newdir;
+}
+
+# Nothing to clean here
+sub ftp_clean { 0 }
+
+1;
diff --git a/lib/Devscripts/Uscan/git.pm b/lib/Devscripts/Uscan/git.pm
new file mode 100644
index 0000000..843e7c0
--- /dev/null
+++ b/lib/Devscripts/Uscan/git.pm
@@ -0,0 +1,172 @@
+package Devscripts::Uscan::git;
+
+use strict;
+use Cwd qw/abs_path/;
+use Devscripts::Uscan::Output;
+use Devscripts::Uscan::Utils;
+use Devscripts::Uscan::_vcs;
+use Dpkg::IPC;
+use File::Path 'remove_tree';
+use Moo::Role;
+
+######################################################
+# search $newfile $newversion (git mode/versionless)
+######################################################
+sub git_search {
+ my ($self) = @_;
+ my ($newfile, $newversion);
+ if ($self->versionless) {
+ $newfile = $self->parse_result->{filepattern}; # HEAD or heads/<branch>
+ if ($self->pretty eq 'describe') {
+ $self->gitmode('full');
+ }
+ if ( $self->gitmode eq 'shallow'
+ and $self->parse_result->{filepattern} eq 'HEAD') {
+ uscan_exec(
+ 'git',
+ 'clone',
+ '--quiet',
+ '--bare',
+ '--depth=1',
+ $self->parse_result->{base},
+ "$self->{downloader}->{destdir}/" . $self->gitrepo_dir
+ );
+ $self->downloader->gitrepo_state(1);
+ } elsif ($self->gitmode eq 'shallow'
+ and $self->parse_result->{filepattern} ne 'HEAD')
+ { # heads/<branch>
+ $newfile =~ s&^heads/&&; # Set to <branch>
+ uscan_exec(
+ 'git',
+ 'clone',
+ '--quiet',
+ '--bare',
+ '--depth=1',
+ '-b',
+ "$newfile",
+ $self->parse_result->{base},
+ "$self->{downloader}->{destdir}/" . $self->gitrepo_dir
+ );
+ $self->downloader->gitrepo_state(1);
+ } else {
+ uscan_exec(
+ 'git', 'clone', '--quiet', '--bare',
+ $self->parse_result->{base},
+ "$self->{downloader}->{destdir}/" . $self->gitrepo_dir
+ );
+ $self->downloader->gitrepo_state(2);
+ }
+ if ($self->pretty eq 'describe') {
+
+ # use unannotated tags to be on safe side
+ spawn(
+ exec => [
+ 'git',
+"--git-dir=$self->{downloader}->{destdir}/$self->{gitrepo_dir}",
+ 'describe',
+ '--tags'
+ ],
+ wait_child => 1,
+ to_string => \$newversion
+ );
+ $newversion =~ s/-/./g;
+ chomp($newversion);
+ if (
+ mangle(
+ $self->watchfile, \$self->line,
+ 'uversionmangle:', \@{ $self->uversionmangle },
+ \$newversion
+ )
+ ) {
+ return undef;
+ }
+ } else {
+ my $tmp = $ENV{TZ};
+ $ENV{TZ} = 'UTC';
+ spawn(
+ exec => [
+ 'git',
+"--git-dir=$self->{downloader}->{destdir}/$self->{gitrepo_dir}",
+ 'log',
+ '-1',
+ "--date=format-local:$self->{date}",
+ "--pretty=$self->{pretty}"
+ ],
+ wait_child => 1,
+ to_string => \$newversion
+ );
+ $ENV{TZ} = $tmp;
+ chomp($newversion);
+ }
+ }
+ ################################################
+ # search $newfile $newversion (git mode w/tag)
+ ################################################
+ elsif ($self->mode eq 'git') {
+ my @args = ('ls-remote', $self->parse_result->{base});
+ # Try to use local upstream branch if available
+ if (-d '.git') {
+ my $out;
+ eval {
+ spawn(
+ exec => ['git', 'remote', '--verbose', 'show'],
+ wait_child => 1,
+ to_string => \$out
+ );
+ };
+ # Check if git repo found in debian/watch exists in
+ # `git remote show` output
+ if ($out and $out =~ /^(\S+)\s+\Q$self->{parse_result}->{base}\E/m)
+ {
+ $self->downloader->git_upstream($1);
+ uscan_warn
+ "Using $self->{downloader}->{git_upstream} remote origin";
+ # Found, launch a "fetch" to be up to date
+ spawn(
+ exec => ['git', 'fetch', $self->downloader->git_upstream],
+ wait_child => 1
+ );
+ @args = ('show-ref');
+ }
+ }
+ ($newversion, $newfile)
+ = get_refs($self, ['git', @args], qr/^\S+\s+([^\^\{\}]+)$/, 'git');
+ return undef if !defined $newversion;
+ }
+ return ($newversion, $newfile);
+}
+
+sub git_upstream_url {
+ my ($self) = @_;
+ my $upstream_url
+ = $self->parse_result->{base} . ' ' . $self->search_result->{newfile};
+ return $upstream_url;
+}
+
+*git_newfile_base = \&Devscripts::Uscan::_vcs::_vcs_newfile_base;
+
+sub git_clean {
+ my ($self) = @_;
+
+ # If git cloned repo exists and not --debug ($verbose=2) -> remove it
+ if ( $self->downloader->gitrepo_state > 0
+ and $verbose < 2
+ and !$self->downloader->git_upstream) {
+ my $err;
+ uscan_verbose "Removing git repo ($self->{downloader}->{destdir}/"
+ . $self->gitrepo_dir . ")";
+ remove_tree "$self->{downloader}->{destdir}/" . $self->gitrepo_dir,
+ { error => \$err };
+ if (@$err) {
+ local $, = "\n\t";
+ uscan_warn "Errors during git repo clean:\n\t@$err";
+ }
+ $self->downloader->gitrepo_state(0);
+ } else {
+ uscan_debug "Keep git repo ($self->{downloader}->{destdir}/"
+ . $self->gitrepo_dir . ")";
+ }
+ return 0;
+}
+
+1;
diff --git a/lib/Devscripts/Uscan/http.pm b/lib/Devscripts/Uscan/http.pm
new file mode 100644
index 0000000..0da9798
--- /dev/null
+++ b/lib/Devscripts/Uscan/http.pm
@@ -0,0 +1,510 @@
+package Devscripts::Uscan::http;
+
+use strict;
+use Cwd qw/abs_path/;
+use Devscripts::Uscan::Output;
+use Devscripts::Uscan::Utils;
+use Devscripts::Uscan::_xtp;
+use Moo::Role;
+
+*http_newfile_base = \&Devscripts::Uscan::_xtp::_xtp_newfile_base;
+
+##################################
+# search $newversion (http mode)
+##################################
+
+#returns (\@patterns, \@base_sites, \@base_dirs)
+sub handle_redirection {
+ my ($self, $pattern, @additional_bases) = @_;
+ my @redirections = @{ $self->downloader->user_agent->get_redirections };
+ my (@patterns, @base_sites, @base_dirs);
+
+ uscan_verbose "redirections: @redirections" if @redirections;
+
+ foreach my $_redir (@redirections, @additional_bases) {
+ my $base_dir = $_redir;
+
+ $base_dir =~ s%^\w+://[^/]+/%/%;
+ $base_dir =~ s%/[^/]*(?:[#?].*)?$%/%;
+ if ($_redir =~ m%^(\w+://[^/]+)%) {
+ my $base_site = $1;
+
+ push @patterns,
+ quotemeta($base_site) . quotemeta($base_dir) . "$pattern";
+ push @base_sites, $base_site;
+ push @base_dirs, $base_dir;
+
+ # remove the filename, if any
+ my $base_dir_orig = $base_dir;
+ $base_dir =~ s%/[^/]*$%/%;
+ if ($base_dir ne $base_dir_orig) {
+ push @patterns,
+ quotemeta($base_site) . quotemeta($base_dir) . "$pattern";
+ push @base_sites, $base_site;
+ push @base_dirs, $base_dir;
+ }
+ }
+ }
+ return (\@patterns, \@base_sites, \@base_dirs);
+}
+
+sub http_search {
+ my ($self) = @_;
+
+ # $content: web page to be scraped to find the URLs to be downloaded
+ if ($self->{parse_result}->{base} =~ /^https/ and !$self->downloader->ssl)
+ {
+ uscan_die
+"you must have the liblwp-protocol-https-perl package installed\nto use https URLs";
+ }
+ uscan_verbose "Requesting URL:\n $self->{parse_result}->{base}";
+ my $request = HTTP::Request->new('GET', $self->parse_result->{base});
+ foreach my $k (keys %{ $self->downloader->headers }) {
+ if ($k =~ /^(.*?)@(.*)$/) {
+ my $baseUrl = $1;
+ my $hdr = $2;
+ if ($self->parse_result->{base} =~ m#^\Q$baseUrl\E(?:/.*)?$#) {
+ $request->header($hdr => $self->headers->{$k});
+ uscan_verbose "Set per-host custom header $hdr for "
+ . $self->parse_result->{base};
+ } else {
+ uscan_debug
+ "$self->parse_result->{base} does not start with $1";
+ }
+ } else {
+ uscan_warn "Malformed http-header: $k";
+ }
+ }
+ $request->header('Accept-Encoding' => 'gzip');
+ $request->header('Accept' => '*/*');
+ my $response = $self->downloader->user_agent->request($request);
+ if (!$response->is_success) {
+ uscan_warn
+"In watchfile $self->{watchfile}, reading webpage\n $self->{parse_result}->{base} failed: "
+ . $response->status_line;
+ return undef;
+ }
+
+ my ($patterns, $base_sites, $base_dirs)
+ = handle_redirection($self, $self->{parse_result}->{filepattern});
+ push @{ $self->patterns }, @$patterns;
+ push @{ $self->sites }, @$base_sites;
+ push @{ $self->basedirs }, @$base_dirs;
+
+ my $content = $response->decoded_content;
+ uscan_extra_debug
+ "received content:\n$content\n[End of received content] by HTTP";
+
+ my @hrefs;
+ if (!$self->searchmode or $self->searchmode eq 'html') {
+ @hrefs = $self->html_search($content, $self->patterns);
+ } elsif ($self->searchmode eq 'plain') {
+ @hrefs = $self->plain_search($content);
+ } else {
+ uscan_warn 'Unknown searchmode "' . $self->searchmode . '", skipping';
+ return undef;
+ }
+
+ if (@hrefs) {
+ @hrefs = Devscripts::Versort::versort(@hrefs);
+ my $msg
+ = "Found the following matching hrefs on the web page (newest first):\n";
+ foreach my $href (@hrefs) {
+ $msg .= " $$href[2] ($$href[1]) index=$$href[0] $$href[3]\n";
+ }
+ uscan_verbose $msg;
+ }
+ my ($newversion, $newfile);
+ if (defined $self->shared->{download_version}
+ and not $self->versionmode eq 'ignore') {
+
+        # extract the ones for which $match was set in the loop above
+ my @vhrefs = grep { $$_[3] } @hrefs;
+ if (@vhrefs) {
+ (undef, $newversion, $newfile, undef) = @{ $vhrefs[0] };
+ } else {
+ uscan_warn
+"In $self->{watchfile} no matching hrefs for version $self->{shared}->{download_version}"
+ . " in watch line\n $self->{line}";
+ return undef;
+ }
+ } else {
+ if (@hrefs) {
+ (undef, $newversion, $newfile, undef) = @{ $hrefs[0] };
+ } else {
+ uscan_warn
+"In $self->{watchfile} no matching files for watch line\n $self->{line}";
+ return undef;
+ }
+ }
+ return ($newversion, $newfile);
+}
+
+#######################################################################
+# determine $upstream_url (http mode)
+#######################################################################
+# http is complicated due to absolute/relative URL issue
+sub http_upstream_url {
+ my ($self) = @_;
+ my $upstream_url;
+ my $newfile = $self->search_result->{newfile};
+ if ($newfile =~ m%^\w+://%) {
+ $upstream_url = $newfile;
+ } elsif ($newfile =~ m%^//%) {
+ $upstream_url = $self->parse_result->{site};
+ $upstream_url =~ s/^(https?:).*/$1/;
+ $upstream_url .= $newfile;
+ } elsif ($newfile =~ m%^/%) {
+
+ # absolute filename
+ # Were there any redirections? If so try using those first
+ if ($#{ $self->patterns } > 0) {
+
+ # replace $site here with the one we were redirected to
+ foreach my $index (0 .. $#{ $self->patterns }) {
+ if ("$self->{sites}->[$index]$newfile"
+ =~ m&^$self->{patterns}->[$index]$&) {
+ $upstream_url = "$self->{sites}->[$index]$newfile";
+ last;
+ }
+ }
+ if (!defined($upstream_url)) {
+ uscan_verbose
+ "Unable to determine upstream url from redirections,\n"
+ . "defaulting to using site specified in watch file";
+ $upstream_url = "$self->{sites}->[0]$newfile";
+ }
+ } else {
+ $upstream_url = "$self->{sites}->[0]$newfile";
+ }
+ } else {
+ # relative filename, we hope
+ # Were there any redirections? If so try using those first
+ if ($#{ $self->patterns } > 0) {
+
+ # replace $site here with the one we were redirected to
+ foreach my $index (0 .. $#{ $self->patterns }) {
+
+ # skip unless the basedir looks like a directory
+ next unless $self->{basedirs}->[$index] =~ m%/$%;
+ my $nf = "$self->{basedirs}->[$index]$newfile";
+ if ("$self->{sites}->[$index]$nf"
+ =~ m&^$self->{patterns}->[$index]$&) {
+ $upstream_url = "$self->{sites}->[$index]$nf";
+ last;
+ }
+ }
+ if (!defined($upstream_url)) {
+ uscan_verbose
+ "Unable to determine upstream url from redirections,\n"
+ . "defaulting to using site specified in watch file";
+ $upstream_url = "$self->{parse_result}->{urlbase}$newfile";
+ }
+ } else {
+ $upstream_url = "$self->{parse_result}->{urlbase}$newfile";
+ }
+ }
+
+ # mangle if necessary
+ $upstream_url =~ s/&amp;/&/g;
+ uscan_verbose "Matching target for downloadurlmangle: $upstream_url";
+ if (@{ $self->downloadurlmangle }) {
+ if (
+ mangle(
+ $self->watchfile, \$self->line,
+ 'downloadurlmangle:', \@{ $self->downloadurlmangle },
+ \$upstream_url
+ )
+ ) {
+ $self->status(1);
+ return undef;
+ }
+ }
+ return $upstream_url;
+}
+
+sub http_newdir {
+ my ($https, $line, $site, $dir, $pattern, $dirversionmangle,
+ $watchfile, $lineptr, $download_version)
+ = @_;
+
+ my $downloader = $line->downloader;
+ my ($request, $response, $newdir);
+ my ($download_version_short1, $download_version_short2,
+ $download_version_short3)
+ = partial_version($download_version);
+ my $base = $site . $dir;
+
+ $pattern .= "/?";
+
+ if (defined($https) and !$downloader->ssl) {
+ uscan_die
+"$progname: you must have the liblwp-protocol-https-perl package installed\n"
+ . "to use https URLs";
+ }
+ # At least for now, set base in the line object - other methods need it
+ local $line->parse_result->{base} = $base;
+ $request = HTTP::Request->new('GET', $base);
+ $response = $downloader->user_agent->request($request);
+ if (!$response->is_success) {
+ uscan_warn
+ "In watch file $watchfile, reading webpage\n $base failed: "
+ . $response->status_line;
+ return '';
+ }
+
+ my $content = $response->content;
+ if ( $response->header('Content-Encoding')
+ and $response->header('Content-Encoding') =~ /^gzip$/i) {
+ require IO::Uncompress::Gunzip;
+ require IO::String;
+ uscan_debug "content seems gzip encoded, let's decode it";
+ my $out;
+ if (IO::Uncompress::Gunzip::gunzip(IO::String->new($content), \$out)) {
+ $content = $out;
+ } else {
+ uscan_warn 'Unable to decode remote content: '
+ . $IO::Uncompress::GunzipError;
+ return '';
+ }
+ }
+ uscan_extra_debug
+ "received content:\n$content\n[End of received content] by HTTP";
+
+ clean_content(\$content);
+
+ my ($dirpatterns, $base_sites, $base_dirs)
+ = handle_redirection($line, $pattern, $base);
+ $downloader->user_agent->clear_redirections; # we won't be needing that
+
+ my @hrefs;
+ for my $parsed (
+ html_search($line, $content, $dirpatterns, 'dirversionmangle')) {
+ my ($priority, $mangled_version, $href, $match) = @$parsed;
+ $match = '';
+ if (defined $download_version
+ and $mangled_version eq $download_version) {
+ $match = "matched with the download version";
+ }
+ if (defined $download_version_short3
+ and $mangled_version eq $download_version_short3) {
+ $match = "matched with the download version (partial 3)";
+ }
+ if (defined $download_version_short2
+ and $mangled_version eq $download_version_short2) {
+ $match = "matched with the download version (partial 2)";
+ }
+ if (defined $download_version_short1
+ and $mangled_version eq $download_version_short1) {
+ $match = "matched with the download version (partial 1)";
+ }
+ push @hrefs, [$mangled_version, $href, $match];
+ }
+
+ # extract ones which has $match in the above loop defined
+ my @vhrefs = grep { $$_[2] } @hrefs;
+ if (@vhrefs) {
+ @vhrefs = Devscripts::Versort::upstream_versort(@vhrefs);
+ $newdir = $vhrefs[0][1];
+ }
+ if (@hrefs) {
+ @hrefs = Devscripts::Versort::upstream_versort(@hrefs);
+ my $msg = "Found the following matching directories (newest first):\n";
+ foreach my $href (@hrefs) {
+ $msg .= " $$href[1] ($$href[0]) $$href[2]\n";
+ }
+ uscan_verbose $msg;
+ $newdir //= $hrefs[0][1];
+ } else {
+ uscan_warn
+"In $watchfile,\n no matching hrefs for pattern\n $site$dir$pattern";
+ return '';
+ }
+
+ # just give the final directory component
+ $newdir =~ s%/$%%;
+ $newdir =~ s%^.*/%%;
+ return ($newdir);
+}
+
+# HTTP mode keeps no on-disk state between runs, so cleanup is a
+# no-op that always reports success.
+sub http_clean { return 0; }
+
+# Normalise fetched HTML in place so later href matching is reliable:
+# quote bare href=foo attribute values and strip SGML comments.
+# Takes a scalar reference; edits the referenced string and returns
+# the same reference.
+sub clean_content {
+    my ($html_ref) = @_;
+
+    # Quote unquoted href values (bad HTML, but seen in the wild —
+    # bug #89749), so the parser only has to handle quoted hrefs.
+    ${$html_ref} =~ s/href\s*=\s*(?=[^\"\'])([^\s>]+)/href="$1"/ig;
+
+    # Remove HTML comments so commented-out links never match.
+    ${$html_ref} =~ s/<!-- .*?-->//sg;
+    return $html_ref;
+}
+
+# Resolve a possibly-relative $url against $base and collapse "/./"
+# and "xxx/../" path segments, returning an absolute URL.  Pure
+# string/regex implementation — no URI module involved.  Fragment- or
+# query-only relative URLs ("#..."/"?...") are not resolved (see the
+# debug message below).
+sub url_canonicalize_dots {
+    my ($base, $url) = @_;
+
+    # Step 1: absolutise $url unless it already carries a scheme.
+    if ($url !~ m{^[^:#?/]+://}) {
+        if ($url =~ m{^//}) {
+            # Protocol-relative URL: borrow the scheme from $base.
+            $base =~ m{^[^:#?/]+:}
+              and $url = $& . $url;
+        } elsif ($url =~ m{^/}) {
+            # Host-relative URL: borrow scheme://host from $base.
+            $base =~ m{^[^:#?/]+://[^/#?]*}
+              and $url = $& . $url;
+        } else {
+            uscan_debug "Resolving urls with query part unimplemented"
+              if ($url =~ m/^[#?]/);
+            # Path-relative URL: append to $base's directory part,
+            # ensuring exactly one slash in between.
+            $base =~ m{^[^:#?/]+://[^/#?]*(?:/(?:[^#?/]*/)*)?} and do {
+                my $base_to_path = $&;
+                $base_to_path .= '/' unless $base_to_path =~ m|/$|;
+                $url = $base_to_path . $url;
+            };
+        }
+    }
+    # Step 2: collapse dot segments in the path component only (host
+    # and query/fragment are left untouched).  The loop removes one
+    # "segment/.." pair per iteration until none remain.
+    $url =~ s{^([^:#?/]+://[^/#?]*)(/[^#?]*)}{
+        my ($h, $p) = ($1, $2);
+        $p =~ s{/\.(?:/|$|(?=[#?]))}{/}g;
+        1 while $p =~ s{/(?!\.\./)[^/]*/\.\.(?:/|(?=[#?])|$)}{/}g;
+        $h.$p;}e;
+    $url;
+}
+
+# Parse fetched page $content as HTML: apply pagemangle rules,
+# special-case Amazon S3 bucket listings, normalise bad markup, honour
+# a <base href=...> tag, then collect every <a href=...> whose target
+# matches one of @$patterns (each href is tried both raw and
+# canonicalised).  Returns the list of [priority, mangled_version,
+# href, match] tuples built by parse_href, or undef on mangle failure.
+sub html_search {
+    my ($self, $content, $patterns, $mangle) = @_;
+
+    # pagemangle: should not abuse this slow operation
+    if (
+        mangle(
+            $self->watchfile, \$self->line,
+            'pagemangle:\n', [@{ $self->pagemangle }],
+            \$content
+        )
+    ) {
+        return undef;
+    }
+    if (   !$self->shared->{bare}
+        and $content =~ m%^<[?]xml%i
+        and $content =~ m%xmlns="http://s3.amazonaws.com/doc/2006-03-01/"%
+        and $content !~ m%<Key><a\s+href%) {
+        # this is an S3 bucket listing. Insert an 'a href' tag
+        # into the content for each 'Key', so that it looks like html (LP: #798293)
+        uscan_warn
+"*** Amazon AWS special case code is deprecated***\nUse opts=pagemangle rule, instead";
+        $content =~ s%<Key>([^<]*)</Key>%<Key><a href="$1">$1</a></Key>%g;
+        uscan_extra_debug
+"processed content:\n$content\n[End of processed content] by Amazon AWS special case code";
+    }
+    clean_content(\$content);
+
+    # Is there a base URL given?  If so, relative hrefs resolve
+    # against it instead of the page's own URL.
+    if ($content =~ /<\s*base\s+[^>]*href\s*=\s*([\"\'])(.*?)\1/i) {
+        $self->parse_result->{urlbase}
+          = url_canonicalize_dots($self->parse_result->{base}, $2);
+    } else {
+        $self->parse_result->{urlbase} = $self->parse_result->{base};
+    }
+    uscan_extra_debug
+"processed content:\n$content\n[End of processed content] by fix bad HTML code";
+
+# search hrefs in web page to obtain a list of uversionmangled version and matching download URL
+    {
+        # NOTE(review): this prints the object's full pattern list
+        # ($self->{patterns}), not the $patterns argument actually
+        # matched below — confirm that is intended.  Also, string
+        # interpolation of an array uses $" (default space), not $,,
+        # so the local $, = ',' has no visible effect here.
+        local $, = ',';
+        uscan_verbose "Matching pattern:\n @{$self->{patterns}}";
+    }
+    my @hrefs;
+    while ($content =~ m/<\s*a\s+[^>]*(?<=\s)href\s*=\s*([\"\'])(.*?)\1/sgi) {
+        my $href = $2;
+        $href = fix_href($href);
+        my $href_canonical
+          = url_canonicalize_dots($self->parse_result->{urlbase}, $href);
+        if (defined $self->hrefdecode) {
+            if ($self->hrefdecode eq 'percent-encoding') {
+                uscan_debug "... Decoding from href: $href";
+                $href =~ s/%([A-Fa-f\d]{2})/chr hex $1/eg;
+                $href_canonical =~ s/%([A-Fa-f\d]{2})/chr hex $1/eg;
+            } else {
+                uscan_warn "Illegal value for hrefdecode: "
+                  . "$self->{hrefdecode}";
+                return undef;
+            }
+        }
+        uscan_extra_debug "Checking href $href";
+        # Try each pattern against the raw href and the canonicalised
+        # one; the canonical URL is what gets recorded either way, so
+        # a href matching in both forms yields duplicate entries.
+        foreach my $_pattern (@$patterns) {
+            if (my @match = $href =~ /^$_pattern$/) {
+                push @hrefs,
+                  parse_href($self, $href_canonical, $_pattern, \@match,
+                    $mangle);
+            }
+            uscan_extra_debug "Checking href $href_canonical";
+            if (my @match = $href_canonical =~ /^$_pattern$/) {
+                push @hrefs,
+                  parse_href($self, $href_canonical, $_pattern, \@match,
+                    $mangle);
+            }
+        }
+    }
+    return @hrefs;
+}
+
+# Scan raw (non-HTML) page content for substrings matching each watch
+# pattern.  The s/// consumes the content up to and including every
+# match, so occurrences are visited left to right.  After the match,
+# $1 holds the whole matched target (the pattern is wrapped in
+# capturing parens) and $2 the first group inside the pattern itself —
+# conventionally the version.  Returns parse_href's tuples.
+sub plain_search {
+    my ($self, $content) = @_;
+    my @hrefs;
+    foreach my $_pattern (@{ $self->patterns }) {
+        while ($content =~ s/.*?($_pattern)//) {
+            push @hrefs, $self->parse_href($1, $_pattern, $2);
+        }
+    }
+    # No urlbase computation in plain mode: downloads resolve against
+    # the watch-file base URL directly.
+    $self->parse_result->{urlbase} = $self->parse_result->{base};
+    return @hrefs;
+}
+
+# Turn one matched href into a [priority, mangled_version, href,
+# match] tuple.  $match is either an arrayref of capture groups or a
+# single captured string; $mangle names the mangle-rule accessor to
+# apply (defaults to uversionmangle).  Returns the empty list when a
+# mangle rule fails, so callers can push the result directly.
+sub parse_href {
+    my ($self, $href, $_pattern, $match, $mangle) = @_;
+    $mangle //= 'uversionmangle';
+
+    my $mangled_version;
+    if ($self->watch_version == 2) {
+
+        # watch_version 2 only recognised one group; the code
+        # below will break version 2 watch files with a construction
+        # such as file-([\d\.]+(-\d+)?) (bug #327258)
+        $mangled_version
+          = ref $match eq 'ARRAY'
+          ? $match->[0]
+          : $match;
+    } else {
+        # need the map { ... } here to handle cases of (...)?
+        # which may match but then return undef values
+        if ($self->versionless) {
+
+            # exception, otherwise $mangled_version = 1
+            $mangled_version = '';
+        } else {
+            # Join all defined capture groups with "." to form the
+            # version; re-match against $_pattern if no group list
+            # was passed in.
+            $mangled_version = join(".",
+                map { $_ if defined($_) }
+                  ref $match eq 'ARRAY' ? @$match : $href =~ m&^$_pattern$&);
+        }
+
+        # Apply the selected mangle rules in place; a failure here
+        # discards this href entirely.
+        if (
+            mangle(
+                $self->watchfile, \$self->line,
+                "$mangle:", \@{ $self->$mangle },
+                \$mangled_version
+            )
+        ) {
+            return ();
+        }
+    }
+    # Reuse $match as the human-readable annotation slot of the tuple.
+    $match = '';
+    if (defined $self->shared->{download_version}) {
+        if ($mangled_version eq $self->shared->{download_version}) {
+            $match = "matched with the download version";
+        }
+    }
+    # Priority string is sorted later by upstream_versort.
+    my $priority = $mangled_version . '-' . get_priority($href);
+    return [$priority, $mangled_version, $href, $match];
+}
+
+1;
diff --git a/lib/Devscripts/Uscan/svn.pm b/lib/Devscripts/Uscan/svn.pm
new file mode 100644
index 0000000..65dacae
--- /dev/null
+++ b/lib/Devscripts/Uscan/svn.pm
@@ -0,0 +1,67 @@
+package Devscripts::Uscan::svn;
+
+use strict;
+use Cwd qw/abs_path/;
+use Devscripts::Uscan::Output;
+use Devscripts::Uscan::Utils;
+use Devscripts::Uscan::_vcs;
+use Dpkg::IPC;
+use File::Path 'remove_tree';
+use Moo::Role;
+
+######################################################
+# search $newfile $newversion (svn mode/versionless)
+######################################################
+# Determine the newest upstream version from a subversion repository.
+# Returns ($newversion, $newfile), or undef when the lookup or a
+# mangle rule fails.
+sub svn_search {
+    my ($self) = @_;
+    my ($newfile, $newversion);
+    if ($self->versionless) {
+        # Versionless mode: the repository URL itself is the target;
+        # synthesise a pseudo-version 0.0~svnREV from the repository's
+        # last-changed revision as reported by `svn info`.
+        $newfile = $self->parse_result->{base};
+        spawn(
+            exec => [
+                'svn', 'info',
+                '--show-item', 'last-changed-revision',
+                '--no-newline', $self->parse_result->{base}
+            ],
+            wait_child => 1,
+            to_string => \$newversion
+        );
+        chomp($newversion);
+        $newversion = sprintf '0.0~svn%d', $newversion;
+        # Apply any uversionmangle rules in place; abort on failure.
+        if (
+            mangle(
+                $self->watchfile, \$self->line,
+                'uversionmangle:', \@{ $self->uversionmangle },
+                \$newversion
+            )
+        ) {
+            return undef;
+        }
+
+    }
+    ################################################
+    # search $newfile $newversion (svn mode w/tag)
+    ################################################
+    elsif ($self->mode eq 'svn') {
+        # Tagged mode: list entries under the base URL and let the
+        # shared VCS helper pick the newest matching one.
+        my @args = ('list', $self->parse_result->{base});
+        ($newversion, $newfile)
+          = get_refs($self, ['svn', @args], qr/(.+)/, 'subversion');
+        return undef if !defined $newversion;
+    }
+    return ($newversion, $newfile);
+}
+
+# Build the upstream URL to fetch: the watch-file base URL, extended
+# with the selected entry from the repository listing unless running
+# in versionless mode (where the base itself is the download URL).
+sub svn_upstream_url {
+    my ($self) = @_;
+    my $base = $self->parse_result->{base};
+    return $base if $self->versionless;
+    return $base . '/' . $self->search_result->{newfile};
+}
+
+*svn_newfile_base = \&Devscripts::Uscan::_vcs::_vcs_newfile_base;
+
+sub svn_clean { }
+
+1;
diff --git a/lib/Devscripts/Utils.pm b/lib/Devscripts/Utils.pm
new file mode 100644
index 0000000..8495ce4
--- /dev/null
+++ b/lib/Devscripts/Utils.pm
@@ -0,0 +1,31 @@
+package Devscripts::Utils;
+
+use strict;
+use Devscripts::Output;
+use Exporter 'import';
+use IPC::Run qw(run);
+
+our @EXPORT = qw(ds_exec ds_exec_no_fail);
+
+# Run an external command (given as a plain argument list — no shell
+# involved), discarding its stdout.  Never dies: returns the raw child
+# status $? so callers can inspect the exit code (0 means success).
+sub ds_exec_no_fail {
+    {
+        # NOTE(review): array interpolation in a double-quoted string
+        # uses $" (list separator, default ' '), not $,; this local
+        # $, therefore has no visible effect.  Harmless, but likely
+        # intended to be $" — confirm.
+        local $, = ' ';
+        ds_debug "Execute: @_...";
+    }
+    run \@_, '>', '/dev/null';
+    return $?;
+}
+
+# Run an external command (plain argument list, no shell), letting its
+# output through.  On non-zero exit status, reports the failure via
+# ds_die (whose fatality depends on Devscripts::Output configuration).
+sub ds_exec {
+    {
+        # NOTE(review): as in ds_exec_no_fail, interpolation uses $",
+        # so this local $, is a no-op — verify the intent.
+        local $, = ' ';
+        ds_debug "Execute: @_...";
+    }
+    run \@_;
+    if ($?) {
+        local $, = ' ';
+        ds_die "Command failed (@_)";
+    }
+}
+
+1;
diff --git a/lib/Devscripts/Versort.pm b/lib/Devscripts/Versort.pm
new file mode 100644
index 0000000..48368d0
--- /dev/null
+++ b/lib/Devscripts/Versort.pm
@@ -0,0 +1,60 @@
+# Copyright (C) 1998,2002 Julian Gilbey <jdg@debian.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or (at
+# your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+# The functions in this Perl module are versort, upstream_versort and
+# deb_versort. They each take as input an array of elements of the form
+# [version, data, ...] and sort them into decreasing order according to dpkg's
+# understanding of version sorting. The output is a sorted array. In
+# upstream_versort, "version" is assumed to be an upstream version number only,
+# whereas in deb_versort, "version" is assumed to be a Debian version number,
+# possibly including an epoch and/or a Debian revision. versort is available
+# for compatibility reasons. It compares versions as Debian versions
+# (i.e. 1-2-4 < 1-3) but disables checks for well-formed versions.
+#
+# The returned array has the greatest version as the 0th array element.
+
+package Devscripts::Versort;
+use Dpkg::Version;
+
+# Compatibility sorter: compare versions with full Debian semantics
+# but with well-formedness checking disabled.  Newest first.
+sub versort (@) {
+    return _versort(0, sub { $_[0]->[0] }, @_);
+}
+
+# Sort full Debian versions (epoch and revision aware), with
+# well-formedness checking enabled.  Newest first.
+sub deb_versort (@) {
+    return _versort(1, sub { $_[0]->[0] }, @_);
+}
+
+# Sort upstream-only version strings: each is wrapped as "1:<ver>-0"
+# so that neither epoch nor Debian-revision rules can influence the
+# comparison.  Newest first.
+sub upstream_versort (@) {
+    return _versort(0, sub { "1:" . $_[0]->[0] . "-0" }, @_);
+}
+
+# Common implementation behind the three public sorters.  Each element
+# of @namever_pairs is [version_string, data, ...]; $getversion maps a
+# pair to the string handed to Dpkg::Version, and $check toggles
+# Dpkg::Version's well-formedness checking.  Returns the pairs sorted
+# newest-first.
+sub _versort ($@) {
+    my ($check, $getversion, @namever_pairs) = @_;
+
+    # Temporarily prepend a Dpkg::Version object to each pair so the
+    # sort below can use its overloaded <=> comparison.  Note this
+    # mutates the caller's arrayrefs in place...
+    foreach my $pair (@namever_pairs) {
+        unshift(@$pair,
+            Dpkg::Version->new(&$getversion($pair), check => $check));
+    }
+
+    # Descending order: greatest version ends up at index 0.
+    my @sorted = sort { $b->[0] <=> $a->[0] } @namever_pairs;
+
+    # ...and this restores them to their original shape afterwards.
+    foreach my $pair (@sorted) {
+        shift @$pair;
+    }
+
+    return @sorted;
+}
+
+1;