#!/usr/bin/perl -w
use utf8;

use strict;
# Locate the directory this script lives in (so our helper modules can be
# require'd from it) and the repository base directory one level above.
# Use the core File::Basename/Cwd modules instead of shelling out to
# dirname(1)/pwd(1): no subshell per lookup, no chomp bookkeeping, and no
# breakage on paths containing whitespace.
use File::Basename qw(dirname);
use Cwd qw(getcwd);
my $dn = dirname($0);
# make the script directory absolute if $0 was a relative path
if ($dn !~ /^\//) { $dn = getcwd() . "/" . $dn; }
push @INC,$dn;
my $cverepobase = dirname($dn);

use POSIX qw/strftime setlocale LC_TIME/;
use List::MoreUtils qw/uniq/;
use DateTime;
use JSON;
use Data::Dumper;

# Shared JSON codec: allow_nonref for decoding bare values, canonical so
# hash keys are emitted sorted and output is byte-stable between runs
# (the later old-vs-new file comparison depends on that).  JSON.pm
# mutators return the object, so the calls chain.
my $jsoncoder = JSON->new->allow_nonref->canonical(1);

require SMASHData;
require CanDBReader;
require UpdateInfoReader;
UpdateInfoReader->import_product_updates();
# FIXME UpdateInfoReader->import_images();
require CVEListReader;

# modification timestamp of every emitted advisory, collected for all.json
my @lastmodified = ();

# the last command line argument selects the run mode ("all" regenerates
# everything); default is the incremental "fast" run
my $mode = pop @ARGV;
$mode = "fast" unless defined $mode;

# all per-notice JSON files are written relative to this directory
my $osvpath = "/mounts/mirror/SuSE/ftp.suse.com/pub/projects/security/osv/";
#my $osvpath = "$cverepobase/osv";
chdir($osvpath) or die "chdir $osvpath:$!";

# url escape for purl. currently we only need spaces encoded, others should not appear.
sub percent_escape($) {
	my ($text) = @_;

	# operate on the local copy; the caller's string stays untouched
	$text =~ s{ }{%20}g;

	return $text;
}

# Map a product name to its OSV "ecosystem" string.
# The tests are ordered deliberately: "openSUSE Foo" also contains the
# substring "SUSE Foo", so the openSUSE match must come first.  Products
# already in "SUSE:..." form pass through unchanged.  Dies on anything
# it does not recognize so new products get noticed.
sub translate_ecosystem($) {
	my ($product) = @_;

	return "openSUSE:$1"   if ($product =~ /openSUSE (.*)/);
	return "SUSE:$1"       if ($product =~ /SUSE (.*)/);
	return "SUSE:HPE $1"   if ($product =~ /HPE (.*)/);
	return "SUSE:$product" if ($product =~ /Subscription Management (.*)/);
	return $product        if ($product =~ /SUSE:(.*)/);

	die "unhandled ecosystem $product\n";
}

# Generate one OSV (https://ossf.github.io/osv-schema/) JSON document per
# advisory notice into the current working directory, replacing a file
# only when its content actually changed, and record every notice's
# modification timestamp in the file-global @lastmodified for all.json.
#
# Input is the global state imported from UpdateInfoReader and
# CanDBReader; honors the file-global $mode ("all" also regenerates
# notices from previous years).
sub generate_osv() {
	# products for which we want to create OSV
	my @products = ();

	# set of every released (not in-QA) patch id across those products
	my %allpatches;

	my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = localtime();

	$year += 1900;

	# collect all patches we want to do by product (SLED/SLES, various SPs)

	foreach my $product (keys %UpdateInfoReader::patches) {
		my %patches = %{$UpdateInfoReader::patches{$product}};

		# skip codestream pseudo products and old / rolling openSUSE flavors
		next if ($product =~ /SUSE:SLE-/);
		next if ($product =~ /openSUSE 1/);
		next if ($product =~ /openSUSE Ever/);

		push @products, $product;

		# progress chatter only when STDERR is a terminal
		print STDERR "scanning patches for $product\n" if (-t STDERR);

		foreach my $patch (keys %patches) {
			if ($UpdateInfoReader::patchinqa{$patch}) {
				print STDERR "$patch is in QA\n" if (-t STDERR);
				next;
			}
			$allpatches{$patch} = 1;
		}
	}
	my @notices = keys %CanDBReader::susenotice2patches;

	foreach my $notice (sort @notices) {
		my %osv = ();

		my @patches = sort keys %{$CanDBReader::susenotice2patches{$notice}};

		my $patch;		# representative patch that still exists in updateinfo
		my $codestreampatch;	# first SUSE-<nr>-<nr> style codestream patch, if any

		foreach my $xpatch (@patches) {
			if (($xpatch =~ /^SUSE-\d*-\d*$/) && (!defined($codestreampatch))) { # codestream
				$codestreampatch = $xpatch;
			}
			# skip removed patches. this has happened as we released patches during development and later deleted them as they were included in GA.
			$patch = $xpatch if (defined($UpdateInfoReader::patchtype{$xpatch}));
		}
		if (!defined($patch)) {
			print STDERR "ERROR: notice $notice has no valid patches out of: " . join(",",@patches) . "\n";
			next;
		}

		# codestreampatch is only on SLE, but here we process also openSUSE
		if (defined($codestreampatch) && ($codestreampatch =~ /SUSE(-\d*-\d*)$/)) {
			my $patchid = $1;
			# add all images and containers here ... wild hack
			push @patches, grep (/^Image.*$patchid$/,keys %allpatches);
			push @patches, grep (/^Container.*$patchid$/,keys %allpatches);
		}

		unless (defined($UpdateInfoReader::patchtype{$patch})) {
			print STDERR "SKIP $notice as $patch is not known.\n";
			print STDERR "patches: " . join(",",@patches) . "\n";
			next;
		}
		if ($UpdateInfoReader::patchtype{$patch} ne "security") {
			# check if we have CVEs in references.
			unless (grep (/CVE/,keys %{$UpdateInfoReader::patchreferences{$patch}})) {
				print STDERR "SKIP $notice as $patch is not security or has no CVEs.\n";
				print STDERR "patches: " . join(",",@patches) . "\n";
				next;
			}
		}

		@patches = sort @patches;

		print STDERR "$notice ...\n" if (-t STDERR);
		if (!defined($UpdateInfoReader::patchpackages{$patch})) {
			print STDERR "SKIP $notice: deleted patch $patch?\n";
			next;
		}

		#print "scanning patch $patch\n";

		# strict filenames rules: https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html#51-filename
		# the "id" is convered to the filename. We use the notice as ID.
		# done: lowercasing and : replaced by _
		my $fn = "$notice.json";

		# we will need the last modified date.
		if (($mode ne "all") && ($notice !~ /SU-$year/)) {
			# only read the date for adding to all.json
			if (open(my $jsonfh,"<",$fn)) {
				my $json = join("",<$jsonfh>);
				close($jsonfh);

				eval {
					my $oldosvmap = $jsoncoder->decode($json);
					push @lastmodified, {
						"modified" => $oldosvmap->{'modified'},
						"id" => $notice,
					}
				} or do {
					# drop broken files; a later "all" run regenerates them
					warn "json invalid: $json\n";
					unlink($fn);
				};
				next;
			}
		}

		# FIXME make faster, generate only once.
		#		next if (-f $fn && ($mode ne "all"));

		my $dt = DateTime->from_epoch (epoch  => $UpdateInfoReader::patchissued{$patch});

		# translate suse notice id to weblink
		# SUSE-SU-2017:0367-1 to https://www.suse.com/support/update/announcement/2017/suse-su-20170367-1/
		my $urlnotice = "\L$notice";
		# guard the match so a failed match cannot pick up a stale $1
		# from the codestream regex above
		my $noticeyear = ($urlnotice =~ /su-(\d*):/) ? $1 : "";
		$urlnotice =~ s/://;
		$urlnotice = "https://www.suse.com/support/update/announcement/$noticeyear/$urlnotice/";

		if ($notice !~ /^SUSE/) {	# only SUSE notices are uploaded to suse.com/support/update, otherwise use imported one
			$urlnotice = $CanDBReader::advisoryid2url{$notice};
		}

		$osv{"id"} = $notice;
		$osv{"published"} =  $dt->iso8601()."Z";
		$osv{"modified"} =  $dt->iso8601()."Z";
		push @lastmodified, {
			"modified" => $osv{'modified'},
			"id" => $notice,
		};
		$osv{"summary"} = $UpdateInfoReader::patchtitle{$patch};
		$osv{"details"} = $UpdateInfoReader::patchdescription{$patch};	# was ended by "," (comma operator) - worked by accident
		$osv{"aliases"} = [];	# no aliases
		my @refs = (
			{	"type"	=> "ADVISORY",
				"url"	=> $urlnotice,
			},
		);

		my %references = ();
		if (defined($UpdateInfoReader::patchreferences{$patch})) {
			%references = %{$UpdateInfoReader::patchreferences{$patch}};
		}
		my @relations = ();
		foreach my $reference (sort keys %references) {
			if ($reference =~ /CVE/) {
				push @relations,$reference;

				my %reference = ( "type" => "WEB", "url" => "https://www.suse.com/security/cve/$reference");
				push @refs , \%reference;
				next;
			}
			# bugzilla ids must be fully numeric. (the previous /[0-9]*$/
			# also matched the empty string, so ANY non-CVE reference was
			# turned into a bogus bugzilla URL.)
			if ($reference =~ /^[0-9]+$/) {
				my %reference = ( "type" => "REPORT", "url" => "https://bugzilla.suse.com/$reference");
				push @refs , \%reference;
				next;
			}
		}
		$osv{"related"} = \@relations;
		$osv{"references"} = \@refs;

		# for which products do we have this patch

		my @affectedproducts = ();
		foreach my $xpatch (@patches) {
			foreach my $product (sort @products) {
				my $patches = $UpdateInfoReader::patches{$product};
				if (defined($patches->{$xpatch})) {
					push @affectedproducts,$product;
				}
			}
		}

		my @affected = ();

		# Foreach patch in the notice, dump the packages and the product:packages relation
		foreach my $product (@affectedproducts) {
			my $patches = $UpdateInfoReader::patches{$product};

			foreach my $xpatch (@patches) {
				next unless (defined($patches->{$xpatch}));
				next unless (defined($UpdateInfoReader::patchpackages{$xpatch}));

				# only print the ones for this product.
				my %packages = %{$UpdateInfoReader::patchpackages{$xpatch}};
				my %binaries = ();
				foreach my $pkg (keys %packages) {
					# debug packages are just noise for consumers
					next if ($pkg =~ /-debuginfo/);
					next if ($pkg =~ /-debugsource/);

					$binaries{$pkg} = $packages{$pkg};
				}
				next unless defined($UpdateInfoReader::patchsrcpackages{$xpatch});
				my %srcpackages = %{$UpdateInfoReader::patchsrcpackages{$xpatch}};
				# my %packagearchs = %{$UpdateInfoReader::patchpackagearchs{$xpatch}};
				foreach my $package (sort keys %srcpackages) {
					# do not do this per-arch currently. will just blow the size up.
					# foreach my $arch (sort keys %{$packagearchs{$package}}) {

						my $ecoproduct = translate_ecosystem($product);

						my %osvaffected = (
							"package" => {
								"ecosystem" => $ecoproduct,
								"name" => $package,
								"purl" => "purl:rpm/suse/$package&distro=" . percent_escape($product),
							},
							"ranges" => [ {
								"type" => "ECOSYSTEM",
								"events" => [
									{
										"introduced" => "0",
									} , {
										"fixed" => "$srcpackages{$package}",
									}
								]
							} ],
							"ecosystem_specific" => { "binaries" => [ \%binaries ], },
						);
						push @affected,\%osvaffected;
					# }
				}
			}
		}

		$osv{"affected"} = \@affected;

		# write to $fn.new first and only replace $fn when the content
		# changed, so mirror timestamps stay stable.
		open(my $osvout,">","$fn.new") or die "open $fn.new:$!";
		print $osvout $jsoncoder->encode(\%osv);
		close($osvout) or die "close $fn.new:$!";

		my $newosv = "";	# initialized so the ne below never compares undef
		my $oldosv = "<empty>";
		if (open(my $newfh,"<","$fn.new")) {
			$newosv = join("",<$newfh>);
			close($newfh);
		}
		if (open(my $oldfh,"<",$fn)) {
			$oldosv = join("",<$oldfh>);
			close($oldfh);
		}
		if ($oldosv ne $newosv) {
			# show a human readable diff; diff exits non-zero when the files differ
			if (system("jq . <$fn >$fn.formatted ; jq . <$fn.new >$fn.new.formatted ; diff -uN $fn.formatted $fn.new.formatted")) {
				rename("$fn.new",$fn);
				unlink("$fn.new.formatted");
				unlink("$fn.formatted");
			} else {
				print STDERR "diff for $fn not detected by reader?\n";
				unlink("$fn.new");
			}
		} else {
			unlink("$fn.new");
		}
	}
}

# make generated files world readable on the mirror
umask 022;

generate_osv();

# write the index of all notices with their modification times, then pack
# the whole tree for download.
chdir("/mounts/mirror/SuSE/ftp.suse.com/pub/projects/security/") or die "chdir:$!";
open(my $alljson,">","osv/all.json") or die "could not open all.json:$!";
print $alljson $jsoncoder->encode(\@lastmodified);
# buffered write errors (e.g. disk full) only surface at close
close($alljson) or die "could not write all.json:$!";
# list form system() bypasses the shell; previously a failed tar was
# silently ignored and SUCCESS printed over a truncated tarball.
system("tar","cjf","osv.tar.bz2","osv/") == 0 or die "tar failed: $?";


print "SUCCESS\n";
