Using threads, because it's fun :)
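The serial check-and-update loop is replaced with a Thread::Pool worker pool: tasks are queued with job() and collected with result_any(), the same calls used in pcurse below. A minimal sketch of that pattern, with a placeholder squaring task instead of pcurse's real 'check'/'download' handlers:

use strict;
use feature ':5.10';
use Thread::Pool;
#four workers; the 'do' sub receives whatever arguments job() was queued with
my $pool = Thread::Pool->new( { workers => 4, do => sub { my $n = shift; return $n * $n; } } );
my %pending;
$pending{ $pool->job($_) } = $_ for 1..8; #queue jobs, remember their ids
while(scalar(keys %pending)) {
my $jobid;
my ($square) = $pool->result_any(\$jobid); #wait for any job to finish
say 'job '.$jobid.': '.$pending{$jobid}.' squared is '.$square;
delete $pending{$jobid};
}
$pool->shutdown;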

commit b087c00991
parent f3a40e3b50
2019-07-07 00:57:07 +02:00
2 changed files with 219 additions and 64 deletions

pcurse (122 changed lines)

@@ -1,44 +1,114 @@
#!/usr/bin/env perl
use strict;
no warnings 'all';
use lib '.';
use pcurse;
use feature ':5.10';
select(STDOUT);
$| = 1;
my $ret;
my $msg;
my @toupd;
my %jobs;
my $workers;
my $opts = pcurse::parse_arguments;
$opts->{'config'} = $ENV{'HOME'}.'/.pcurse/config.json' unless(defined($opts->{'config'}));
my $conf = pcurse::load_config($opts->{'config'});
$conf = pcurse::check_config($conf);
($ret,$msg) = pcurse::save_config($conf->{'config'},$conf);
unless($ret) {
print $msg."\n";
exit 1;
}
$conf = pcurse::merge_opts($opts,$conf);
my $addons = pcurse::load_addons($conf->{'addons'});
say 'Loaded '.scalar(@{$addons}).' addons';
my $pool = pcurse::init_pool($conf->{'workers'});
my $worki = 0;
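#%jobs maps a Thread::Pool job id to its task type, payload and the addon's index in @{$addons}; $worki tracks that index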
print 'Checking for updates ';
foreach my $addon(@{$addons}) {
if(exists($addon->{'uri'})) {
my $workid = $pool->job('check',$addon,$conf);
$jobs{$workid}{'todo'} = 'check';
$jobs{$workid}{'job'} = $addon;
$jobs{$workid}{'i'} = $worki;
}
$worki++; #keep 'i' aligned with @{$addons} even when an addon has no uri
}
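#collect the 'check' results as they finish; addons with a newer version available are queued in @toupd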
while(scalar(keys %jobs)) {
my $jobid;
if(my @jres = $pool->result_any(\$jobid)) {
if($jres[0] == 1) {
my $a = $jres[1]; #Addon data structure
my $i = $jobs{$jobid}{'i'}; #ID from @{$addons}
my $v = $jres[1]->{'targetversion'}; #Version we're trying to fetch
die "Not enough params: a: $a, i: $i, v: $v" unless(defined($a) && defined($i) && defined($v));
my $nextjob = [ $i, $a, $v ];
push(@toupd,$nextjob);
} elsif($jres[1] eq 'No need to update') {
} else {
say 'Error during check: '.$jres[1];
}
delete $jobs{$jobid};
print '.';
}
}
print "\n";
print 'Downloading updates ' if(scalar(@toupd));
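#queue a 'download' job for every addon flagged by the check stage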
foreach my $a(@toupd) {
my $id = $a->[0];
my $addon = $a->[1];
my $version = $a->[2];
my $workid = $pool->job('download',$conf->{'baseuri'}.$addon->{'uri'},$addon->{'fileid'});
$jobs{$workid}{'todo'} = 'download';
$jobs{$workid}{'job'} = [ $conf->{'baseuri'}.$addon->{'uri'},$addon->{'fileid'} ];
$jobs{$workid}{'id'} = $id;
$jobs{$workid}{'tv'} = $version;
}
my %tounpack;
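#%tounpack: archive filename => [ addon index, raw archive content, new version ]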
while(scalar(keys %jobs)) {
my $jobid;
if(my @jres = $pool->result_any(\$jobid)) {
if($jres[0]{'retval'} == 1) {
my $ret = $jres[0]{'retval'};
my $filename = $jres[0]{'filename'};
my $file = $jres[0]{'filecontent'};
my $version = $jobs{$jobid}{'tv'};
if(defined($filename)) {
#say 'Going to unpack file '.$filename.' containing version '.$version;
$tounpack{$filename} = [ $jobs{$jobid}{'id'},$file,$version ];
} else {
#print Dumper @jres;
my $ai = $jobs{$jobid}{'id'};
my $an = $addons->[$ai]->{'name'};
say 'Passed an empty filename for '.$an.' in update';
}
} else {
my $uri = $jobs{$jobid}{'job'}->[0]; #uri recorded when the job was queued
say 'Download failed for '.$uri;
}
delete $jobs{$jobid};
print '.';
}
}
$pool->shutdown;
print "\n" if(scalar(@toupd));
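#extract each downloaded archive and record the new version on success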
say 'Unpacking updates ' if(scalar(keys %tounpack));
foreach my $unpacking(keys %tounpack) {
my $id = $tounpack{$unpacking}->[0];
my $file = $tounpack{$unpacking}->[1];
my $version = $tounpack{$unpacking}->[2];
if(pcurse::update($unpacking,$file,$conf->{'wowpath'})) {
say 'Updated '.$addons->[$id]->{'name'}.' from version '.$addons->[$id]->{'version'}.' to '.$version;
$addons->[$id]->{'version'} = $version;
} else {
say 'Unpacking failed for '.$unpacking;
}
}
($ret,$msg) = pcurse::save_config($conf->{'addons'},$addons);
print $msg."\n" unless($ret);

pcurse.pm (161 changed lines)

@@ -1,21 +1,44 @@
#!/usr/bin/env perl
package pcurse;
use strict;
no warnings 'all';
use Getopt::Long;
use Archive::Extract;
use Thread::Pool;
use JSON;
use LWP::UserAgent;
use HTML::HTML5::Parser;
use IO::Socket::SSL;
use Data::Dumper; #merge_opts dumps options and config when --debug is set
use feature ':5.10';
sub merge_opts {
my $opts = shift;
my $conf = shift;
if(defined($opts->{'debug'})) {
print 'We got opts:'."\n";
print Dumper $opts;
print 'We got conf:'."\n";
print Dumper $conf;
}
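#anything set on the command line overrides the corresponding value loaded from the config file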
foreach my $k(keys %{$opts}) {
next unless(defined($opts->{$k}));
$conf->{$k} = $opts->{$k};
}
return $conf;
}
sub parse_arguments {
my $toret;
Getopt::Long::GetOptions (
"verbose" => \$toret->{'verbose'},
"wowpath=s" => \$toret->{'wowpath'},
"baseuri=s" => \$toret->{'baseuri'},
"config=s" => \$toret->{'config'},
"test" => \$toret->{'test'},
"workers=i" => \$toret->{'workers'},
"debug" => \$toret->{'debug'},
);
return $toret;
}
sub load_config {
@@ -55,8 +78,9 @@ sub check_config {
sub sane_defaults {
my $in = shift;
$in->{'baseuri'} = 'https://www.curseforge.com' unless(exists($in->{'baseuri'}));
$in->{'config'} = $ENV{'HOME'}.'/.pcurse/config.json' unless(exists($in->{'config'}));
$in->{'baseuri'} =~ s/^http/https/ unless($in->{'baseuri'} =~ m/^https/);
$in->{'addons'} = $ENV{'HOME'}.'/.pcurse/addons.json' unless(exists($in->{'addons'}));
$in->{'workers'} = "4" unless(exists($in->{'workers'}));
return $in;
}
@@ -93,13 +117,21 @@ sub load_addons {
sub save_config {
my $json = JSON->new;
$json->convert_blessed;
$json->allow_nonref;
$json->allow_tags;
$json->allow_blessed;
my $file = shift;
my $json_data = shift;
if(ref $json_data eq 'ARRAY' or ref $json_data eq 'HASH') {
my $text = $json->pretty->encode($json_data);
open my $fh, ">", $file or return (0,'Could not open '.$file.' for writing: '.$!);
print $fh $text;
close $fh;
return (1,$file.' saved successfully');
} else {
return (0,'Invalid format on passed data (not a HASH or ARRAY ref)');
}
}
sub import_json {
@@ -183,34 +215,44 @@ sub find_in_html {
} else {
#This means we're on our own = whatever we're getting here is a html document as a string, unparsed.
my $parser = HTML::HTML5::Parser->new();
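#'dlstring' returns the part of the matching node's href after $sstring; 'vstring' returns its data-name attribute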
if(defined($html)) {
my @file = split(/\n/, $html);
foreach my $line(@file) {
if($line =~ m/$sstring/) {
my $parsed = $parser->parse_balanced_chunk($line);
my @nodes = $parsed->nonBlankChildNodes();
foreach my $node(@nodes) {
my @atr = $node->attributes();
if($mode eq 'dlstring') {
my $href = $node->getAttribute('href');
$retstr = (split(/$sstring/, $href,2))[1];
} elsif($mode eq 'vstring') {
$retstr = $node->getAttribute('data-name');
}
return $retstr if($retstr);
}
}
}
} else {
return undef;
}
}
return undef;
}
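#resolve the /download/<fileid>/file URI and fetch it; returns (1,$filename,$content) on success or (0,undef,undef) on failure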
sub download_update {
my $uri = shift;
my $fileid = shift;
$uri .= '/download/'.$fileid.'/file';
my ($ret,$filename,$file) = pcurse::download($uri);
return (1,$filename,$file) if($ret);
return (0,undef,undef);
}
sub update {
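#new signature: archive filename, raw archive content, target path; the file is staged under /tmp before extraction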
my $filename = shift;
my $file = shift;
my $targetpath = shift;
unless(-e "/tmp/$filename") {
open my $fh, '>', "/tmp/$filename" or return 0;
print $fh $file;
@@ -226,9 +268,52 @@ sub update {
sub download {
my $uri = shift;
my $file = pcurse::http_get($uri);
if(defined($file)) {
my $filename = $file->filename;
my $content = $file->decoded_content;
return (1,$filename,$content);
} else {
return (0,$uri,undef);
}
}
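#build the Thread::Pool; workers understand two job types: 'check' (find the latest file id and version for an addon) and 'download' (fetch the archive for a given file id)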
sub init_pool {
my $w = shift;
my $p = Thread::Pool->new( {
workers => $w,
do => sub {
my $todo = shift;
if($todo eq 'check') {
my $addon = shift;
my $conf = shift;
my $html = pcurse::html_get($conf->{'baseuri'}.$addon->{'uri'});
my $fileid = pcurse::get_latest_file_id($html,$addon->{'uri'});
if($fileid) {
$addon->{'fileid'} = $fileid;
my $version = pcurse::get_product_version($html,$addon->{'uri'},$fileid);
if($version && ($version ne $addon->{'version'})) {
unless($conf->{'test'}) {
$addon->{'targetversion'} = $version;
return (1,$addon);
}
} else {
return (0,'No need to update');
}
} else {
return (0,'Could not find file id for '.$addon->{'name'});
}
} elsif($todo eq 'download') {
my $uri = shift;
my $fileid = shift;
my ($ret,$filename,$file) = pcurse::download_update($uri,$fileid);
return { retval => $ret, filename => $filename, filecontent => $file } if($ret);
return { retval => 0, filename => undef, filecontent => undef };
} else {
return (0,'Unknown task');
}
},
});
return $p;
}
1;