replace backtick operator with run3
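The change is mechanical across the files below: every place that shelled out through the backtick operator (or qx{}) now calls IPC::Run3's run3() with an argument list, capturing stdout and stderr separately. A minimal sketch of the pattern, not copied verbatim from any one hunk ($dir, $rev1 and $rev2 are illustrative placeholders):

    # Before: backticks run the command through /bin/sh, so the
    # interpolated variables are exposed to shell quoting/injection.
    my $out = `git -C $dir log $rev1..$rev2`;
    die "git log failed\n" if $? != 0;

    # After: run3() executes the argv list directly, without a shell.
    use IPC::Run3;

    my ($stdout, $stderr);
    run3(['git', '-C', $dir, 'log', "$rev1..$rev2"], \undef, \$stdout, \$stderr);
    die "git log failed: $stderr\n" if $? != 0;

run3() still sets $? to the child's wait status, so the existing "$? != 0" checks keep working, and passing the command as a list sidesteps shell quoting of paths and revisions; \undef redirects the child's stdin from /dev/null.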
@@ -14,6 +14,7 @@ use Text::Diff;
 use IPC::Run qw(run);
 use Digest::SHA qw(hmac_sha256_hex);
 use String::Compare::ConstantTime qw(equals);
+use IPC::Run3;
 
 
 sub api : Chained('/') PathPart('api') CaptureArgs(0) {
@@ -218,8 +219,13 @@ sub scmdiff : Path('/api/scmdiff') Args(0) {
     } elsif ($type eq "git") {
         my $clonePath = getSCMCacheDir . "/git/" . sha256_hex($uri);
         die if ! -d $clonePath;
-        $diff .= `(cd $clonePath; git --git-dir .git log $rev1..$rev2)`;
-        $diff .= `(cd $clonePath; git --git-dir .git diff $rev1..$rev2)`;
+        my ($stdout1, $stderr1);
+        run3(['git', '-C', $clonePath, 'log', "$rev1..$rev2"], \undef, \$stdout1, \$stderr1);
+        $diff .= $stdout1 if $? == 0;
+
+        my ($stdout2, $stderr2);
+        run3(['git', '-C', $clonePath, 'diff', "$rev1..$rev2"], \undef, \$stdout2, \$stderr2);
+        $diff .= $stdout2 if $? == 0;
     }
 
     $c->stash->{'plain'} = { data => (scalar $diff) || " " };
@@ -13,6 +13,8 @@ use Data::Dump qw(dump);
 use List::SomeUtils qw(all);
 use Encode;
 use JSON::PP;
+use IPC::Run qw(run);
+use IPC::Run3;
 use WWW::Form::UrlEncoded::PP qw();
 
 use feature 'state';
@@ -348,19 +350,21 @@ sub contents : Chained('buildChain') PathPart Args(1) {
 
     notFound($c, "Product $path has disappeared.") unless -e $path;
 
-    # Sanitize $path to prevent shell injection attacks.
-    $path =~ /^\/[\/[A-Za-z0-9_\-\.=+:]+$/ or die "Filename contains illegal characters.\n";
-
-    # FIXME: don't use shell invocations below.
-
     # FIXME: use nix store cat
 
     my $res;
 
     if ($product->type eq "nix-build" && -d $path) {
         # FIXME: use nix ls-store -R --json
-        $res = `cd '$path' && find . -print0 | xargs -0 ls -ld --`;
-        error($c, "`ls -lR' error: $?") if $? != 0;
+        # We need to use a pipe between find and xargs, so we'll use IPC::Run
+        my $error;
+        # Run find with absolute path and post-process to get relative paths
+        my $success = run(['find', $path, '-print0'], '|', ['xargs', '-0', 'ls', '-ld', '--'], \$res, \$error);
+        error($c, "`find $path -print0 | xargs -0 ls -ld --' error: $error") unless $success;
+
+        # Strip the base path to show relative paths
+        my $escaped_path = quotemeta($path);
+        $res =~ s/^(.*\s)$escaped_path(\/|$)/$1.$2/mg;
 
         #my $baseuri = $c->uri_for('/build', $c->stash->{build}->id, 'download', $product->productnr);
         #$baseuri .= "/".$product->name if $product->name;
@@ -368,34 +372,59 @@ sub contents : Chained('buildChain') PathPart Args(1) {
     }
 
     elsif ($path =~ /\.rpm$/) {
-        $res = `rpm --query --info --package '$path'`;
-        error($c, "RPM error: $?") if $? != 0;
+        my ($stdout1, $stderr1);
+        run3(['rpm', '--query', '--info', '--package', $path], \undef, \$stdout1, \$stderr1);
+        error($c, "RPM error: $stderr1") if $? != 0;
+        $res = $stdout1;
+
         $res .= "===\n";
-        $res .= `rpm --query --list --verbose --package '$path'`;
-        error($c, "RPM error: $?") if $? != 0;
+
+        my ($stdout2, $stderr2);
+        run3(['rpm', '--query', '--list', '--verbose', '--package', $path], \undef, \$stdout2, \$stderr2);
+        error($c, "RPM error: $stderr2") if $? != 0;
+        $res .= $stdout2;
     }
 
     elsif ($path =~ /\.deb$/) {
-        $res = `dpkg-deb --info '$path'`;
-        error($c, "`dpkg-deb' error: $?") if $? != 0;
+        my ($stdout1, $stderr1);
+        run3(['dpkg-deb', '--info', $path], \undef, \$stdout1, \$stderr1);
+        error($c, "`dpkg-deb' error: $stderr1") if $? != 0;
+        $res = $stdout1;
+
         $res .= "===\n";
-        $res .= `dpkg-deb --contents '$path'`;
-        error($c, "`dpkg-deb' error: $?") if $? != 0;
+
+        my ($stdout2, $stderr2);
+        run3(['dpkg-deb', '--contents', $path], \undef, \$stdout2, \$stderr2);
+        error($c, "`dpkg-deb' error: $stderr2") if $? != 0;
+        $res .= $stdout2;
     }
 
     elsif ($path =~ /\.(tar(\.gz|\.bz2|\.xz|\.lzma)?|tgz)$/ ) {
-        $res = `tar tvfa '$path'`;
-        error($c, "`tar' error: $?") if $? != 0;
+        my ($stdout, $stderr);
+        run3(['tar', 'tvfa', $path], \undef, \$stdout, \$stderr);
+        error($c, "`tar' error: $stderr") if $? != 0;
+        $res = $stdout;
     }
 
     elsif ($path =~ /\.(zip|jar)$/ ) {
-        $res = `unzip -v '$path'`;
-        error($c, "`unzip' error: $?") if $? != 0;
+        my ($stdout, $stderr);
+        run3(['unzip', '-v', $path], \undef, \$stdout, \$stderr);
+        error($c, "`unzip' error: $stderr") if $? != 0;
+        $res = $stdout;
     }
 
     elsif ($path =~ /\.iso$/ ) {
-        $res = `isoinfo -d -i '$path' && isoinfo -l -R -i '$path'`;
-        error($c, "`isoinfo' error: $?") if $? != 0;
+        # Run first isoinfo command
+        my ($stdout1, $stderr1);
+        run3(['isoinfo', '-d', '-i', $path], \undef, \$stdout1, \$stderr1);
+        error($c, "`isoinfo' error: $stderr1") if $? != 0;
+        $res = $stdout1;
+
+        # Run second isoinfo command
+        my ($stdout2, $stderr2);
+        run3(['isoinfo', '-l', '-R', '-i', $path], \undef, \$stdout2, \$stderr2);
+        error($c, "`isoinfo' error: $stderr2") if $? != 0;
+        $res .= $stdout2;
     }
 
     else {
@@ -14,6 +14,7 @@ use Encode;
 use File::Basename;
 use JSON::MaybeXS;
 use HTML::Entities;
+use IPC::Run3;
 use List::Util qw[min max];
 use List::SomeUtils qw{any};
 use Net::Prometheus;
@@ -177,8 +178,14 @@ sub queue_runner_status_GET {
     my ($self, $c) = @_;
 
     #my $status = from_json($c->model('DB::SystemStatus')->find('queue-runner')->status);
-    my $status = decode_json(`hydra-queue-runner --status`);
-    if ($?) { $status->{status} = "unknown"; }
+    my ($stdout, $stderr);
+    run3(['hydra-queue-runner', '--status'], \undef, \$stdout, \$stderr);
+    my $status;
+    if ($? != 0) {
+        $status = { status => "unknown" };
+    } else {
+        $status = decode_json($stdout);
+    }
     my $json = JSON->new->pretty()->canonical();
 
     $c->stash->{template} = 'queue-runner-status.tt';
@@ -12,12 +12,14 @@ use Nix::Store;
 use Encode;
 use Sys::Hostname::Long;
 use IPC::Run;
+use IPC::Run3;
 use LWP::UserAgent;
 use JSON::MaybeXS;
 use UUID4::Tiny qw(is_uuid4_string);
 
 our @ISA = qw(Exporter);
 our @EXPORT = qw(
+    addToStore
     cancelBuilds
     constructRunCommandLogPath
     findLog
@@ -614,4 +616,14 @@ sub constructRunCommandLogPath {
     return "$hydra_path/runcommand-logs/$bucket/$uuid";
 }
 
+
+sub addToStore {
+    my ($path) = @_;
+
+    my ($stdout, $stderr);
+    run3(['nix-store', '--add', $path], \undef, \$stdout, \$stderr);
+    die "cannot add path $path to the Nix store: $stderr\n" if $? != 0;
+    return trim($stdout);
+}
+
 1;
@@ -7,6 +7,7 @@ use HTTP::Request;
 use LWP::UserAgent;
 use JSON::MaybeXS;
 use Hydra::Helper::CatalystUtils;
+use Hydra::Helper::Nix;
 use File::Temp;
 use POSIX qw(strftime);
 
@@ -48,9 +49,7 @@ sub fetchInput {
     print $fh encode_json \%pulls;
     close $fh;
     system("jq -S . < $filename > $tempdir/bitbucket-pulls-sorted.json");
-    my $storePath = trim(`nix-store --add "$tempdir/bitbucket-pulls-sorted.json"`
-        or die "cannot copy path $filename to the Nix store.\n");
-    chomp $storePath;
+    my $storePath = addToStore("$tempdir/bitbucket-pulls-sorted.json");
     my $timestamp = time;
     return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
 }
@@ -7,6 +7,7 @@ use Digest::SHA qw(sha256_hex);
 use File::Path;
 use Hydra::Helper::Exec;
 use Hydra::Helper::Nix;
+use IPC::Run3;
 
 sub supportedInputTypes {
     my ($self, $inputTypes) = @_;
@@ -70,8 +71,11 @@ sub fetchInput {
         (system "darcs", "get", "--lazy", $clonePath, "$tmpDir/export", "--quiet",
             "--to-match", "hash $revision") == 0
             or die "darcs export failed";
-        $revCount = `darcs changes --count --repodir $tmpDir/export`; chomp $revCount;
-        die "darcs changes --count failed" if $? != 0;
+        my ($stdout, $stderr);
+        run3(['darcs', 'changes', '--count', '--repodir', "$tmpDir/export"], \undef, \$stdout, \$stderr);
+        die "darcs changes --count failed: $stderr\n" if $? != 0;
+        $revCount = $stdout;
+        chomp $revCount;
 
         system "rm", "-rf", "$tmpDir/export/_darcs";
         $storePath = $MACHINE_LOCAL_STORE->addToStore("$tmpDir/export", 1, "sha256");
@@ -7,6 +7,7 @@ use HTTP::Request;
 use LWP::UserAgent;
 use JSON::MaybeXS;
 use Hydra::Helper::CatalystUtils;
+use Hydra::Helper::Nix;
 use File::Temp;
 use POSIX qw(strftime);
 
@@ -58,9 +59,7 @@ sub fetchInput {
     print $fh JSON->new->utf8->canonical->encode(\%pulls);
     close $fh;
 
-    my $storePath = trim(`nix-store --add "$filename"`
-        or die "cannot copy path $filename to the Nix store.\n");
-    chomp $storePath;
+    my $storePath = addToStore($filename);
     my $timestamp = time;
     return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
 }
@@ -7,6 +7,7 @@ use HTTP::Request;
 use LWP::UserAgent;
 use JSON::MaybeXS;
 use Hydra::Helper::CatalystUtils;
+use Hydra::Helper::Nix;
 use File::Temp;
 use POSIX qw(strftime);
 
@@ -115,9 +116,7 @@ sub fetchInput {
     print $fh encode_json \%refs;
     close $fh;
     system("jq -S . < $filename > $tempdir/github-refs-sorted.json");
-    my $storePath = trim(qx{nix-store --add "$tempdir/github-refs-sorted.json"}
-        or die "cannot copy path $filename to the Nix store.\n");
-    chomp $storePath;
+    my $storePath = addToStore("$tempdir/github-refs-sorted.json");
     my $timestamp = time;
     return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
 }
@@ -21,6 +21,7 @@ use HTTP::Request;
 use LWP::UserAgent;
 use JSON::MaybeXS;
 use Hydra::Helper::CatalystUtils;
+use Hydra::Helper::Nix;
 use File::Temp;
 use POSIX qw(strftime);
 
@@ -86,9 +87,7 @@ sub fetchInput {
     print $fh encode_json \%pulls;
     close $fh;
     system("jq -S . < $filename > $tempdir/gitlab-pulls-sorted.json");
-    my $storePath = trim(`nix-store --add "$tempdir/gitlab-pulls-sorted.json"`
-        or die "cannot copy path $filename to the Nix store.\n");
-    chomp $storePath;
+    my $storePath = addToStore("$tempdir/gitlab-pulls-sorted.json");
     my $timestamp = time;
     return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
 }
@@ -5,6 +5,7 @@ use warnings;
 use parent 'Hydra::Plugin';
 use POSIX qw(strftime);
 use Hydra::Helper::Nix;
+use IPC::Run3;
 
 sub supportedInputTypes {
     my ($self, $inputTypes) = @_;
@@ -37,11 +38,16 @@ sub fetchInput {
 
         print STDERR "copying input ", $name, " from $uri\n";
         if ( $uri =~ /^\// ) {
-            $storePath = `nix-store --add "$uri"`
-                or die "cannot copy path $uri to the Nix store.\n";
+            $storePath = addToStore($uri);
         } else {
-            $storePath = `PRINT_PATH=1 nix-prefetch-url "$uri" | tail -n 1`
-                or die "cannot fetch $uri to the Nix store.\n";
+            # Run nix-prefetch-url with PRINT_PATH=1
+            my ($stdout, $stderr);
+            local $ENV{PRINT_PATH} = 1;
+            run3(['nix-prefetch-url', $uri], \undef, \$stdout, \$stderr);
+            die "cannot fetch $uri to the Nix store: $stderr\n" if $? != 0;
+            # Get the last line (which is the store path)
+            my @output_lines = split /\n/, $stdout;
+            $storePath = $output_lines[-1] if @output_lines;
         }
         chomp $storePath;
 
@@ -9,6 +9,7 @@ use Net::Statsd;
 use File::Slurper qw(read_text);
 use JSON::MaybeXS;
 use Getopt::Long qw(:config gnu_getopt);
+use IPC::Run3;
 
 STDERR->autoflush(1);
 binmode STDERR, ":encoding(utf8)";
@@ -25,10 +26,11 @@ sub gauge {
 }
 
 sub sendQueueRunnerStats {
-    my $s = `hydra-queue-runner --status`;
-    die "cannot get queue runner stats\n" if $? != 0;
+    my ($stdout, $stderr);
+    run3(['hydra-queue-runner', '--status'], \undef, \$stdout, \$stderr);
+    die "cannot get queue runner stats: $stderr\n" if $? != 0;
 
-    my $json = decode_json($s) or die "cannot decode queue runner status";
+    my $json = decode_json($stdout) or die "cannot decode queue runner status";
 
     gauge("hydra.queue.up", $json->{status} eq "up" ? 1 : 0);
 