fix for large data: chunk TXT RDATA into multiple strings and fail loudly when nsupdate errors
 DDNS.pm | 42 +++++++++++++++++++++++++++++++++---------

--- a/DDNS.pm
+++ b/DDNS.pm
@@ -8,7 +8,7 @@ use JSON::PP qw/decode_json encode_json/;
 
 memoize('_gethosts');
 
-our $VERSION = '0.6';
+our $VERSION = '0.7';
 
 # Control plane update keyfile (zone-management-key)
 our $keyfile = '/etc/bind/zone-management.key';
@@ -83,17 +83,28 @@ sub __docmd {
     close $tmpfh;
 
     my $fh;
-    open($fh, "|nsupdate -k $this->{keyfile} > $filename")
+    # Capture BOTH stdout and stderr into the temp file so errors are visible,
+    # and so we can fail the caller when nsupdate fails.
+    open($fh, "|nsupdate -k $this->{keyfile} > $filename 2>&1")
         || die "Can't open nsupdate: $!";
 
     print $fh "server localhost\nzone private.invalid.\n$cmd\nshow\nsend\n";
-    close $fh;
 
-    open($fh, $filename) || die "Can't re-open tmpfile $filename: $!";
-    while (<$fh>) {
+    # If nsupdate fails, close() will return false and $? will be non-zero.
+    close($fh) or do {
+        open(my $rfh, $filename) || die "Can't re-open tmpfile $filename: $!";
+        my $out = do { local $/; <$rfh> };
+        close $rfh;
+        unlink $filename;
+        die "nsupdate failed (exit=$?):\n$out\n";
+    };
+
+    # Echo nsupdate output (useful for operators/logs)
+    open(my $rfh, $filename) || die "Can't re-open tmpfile $filename: $!";
+    while (<$rfh>) {
         print;
     }
-    close $fh;
+    close $rfh;
 
     unlink $filename;
 }
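A note on the close() check introduced above: when a Perl filehandle was opened as a pipe, close() waits for the child process and leaves its wait status in $?, so a failing nsupdate is detectable with no extra plumbing. A minimal standalone sketch of the same pattern (the sh command line is made up, purely to force a non-zero exit):

use strict;
use warnings;

# Pipe to a child that consumes stdin and then exits non-zero.
open(my $fh, '|-', 'sh', '-c', 'cat >/dev/null; exit 3')
    or die "Can't open pipe: $!";
print $fh "some input\n";

# close() on a pipe waits for the child; a non-zero exit makes it return false.
unless (close $fh) {
    warn sprintf "child failed (exit=%d)\n", $? >> 8;   # $? >> 8 is the exit code
}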
@@ -157,12 +168,22 @@ sub _parse_txt_payload {
     return ($v, $payload);
 }
 
+# Produce BIND nsupdate TXT RDATA as one or more quoted strings.
+# BIND limits each TXT "character-string" to 255 bytes, so we chunk.
 sub _quote_txt_rdata {
     my ($s) = @_;
     $s = '' unless defined $s;
 
+    # Escape for inclusion inside a quoted TXT character-string
     $s =~ s/\\/\\\\/g;
     $s =~ s/\"/\\\"/g;
-    return "\"$s\"";
+
+    # Conservative chunk size to stay well under 255 bytes after escaping
+    my $chunk_len = 200;
+    my @chunks = ($s =~ /.{1,$chunk_len}/gs);
+
+    # Emit as: "chunk1" "chunk2" "chunk3"
+    return join(' ', map { "\"$_\"" } @chunks);
 }
 
 sub _gethosts {
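For background on the chunking: DNS (RFC 1035) caps each TXT character-string at 255 bytes, but one TXT record's RDATA may hold several such strings, and nsupdate accepts them as consecutive quoted strings. A rough sketch of what the new helper returns for an oversized payload (payload contents are made up):

# A hypothetical 450-byte payload, too long for a single character-string.
my $payload = 'x' x 450;
my $rdata   = _quote_txt_rdata($payload);
# $rdata is three quoted strings of 200, 200 and 50 bytes:
#   "xx...x" "xx...x" "xx...x"
# /.{1,$chunk_len}/gs splits the (escaped) string into runs of at most
# 200 characters; /s lets '.' match newlines embedded in the payload.

One caveat: because chunking happens after escaping, a two-character escape (\\ or \") could in principle be split across a chunk boundary, leaving a chunk that ends in a lone backslash; chunking before escaping, or checking the boundary, would avoid that.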
@@ -231,7 +252,10 @@ sub set {
     _lookupOrDie($this, $dom, $type);
     my $fqdn = _fqdn($dom, $type);
 
-    $this->__docmd("update delete $fqdn TXT\nupdate add $fqdn 60 TXT " . _quote_txt_rdata($txt_payload));
+    $this->__docmd(
+        "update delete $fqdn TXT\n" .
+        "update add $fqdn 60 TXT " . _quote_txt_rdata($txt_payload)
+    );
     $this->cleanup();
 }
 
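With the chunking in place, the script that __docmd pipes to nsupdate for a large payload looks roughly like this (the FQDN and chunk contents are illustrative):

server localhost
zone private.invalid.
update delete host1.private.invalid. TXT
update add host1.private.invalid. 60 TXT "chunk-1 (up to 200 bytes)" "chunk-2 ..."
show
send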
@@ -313,7 +337,7 @@ sub set_config {
     my ($this, $cfg_hashref) = @_;
     die "Config must be a hashref" unless (defined $cfg_hashref && ref($cfg_hashref) eq 'HASH');
 
-    # Always store canonical JSON
+    # Always store compact canonical JSON (encode_json is compact)
     my $txt = encode_json($cfg_hashref);
 
     my $dom = "global.";