Revision 11e50931
Added by Sven Schöling about 8 years ago
SL/Controller/Gdpdu.pm (new file)

```perl
package SL::Controller::Gdpdu;

# TODO:
# - dependent exclusive checkboxes via javascript

use strict;

use parent qw(SL::Controller::Base);

use SL::GDPDU;
use SL::Locale::String qw(t8);
use SL::Helper::Flash;

use Rose::Object::MakeMethods::Generic (
  'scalar --get_set_init' => [ qw(from to tables) ],
);

__PACKAGE__->run_before('check_auth');

sub action_filter {
  my ($self) = @_;

  $self->from(DateTime->today->add(years => -1)->add(days => 1)) if !$self->from;
  $self->to(DateTime->today)                                     if !$self->to;

  $self->render('gdpdu/filter', title => t8('GDPDU Export'));
}

sub action_export {
  my ($self) = @_;

  if (!$self->check_inputs) {
    $self->action_filter;
    return;
  }

  my $gdpdu = SL::GDPDU->new(
    company    => $::instance_conf->get_company,
    location   => $::instance_conf->get_address,
    from       => $self->from,
    to         => $self->to,
    tables     => $self->tables,
    all_tables => !@{ $self->tables } && $::form->{all_tables},
  );

  my $filename = $gdpdu->generate_export;

  $self->send_file($filename, name => t8('gdpdu-#1-#2.zip', $self->from->ymd, $self->to->ymd), unlink => 1);
}

#--- other stuff

sub check_auth { $::auth->assert('report') }

sub check_inputs {
  my ($self) = @_;

  my $error = 0;

  if ($::form->{tables}) {
    $self->tables([ keys %{ $::form->{tables} } ]);
    # these three get inferred
    push @{ $self->tables }, 'invoice'              if $::form->{tables}{ar} || $::form->{tables}{ap};
    push @{ $self->tables }, 'orderitems'           if $::form->{tables}{oe};
    push @{ $self->tables }, 'delivery_order_items' if $::form->{tables}{delivery_orders};
  }

  if (!@{ $self->tables } && !$::form->{all_tables}) {
    flash('error', t8('No, I really do need checked tables to export.'));
    $error = 1;
  }

  if (!$::form->{from}) {
    my $epoch = DateTime->new(day => 1, month => 1, year => 1900);
    flash('info', t8('No start date given, setting to #1', $epoch->to_kivitendo));
    $self->from($epoch);
  }

  if (!$::form->{to}) {
    flash('info', t8('No end date given, setting to today'));
    $self->to(DateTime->today);
  }

  !$error;
}

sub init_from   { DateTime->from_kivitendo($::form->{from}) }
sub init_to     { DateTime->from_kivitendo($::form->{to}) }
sub init_tables { [ ] }

1;
```
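For orientation (not part of the commit): a small sketch of the table inference in `check_inputs`. When only some header tables are ticked, the matching line-item tables are added automatically; the form values below are made up.

```perl
# Hedged illustration of the inference step in check_inputs():
# with only the 'ar' and 'oe' checkboxes ticked, the line item tables
# 'invoice' and 'orderitems' are appended automatically.
use strict;
use warnings;

my $form_tables = { ar => 1, oe => 1 };           # made-up form input

my @tables = keys %$form_tables;                  # ('ar', 'oe') in hash order
push @tables, 'invoice'    if $form_tables->{ar} || $form_tables->{ap};
push @tables, 'orderitems' if $form_tables->{oe};

print "@tables\n";   # e.g. "ar oe invoice orderitems"
```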
SL/GDPDU.pm (new file)

```perl
package SL::GDPDU;

# TODO:
# translations
# optional: background jobable

use strict;
use utf8;

use parent qw(Rose::Object);

use Text::CSV_XS;
use XML::Writer;
use Archive::Zip;
use File::Temp ();
use File::Spec ();
use List::UtilsBy qw(partition_by);

use SL::DB::Helper::ALL; # since we work on meta data, we need everything
use SL::DB::Helper::Mappings;
use SL::Locale::String qw(t8);

use Rose::Object::MakeMethods::Generic (
  scalar                  => [ qw(from to tables writer company location) ],
  'scalar --get_set_init' => [ qw(files tempfiles export_ids) ],
);

# in this we find:
# key:         table name
# name:        short name, translated
# description: long description, translated
# transdate:   column used to filter from/to, empty if table is filtered otherwise
# keep:        arrayref of columns that should be saved for further referencing
# tables:      arrayref with one column and one or many table.column references that were kept earlier
my %known_tables = (
  ar                   => { name => t8('Invoice'),              description => t8('Sales Invoices and Accounts Receivables'),   keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
  ap                   => { name => t8('Purchase Invoice'),     description => t8('Purchase Invoices and Accounts Payables'),   keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
  oe                   => { name => t8('Orders'),               description => t8('Orders and Quotations, Sales and Purchase'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
  delivery_orders      => { name => t8('Delivery Orders'),      description => t8('Delivery Orders'),                            keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
  gl                   => { name => t8('General Ledger'),       description => t8('General Ledger Entries'),                     keep => [ qw(id) ],                       transdate => 'transdate', },
  invoice              => { name => t8('Invoice Positions'),    description => t8('Positions for all Invoices'),                 keep => [ qw(parts_id) ], tables => [ trans_id => "ar.id", "ap.id" ] },
  orderitems           => { name => t8('OrderItems'),           description => t8('Positions for all Orders'),                   keep => [ qw(parts_id) ], tables => [ trans_id => "oe.id" ] },
  delivery_order_items => { name => t8('Delivery Order Items'), description => t8('Positions for all Delivery Orders'),          keep => [ qw(parts_id) ], tables => [ delivery_order_id => "delivery_orders.id" ] },
  acc_trans            => { name => t8('Transactions'),         description => t8('All general ledger entries'),                 keep => [ qw(chart_id) ], tables => [ trans_id => "ar.id", "ap.id", "oe.id", "delivery_orders.id", "gl.id" ] },
  chart                => { name => t8('Charts'),               description => t8('Chart of Accounts'),       tables => [ id => "acc_trans.chart_id" ] },
  customer             => { name => t8('Customers'),            description => t8('Customer Master Data'),    tables => [ id => "ar.customer_id", "ap.customer_id", "oe.customer_id", "delivery_orders.customer_id" ] },
  vendor               => { name => t8('Vendors'),              description => t8('Vendor Master Data'),      tables => [ id => "ar.vendor_id", "ap.vendor_id", "oe.vendor_id", "delivery_orders.vendor_id" ] },
  parts                => { name => t8('Parts'),                description => t8('Parts, Services, and Assemblies'), tables => [ id => "invoice.parts_id", "orderitems.parts_id", "delivery_order_items.parts_id" ] },
);

# rows in this listing are tiers.
# tables may depend on ids in a tier above them
my @export_table_order = qw(
  ar ap gl oe delivery_orders
  invoice orderitems delivery_order_items
  customer vendor
  parts
  acc_trans
  chart
);

# needed because the standard dbh sets datestyle german and we don't want to mess with that
my $date_format = 'DD.MM.YYYY';

# callbacks that produce the xml spec for these column types
my %column_types = (
  'Rose::DB::Object::Metadata::Column::Integer'   => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 0) }) },
  'Rose::DB::Object::Metadata::Column::BigInt'    => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 0) }) },
  'Rose::DB::Object::Metadata::Column::Text'      => sub { $_[0]->tag('AlphaNumeric') },
  'Rose::DB::Object::Metadata::Column::Varchar'   => sub { $_[0]->tag('AlphaNumeric') },
  'Rose::DB::Object::Metadata::Column::Character' => sub { $_[0]->tag('AlphaNumeric') },
  'Rose::DB::Object::Metadata::Column::Numeric'   => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 5) }) },
  'Rose::DB::Object::Metadata::Column::Date'      => sub { $_[0]->tag('Date', sub { $_[0]->tag('Format', $date_format) }) },
  'Rose::DB::Object::Metadata::Column::Timestamp' => sub { $_[0]->tag('Date', sub { $_[0]->tag('Format', $date_format) }) },
  'Rose::DB::Object::Metadata::Column::Float'     => sub { $_[0]->tag('Numeric') },
  'Rose::DB::Object::Metadata::Column::Boolean'   => sub { $_[0]->tag('AlphaNumeric', sub { $_[0]
    ->tag('Map', sub { $_[0]
      ->tag('From', 1)
      ->tag('To', t8('true'))
    })
    ->tag('Map', sub { $_[0]
      ->tag('From', 0)
      ->tag('To', t8('false'))
    })
    ->tag('Map', sub { $_[0]
      ->tag('From', 0)
      ->tag('To', t8('false'))
    })
  }) },
);

sub generate_export {
  my ($self) = @_;

  # verify data
  $self->from && 'DateTime' eq ref $self->from or die 'need from date';
  $self->to   && 'DateTime' eq ref $self->to   or die 'need to date';
  $self->from <= $self->to                     or die 'from date must be earlier or equal than to date';
  $self->tables && @{ $self->tables }          or die 'need tables';
  for (@{ $self->tables }) {
    next if $known_tables{$_};
    die "unknown table '$_'";
  }

  # get data from those tables and save to csv
  # for that we need to build queries that fetch all the columns
  for ($self->sorted_tables) {
    $self->do_csv_export($_);
  }

  # write xml file
  $self->do_xml_file;

  # add dtd
  $self->files->{'gdpdu-01-08-2002.dtd'} = File::Spec->catfile('users', 'gdpdu-01-08-2002.dtd');

  # make zip
  my ($fh, $zipfile) = File::Temp::tempfile();
  my $zip            = Archive::Zip->new;

  while (my ($name, $file) = each %{ $self->files }) {
    $zip->addFile($file, $name);
  }

  $zip->writeToFileHandle($fh) == Archive::Zip::AZ_OK() or die 'error writing zip file';
  close($fh);

  return $zipfile;
}

sub do_xml_file {
  my ($self) = @_;

  my ($fh, $filename) = File::Temp::tempfile();
  binmode($fh, ':utf8');

  $self->files->{'INDEX.XML'} = $filename;
  push @{ $self->tempfiles }, $filename;

  my $writer = XML::Writer->new(
    OUTPUT   => $fh,
    ENCODING => 'UTF-8',
  );

  $self->writer($writer);
  $self->writer->xmlDecl('UTF-8');
  $self->writer->doctype('DataSet', undef, "gdpdu-01-08-2002.dtd");
  $self->tag('DataSet', sub { $self
    ->tag('Version', '1.0')
    ->tag('DataSupplier', sub { $self
      ->tag('Name', $self->client_name)
      ->tag('Location', $self->client_location)
      ->tag('Comment', $self->make_comment)
    })
    ->tag('Media', sub { $self
      ->tag('Name', t8('DataSet #1', 1));
      for (@{ $self->tables }) { $self
        ->table($_)
      }
    })
  });
  close($fh);
}

sub table {
  my ($self, $table) = @_;
  my $writer = $self->writer;

  $self->tag('Table', sub { $self
    ->tag('URL', "$table.csv")
    ->tag('Name', $known_tables{$table}{name})
    ->tag('Description', $known_tables{$table}{description})
    ->tag('Validity', sub { $self
      ->tag('Range', sub { $self
        ->tag('From', $self->from->to_kivitendo(dateformat => 'dd.mm.yyyy'))
        ->tag('To',   $self->to->to_kivitendo(dateformat => 'dd.mm.yyyy'))
      })
      ->tag('Format', $date_format)
    })
    ->tag('DecimalSymbol', '.')
    ->tag('DigitGroupingSymbol', '')
    ->tag('VariableLength', sub { $self
      ->columns($table)
      ->foreign_keys($table)
    })
  });
}

sub _table_columns {
  my ($table) = @_;
  my $package = SL::DB::Helper::Mappings::get_package_for_table($table);

  # PrimaryKeys must come before regular columns, so partition first
  partition_by { 1 * $_->is_primary_key_member } $package->meta->columns;
}

sub columns {
  my ($self, $table) = @_;

  my %cols_by_primary_key = _table_columns($table);

  for my $column (@{ $cols_by_primary_key{1} }) {
    my $type = $column_types{ ref $column };

    die "unknown col type @{[ ref $column ]}" unless $type;

    $self->tag('VariablePrimaryKey', sub { $self
      ->tag('Name', $column->name);
      $type->($self);
    })
  }

  for my $column (@{ $cols_by_primary_key{0} }) {
    my $type = $column_types{ ref $column };

    die "unknown col type @{[ ref $column ]}" unless $type;

    $self->tag('VariableColumn', sub { $self
      ->tag('Name', $column->name);
      $type->($self);
    })
  }

  $self;
}

sub foreign_keys {
  my ($self, $table) = @_;
  my $package = SL::DB::Helper::Mappings::get_package_for_table($table);

  my %requested = map { $_ => 1 } @{ $self->tables };

  for my $rel ($package->meta->foreign_keys) {
    next unless $requested{ $rel->class->meta->table };

    # ok, now extract the columns used as foreign key
    my %key_columns = $rel->key_columns;

    if (1 != keys %key_columns) {
      die "multi keys? we don't support this currently. fix it please";
    }

    if ($table eq $rel->class->meta->table) {
      # self referential foreign keys are a PITA to export correctly. skip!
      next;
    }

    $self->tag('ForeignKey', sub {
      $_[0]->tag('Name', $_) for keys %key_columns;
      $_[0]->tag('References', $rel->class->meta->table);
    });
  }
}

sub do_csv_export {
  my ($self, $table) = @_;

  my $csv = Text::CSV_XS->new({ binary => 1, eol => "\n", sep_char => ",", quote_char => '"' });

  my ($fh, $filename) = File::Temp::tempfile();
  binmode($fh, ':utf8');

  $self->files->{"$table.csv"} = $filename;
  push @{ $self->tempfiles }, $filename;

  # in the right order (primary keys first)
  my %cols_by_primary_key = _table_columns($table);
  my @columns             = (@{ $cols_by_primary_key{1} }, @{ $cols_by_primary_key{0} });
  my %col_index           = do { my $i = 0; map {; "$_" => $i++ } @columns };

  # and normalize date stuff
  my @select_tokens = map { (ref $_) =~ /Time/ ? $_->name . '::date' : $_->name } @columns;

  my @where_tokens;
  my @values;
  if ($known_tables{$table}{transdate}) {
    if ($self->from) {
      push @where_tokens, "$known_tables{$table}{transdate} >= ?";
      push @values, $self->from;
    }
    if ($self->to) {
      push @where_tokens, "$known_tables{$table}{transdate} <= ?";
      push @values, $self->to;
    }
  }
  if ($known_tables{$table}{tables}) {
    my ($col, @col_specs) = @{ $known_tables{$table}{tables} };
    my %ids;
    for (@col_specs) {
      my ($ftable, $fkey) = split /\./, $_;
      if (!exists $self->export_ids->{$ftable}{$fkey}) {
        # check if we forgot to keep it
        if (!grep { $_ eq $fkey } @{ $known_tables{$ftable}{keep} || [] }) {
          die "unknown table spec '$_' for table $table, did you forget to keep $fkey in $ftable?"
        } else {
          # hmm, most likely just an empty set.
          $self->export_ids->{$ftable}{$fkey} = {};
        }
      }
      $ids{$_}++ for keys %{ $self->export_ids->{$ftable}{$fkey} };
    }
    if (keys %ids) {
      push @where_tokens, "$col IN (@{[ join ',', ('?') x keys %ids ]})";
      push @values, keys %ids;
    } else {
      push @where_tokens, '1=0';
    }
  }

  my $where_clause = @where_tokens ? 'WHERE ' . join ' AND ', @where_tokens : '';

  my $query = "SELECT " . join(', ', @select_tokens) . " FROM $table $where_clause";

  my $sth = $::form->get_standard_dbh->prepare($query);
  $sth->execute(@values) or die "error executing query $query: " . $sth->errstr;

  while (my $row = $sth->fetch) {
    for my $keep_col (@{ $known_tables{$table}{keep} || [] }) {
      next if !$row->[$col_index{$keep_col}];
      $self->export_ids->{$table}{$keep_col} ||= {};
      $self->export_ids->{$table}{$keep_col}{$row->[$col_index{$keep_col}]}++;
    }
    $csv->print($fh, $row) or $csv->error_diag;
  }
  $sth->finish();
}

sub tag {
  my ($self, $tag, $content) = @_;

  $self->writer->startTag($tag);
  if ('CODE' eq ref $content) {
    $content->($self);
  } else {
    $self->writer->characters($content);
  }
  $self->writer->endTag;
  return $self;
}

sub make_comment {
  my $gdpdu_version = API_VERSION();
  my $kivi_version  = $::form->read_version;
  my $person        = $::myconfig{name};
  my $contact       = join ', ',
    (t8("Email") . ": $::myconfig{email}" ) x!! $::myconfig{email},
    (t8("Tel")   . ": $::myconfig{tel}" )   x!! $::myconfig{tel},
    (t8("Fax")   . ": $::myconfig{fax}" )   x!! $::myconfig{fax};

  t8('DataSet for GDPdU version #1. Created with kivitendo #2 by #3 (#4)',
    $gdpdu_version, $kivi_version, $person, $contact
  );
}

sub client_name {
  $_[0]->company
}

sub client_location {
  $_[0]->location
}

sub sorted_tables {
  my ($self) = @_;

  my %given = map { $_ => 1 } @{ $self->tables };

  grep { $given{$_} } @export_table_order;
}

sub all_tables {
  my ($self, $yesno) = @_;

  $self->tables(\@export_table_order) if $yesno;
}

sub init_files      { +{} }
sub init_export_ids { +{} }
sub init_tempfiles  { [] }

sub API_VERSION {
  DateTime->new(year => 2002, month => 8, day => 14)->to_kivitendo;
}

sub DESTROY {
  unlink $_ for @{ $_[0]->tempfiles || [] };
}

1;

__END__

=encoding utf-8

=head1 NAME

SL::GDPDU - IDEA export generator

=head1 FUNCTIONS

=over 4

=item C<new PARAMS>

Create a new export object. C<PARAMS> may contain:

=over 4

=item company

The name of the company, needed for the supplier header

=item location

Location of the company, needed for the supplier header

=item from

=item to

Will only include records in the specified date range. Data pulled from other
tables will be culled to match what is needed for these records.

=item tables

A list of tables to be exported.

=item all_tables

Alternative to C<tables>, enables all known tables.

=back

=item C<generate_export>

Do the work. Will return an absolute path to a temp file where all export files
are zipped together.

=back

=head1 CAVEATS

=over 4

=item *

Date format is shit. The official docs state that only C<YY>, C<YYYY>, C<MM>,
and C<DD> are supported; timestamps do not exist.

=item *

Number parsing seems to be fragile. The official docs state that the behaviour
for too low C<Accuracy> settings is undefined.

There is no dedicated integer type.

=item *

Currently C<ar> and C<ap> have a foreign key to themselves with the name
C<storno_id>. If this foreign key is present in the C<INDEX.XML> then the
storno records have to be too. Since this is extremely awkward to code and
confusing for the examiner as to why there are records outside of the time
range, this export skips all self-referential foreign keys.

=item *

Documentation for foreign keys is extremely weird. Instead of giving column
maps it assumes that foreign keys map to the primary keys given for the target
table, and in that order. Foreign keys to keys that are not primary seem to be
impossible. Changing type is also not allowed (which actually makes sense).
Hopefully there are no bugs there.

=item *

It's currently disallowed to export the whole dataset. It's not clear if this
is wanted.

=back

=head1 AUTHOR

Sven Schöling E<lt>s.schoeling@linet-services.deE<gt>

=cut
```
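A minimal usage sketch of the module as documented above (not part of the commit). The company data, dates, and table list are made up, and DateTime is assumed to be available as it is elsewhere in kivitendo:

```perl
# Hedged sketch: programmatic use of SL::GDPDU, mirroring what
# SL::Controller::Gdpdu::action_export does. All values are made up.
use strict;
use warnings;
use DateTime;
use SL::GDPDU;

my $export = SL::GDPDU->new(
  company  => 'Example GmbH',                              # made-up company
  location => 'Berlin',                                    # made-up location
  from     => DateTime->new(year => 2014, month =>  1, day =>  1),
  to       => DateTime->new(year => 2014, month => 12, day => 31),
  tables   => [ qw(ar invoice customer acc_trans chart) ],
);

# Returns the absolute path of a temporary zip file containing INDEX.XML,
# one CSV per exported table and the DTD; the caller is responsible for
# removing it (the controller passes unlink => 1 to send_file).
my $zipfile = $export->generate_export;
```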
menus/user/00-erp.yaml (modified; only the lines marked with + are new)

```diff
   access: report
   params:
     action: LiquidityProjection/show
+- parent: reports
+  id: reports_gdpdu_export
+  name: GDPDU Export
+  order: 1000
+  access: report
+  params:
+    action: Gdpdu/filter
 - id: batch_printing
   name: Batch Printing
   icon: printing
```
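For context (a simplified assumption about kivitendo's controller dispatch, not verified against the dispatcher code): the `action: Gdpdu/filter` value names the controller and action that serve this menu entry, and the `access: report` right matches the controller's `check_auth`.

```perl
# Hedged sketch of the assumed routing convention: "Gdpdu/filter" names the
# controller class and the action method; the same 'report' right guards both
# the menu entry (access:) and the controller ($::auth->assert('report')).
use strict;
use warnings;

my ($controller, $action) = split m{/}, 'Gdpdu/filter';   # ('Gdpdu', 'filter')
my $class  = "SL::Controller::$controller";               # 'SL::Controller::Gdpdu'
my $method = "action_$action";                            # 'action_filter'
```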
templates/webpages/gdpdu/filter.html (new file)

```html
[%- USE HTML %]
[%- USE T8 %]
[%- USE L %]
[%- USE LxERP %]

<h1>[% title | html %]</h1>

[%- PROCESS 'common/flash.html' %]

<p>[% 'This export will include all records in the given time range and all supporting information from checked entities. You will receive a single zip file. Please extract this file onto the data medium requested by your auditor.' | $T8 %]</p>

<form>

<table>
 <tr>
  <td>[% 'From Date' | $T8 %]</td>
  <td>[% L.date_tag('from', SELF.from) %]</td>
 </tr>
 <tr>
  <td>[% 'To Date' | $T8 %]</td>
  <td>[% L.date_tag('to', SELF.to) %]</td>
 </tr>
 <tr>
  <td>[% 'Include in Report' | $T8 %]</td>
  <td>
   [% L.checkbox_tag('tables.ar', label=LxERP.t8('Invoices'), checked=1) %]
   [% L.checkbox_tag('tables.ap', label=LxERP.t8('Purchase Invoices'), checked=1) %]
   [% L.checkbox_tag('tables.gl', label=LxERP.t8('GL Transactions'), checked=1) %]
   [% L.checkbox_tag('tables.delivery_orders', label=LxERP.t8('Delivery Orders'), checked=1) %]
   [% L.checkbox_tag('tables.oe', label=LxERP.t8('Quotations and orders'), checked=1) %]
   [% L.checkbox_tag('tables.customer', label=LxERP.t8('Customers'), checked=1) %]
   [% L.checkbox_tag('tables.vendor', label=LxERP.t8('Vendors'), checked=1) %]
   [% L.checkbox_tag('tables.parts', label=LxERP.t8('Parts'), checked=1) %]
   [% L.checkbox_tag('tables.acc_trans', label=LxERP.t8('Transactions'), checked=1) %]
   [% L.checkbox_tag('tables.chart', label=LxERP.t8('Charts'), checked=1) %]
  </td>
 </tr>
</table>

[% L.hidden_tag('action', 'Gdpdu/dispatch') %]

[% L.submit_tag('action_export', LxERP.t8('Export')) %]

</form>
```
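A brief note on the checkbox names (an assumption about how kivitendo parses dotted form names, consistent with what the controller reads): `tables.ar`, `tables.ap`, and so on arrive in the controller as a nested hash, which is exactly what `check_inputs` iterates over.

```perl
# Hedged sketch: assumed shape of $::form after submitting this filter form.
# The dotted names from L.checkbox_tag ('tables.ar', ...) end up as a nested
# hash -- this is what SL::Controller::Gdpdu::check_inputs reads.
use strict;
use warnings;

my $form = {
  from   => '01.01.2014',                        # made-up dates
  to     => '31.12.2014',
  action => 'Gdpdu/dispatch',
  tables => { ar => 1, ap => 1, gl => 1 },       # only the ticked boxes
};

my @requested = sort keys %{ $form->{tables} };  # ('ap', 'ar', 'gl')
```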
users/gdpdu-01-08-2002.dtd (new file)

```dtd
<?xml version="1.0" encoding="UTF-8"?>
<!--Versions available:
    1.1 (August-01-2002)
-->

<!-- Start Simple Types -->

<!-- Supplementary Vocabulary -->
<!ELEMENT Version (#PCDATA)>
<!ELEMENT Location (#PCDATA)>
<!ELEMENT Comment (#PCDATA)>
<!ELEMENT Length (#PCDATA)>
<!ELEMENT References (#PCDATA)>
<!ELEMENT From (#PCDATA)>
<!ELEMENT To (#PCDATA)>

<!-- Specifying a maximum length for a VariableLength column can
     reduce a VariableLength table's import time. If MaxLength
     is not specified then we parse URL to determine the MaxLength
     for each column.

     * Only applies to VariableLength tables. -->
<!ELEMENT MaxLength (#PCDATA)>

<!-- Specifies which character (if any) encapsulates a
     VariableLength AlphaNumeric column.

     Doublequote is the default TextEncapsulator "

     * Only applies to VariableLength tables. (Optional) -->
<!ELEMENT TextEncapsulator (#PCDATA)>

<!-- Specifies how many digits appear to the right of the decimal symbol.

     CAUTION: Results are undefined when importing numeric data with
     greater Accuracy than the Accuracy defined in index.xml

     For example trying to import the value 1000,25 with an
     accuracy of 0 might result in 1000 or an error. This
     behavior is specific to the implementation.

     Zero is the default Accuracy '0' (Optional)
-->
<!ELEMENT Accuracy (#PCDATA)>

<!-- The decimal place is not always stored with numbers. If each number
     is supposed to have decimal places use ImpliedAccuracy -->
<!ELEMENT ImpliedAccuracy (#PCDATA)>

<!-- Enables you to change how GDPdU displays dates.
     DD.MM.YYYY is the default Format -->
<!ELEMENT Format (#PCDATA)>

<!-- Specifies the symbol that indicates decimal values.
     Comma is the default DecimalSymbol. ','
     Specified once per Table. -->
<!ELEMENT DecimalSymbol (#PCDATA)>

<!-- Specifies the symbol that groups the digits in large numbers.
     Dot is the default DigitGroupingSymbol or ThousandsSeparator. '.'
     Specified once per Table -->
<!ELEMENT DigitGroupingSymbol (#PCDATA)>

<!-- Command(s) are executed in the following manner
     * before the import process
     * after the import process
     * before a Media is imported
     * after a Media is imported
-->
<!ELEMENT Command (#PCDATA)>

<!-- Only the file protocol is supported at this time.

     * The standard uses relative URLs.

     Absolute URLs are not allowed. The following are all invalid:
     * http://www.somewhere.com/data/Accounts.dat
     * ftp://ftp.somewhere.com/data/Accounts.dat
     * file://localhost/Accounts.dat
     * file:///Accounts.dat

     The following are valid examples
     * Accounts.dat
     * data/Accounts.dat
     * data/january/Accounts.dat
     * ../Accounts.dat
-->
<!ELEMENT URL (#PCDATA)>

<!-- Textual description of specified element (Optional) -->
<!ELEMENT Description (#PCDATA)>

<!-- The logical name of specified element.
     Sometimes referred to as the business name.

     If missing, URL will be used in place of Name. -->
<!ELEMENT Name (#PCDATA)>

<!-- Y2K Window: Any year before Epoch is 2000+
     Default value 30. -->
<!ELEMENT Epoch (#PCDATA)>

<!-- Element(s) that separate columns or records.
     Semicolon is the default ColumnDelimiter. ';'
     CRLF is the default RecordDelimiter. -->
<!ELEMENT ColumnDelimiter (#PCDATA)>
<!ELEMENT RecordDelimiter (#PCDATA)>

<!-- The number of bytes skipped before reading of URL commences.
     Zero is the default when not specified. '0'
-->
<!ELEMENT SkipNumBytes (#PCDATA)>

<!-- End Simple Types -->
<!-- Start Complex Types -->
<!-- Self-explanatory -->
<!ELEMENT Range (From, (To | Length)?)>
<!ELEMENT FixedRange (From, (To | Length))>

<!-- The document element -->
<!ELEMENT DataSet (Version, DataSupplier?, Command*, Media+, Command*)>

<!-- Supported datatypes (mandatory) -->
<!ELEMENT AlphaNumeric EMPTY>
<!ELEMENT Date (Format?)>
<!ELEMENT Numeric ((ImpliedAccuracy | Accuracy)?)>

<!-- Supported codepages:
     Be careful to explicitly define RecordDelimiter when using
     a non-default codepage.

     ANSI is the default codepage when not specified -->
<!ELEMENT ANSI EMPTY>
<!ELEMENT Macintosh EMPTY>
<!ELEMENT OEM EMPTY>
<!ELEMENT UTF16 EMPTY>
<!ELEMENT UTF7 EMPTY>
<!ELEMENT UTF8 EMPTY>

<!-- Supported file formats:
     FixedLength
     VariableLength -->
<!ELEMENT FixedLength ((Length | RecordDelimiter)?, ((FixedPrimaryKey+, FixedColumn*) | (FixedColumn+)), ForeignKey*)>
<!ELEMENT FixedColumn (Name, Description?, (Numeric | AlphaNumeric | Date), Map*, FixedRange)>
<!ELEMENT FixedPrimaryKey (Name, Description?, (Numeric | AlphaNumeric | Date), Map*, FixedRange)>
<!ELEMENT VariableLength (ColumnDelimiter?, RecordDelimiter?, TextEncapsulator?, ((VariablePrimaryKey+, VariableColumn*) | (VariableColumn+)), ForeignKey*)>
<!ELEMENT VariableColumn (Name, Description?, (Numeric | (AlphaNumeric, MaxLength?) | Date), Map*)>
<!ELEMENT VariablePrimaryKey (Name, Description?, (Numeric | (AlphaNumeric, MaxLength?) | Date), Map*)>

<!-- Description of the entity supplying the data. (Optional) -->
<!ELEMENT DataSupplier (Name, Location, Comment)>

<!-- The first Media will contain index.xml. Importing will process each media listed -->
<!ELEMENT Media (Name, Command*, Table+, Command*)>

<!-- Elements common to FixedLength & VariableLength are propagated to Table. -->
<!ELEMENT Table (URL, Name?, Description?, Validity?, (ANSI | Macintosh | OEM | UTF16 | UTF7 | UTF8)?, (DecimalSymbol, DigitGroupingSymbol)?, SkipNumBytes?, Range?, Epoch?, (VariableLength | FixedLength))>

<!-- ForeignKeys denote joins or relationships between tables.
     To successfully join two tables make sure both the PrimaryKey
     and the referenced column (foreignkey) are of the same datatype.
     Results are undefined when joining two tables with different
     key datatypes. Most likely an error will occur. -->
<!ELEMENT ForeignKey (Name+, References)>

<!-- Maps AlphaNumeric columns from 'From' to 'To'
     ie.  From          To
          ============  =============
          True          1
          True          -1
          False         0

     Basically, a map is an associative container.

     The standard implementation only supports
     AlphaNumeric datatypes. The following
     conversions are NOT supported.

     Numeric      -> AlphaNumeric
     Date         -> AlphaNumeric
     AlphaNumeric -> Date
     AlphaNumeric -> Numeric
-->
<!ELEMENT Map (Description?, From, To)>

<!-- Documentation for table validity. -->
<!ELEMENT Validity (Range, Format?)>

<!-- End Complex Types -->
```
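To connect the DTD with the generator above: a standalone sketch (not part of the commit) of the coderef-based `tag()` helper from SL::GDPDU, producing the kind of `Validity` fragment that `table()` writes into INDEX.XML. The date range is made up.

```perl
# Hedged, self-contained sketch of the tag() nesting idea from SL::GDPDU:
# a coderef argument nests child elements, anything else becomes text content.
use strict;
use warnings;
use XML::Writer;

open my $out, '>', \my $xml or die $!;
my $writer = XML::Writer->new(OUTPUT => $out, DATA_MODE => 1, DATA_INDENT => 2);

sub tag {
  my ($name, $content) = @_;
  $writer->startTag($name);
  'CODE' eq ref $content ? $content->() : $writer->characters($content);
  $writer->endTag;
}

# Mirrors the Validity block emitted per <Table> by SL::GDPDU::table().
tag('Validity', sub {
  tag('Range', sub {
    tag('From', '01.01.2014');    # made-up range
    tag('To',   '31.12.2014');
  });
  tag('Format', 'DD.MM.YYYY');
});

$writer->end;
print $xml, "\n";
```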
Commit message: GDPdU Export - first version