package DBIx::Custom::Result;

use Object::Simple -base;

use Carp 'croak';
use DBIx::Custom::Util qw/_array_to_hash _subname/;

has [qw/filters filter_off sth type_rule_off/],
    stash => sub { {} };

*all = \&fetch_hash_all;

sub filter {
    my $self = shift;
    
    if (@_) {
        my $filter = {};
        
        if (ref $_[0] eq 'HASH') {
            $filter = $_[0];
        }
        else {
            $filter = _array_to_hash(
                @_ > 1 ? [@_] : $_[0]
            );
        }
        
        foreach my $column (keys %$filter) {
            my $fname = $filter->{$column};

            if (exists $filter->{$column}
              && defined $fname
              && ref $fname ne 'CODE')
            {
                croak qq{Filter "$fname" is not registered } . _subname
                  unless exists $self->filters->{$fname};
                
                $filter->{$column} = $self->filters->{$fname};
            }
        }
        
        $self->{filter} = {%{$self->filter}, %$filter};
        
        return $self;
    }
    
    return $self->{filter} ||= {};
}

sub fetch {
    my $self = shift;
    
    # Filter
    my $filter = $self->filter;
    
    # End filter
    my $end_filter = $self->{end_filter} || {};
    
    # Fetch
    my @row = $self->{sth}->fetchrow_array;
    
    # No row
    return unless @row;
    
    # Filtering
    my $columns = $self->{sth}->{NAME};
    my $types = $self->{sth}->{TYPE};
    my $type_rule = $self->type_rule || {};
    
    for (my $i = 0; $i < @$columns; $i++) {
        
        # Filter name
        my $column = $columns->[$i];
        my $f  = exists $filter->{$column}
               ? $filter->{$column}
               : $self->{default_filter};
        my $ef = $end_filter->{$column};
        
        # Filtering
        if ($f && !$self->filter_off) {
            $row[$i] = $f->($row[$i]);
        }
        elsif (!$self->type_rule_off && (my $rule = $type_rule->{lc($types->[$i])})) {
            $row[$i] = $rule->($row[$i]);
        }
        
        $row[$i] = $ef->($row[$i]) if $ef && !$self->filter_off;
    }
    
    return \@row;
}

sub fetch_all {
    my $self = shift;
    
    # Fetch all rows
    my $rows = [];
    while (my $row = $self->fetch) {
        push @$rows, $row;
    }
    return $rows;
}

sub fetch_first {
    my $self = shift;
    
    # Fetch
    my $row = $self->fetch;
    
    # No row
    return unless $row;
    
    # Finish statement handle
    $self->sth->finish;
    
    return $row;
}

sub fetch_hash {
    my $self = shift;
    
    # Filter
    my $filter = $self->filter;
    
    # End filter
    my $end_filter = $self->{end_filter} || {};
    
    # Fetch
    my $row = $self->{sth}->fetchrow_arrayref;
    
    # Cannot fetch
    return unless $row;
    
    # Filter
    my $row_hash = {};
    my $columns = $self->{sth}->{NAME};
    my $types = $self->{sth}->{TYPE};
    my $type_rule = $self->type_rule || {};
    for (my $i = 0; $i < @$columns; $i++) {
        
        # Filter name
        my $column = $columns->[$i];
        my $f  = exists $filter->{$column}
               ? $filter->{$column}
               : $self->{default_filter};
        my $ef = $end_filter->{$column};
        
        # Filtering
        if ($f && !$self->filter_off) {
            $row_hash->{$column} = $f->($row->[$i]);
        }
        elsif (!$self->type_rule_off && (my $rule = $type_rule->{lc($types->[$i])})) {
            $row_hash->{$column} = $rule->($row->[$i]);
        }
        else { $row_hash->{$column} = $row->[$i] }
        $row_hash->{$column} = $ef->($row_hash->{$column})
          if $ef && !$self->filter_off;
    }
    
    return $row_hash;
}

sub fetch_hash_all {
    my $self = shift;
    
    # Fetch all rows as hash
    my $rows = [];
    while (my $row = $self->fetch_hash) {
        push @$rows, $row;
    }
    
    return $rows;
}

sub fetch_hash_first {
    my $self = shift;
    
    # Fetch hash
    my $row = $self->fetch_hash;
    
    # No row
    return unless $row;
    
    # Finish statement handle
    $self->sth->finish;
    
    return $row;
}

sub fetch_hash_multi {
    my ($self, $count) = @_;
    
    # Row count not specified
    croak 'Row count must be specified ' . _subname
      unless $count;
    
    # Fetch multi rows
    my $rows = [];
    for (my $i = 0; $i < $count; $i++) {
        my $row = $self->fetch_hash;
        last unless $row;
        push @$rows, $row;
    }
    
    return unless @$rows;
    return $rows;
}

sub fetch_multi {
    my ($self, $count) = @_;
    
    # Row count not specified
    croak 'Row count must be specified ' . _subname
      unless $count;
    
    # Fetch multi rows
    my $rows = [];
    for (my $i = 0; $i < $count; $i++) {
        my $row = $self->fetch;
        last unless $row;
        push @$rows, $row;
    }
    
    return unless @$rows;
    return $rows;
}

*one = \&fetch_hash_first;

sub type_rule {
    my $self = shift;
    
    # Merge type rule
    if (@_) {
        my $type_rule = @_ == 1 ? $_[0] : [@_];
        $type_rule = _array_to_hash($type_rule) || {};
        foreach my $data_type (keys %{$type_rule || {}}) {
            croak qq{data type of from section must be lower case or number}
              if $data_type =~ /[A-Z]/;
            my $fname = $type_rule->{$data_type};
            if (defined $fname && ref $fname ne 'CODE') {
                croak qq{Filter "$fname" is not registered } . _subname
                  unless exists $self->filters->{$fname};
                
                $type_rule->{$data_type} = $self->filters->{$fname};
            }
        }
        $self->{type_rule} = {%{$self->type_rule}, %$type_rule};
    }
    
    return $self->{type_rule} ||= {};
}

sub clear_type_rule {
    my $self = shift;
    $self->{type_rule} = {};
    return $self;
}

# DEPRECATED!
sub end_filter {
    my $self = shift;
    
    if (@_) {
        my $end_filter = {};
        
        if (ref $_[0] eq 'HASH') {
            $end_filter = $_[0];
        }
        else {
            $end_filter = _array_to_hash(
                @_ > 1 ? [@_] : $_[0]
            );
        }
        
        foreach my $column (keys %$end_filter) {
            my $fname = $end_filter->{$column};
            
            if (exists $end_filter->{$column}
              && defined $fname
              && ref $fname ne 'CODE')
            {
                croak qq{Filter "$fname" is not registered } . _subname
                  unless exists $self->filters->{$fname};
                
                $end_filter->{$column} = $self->filters->{$fname};
            }
        }
        
        $self->{end_filter} = {%{$self->end_filter}, %$end_filter};
        
        return $self;
    }
    
    return $self->{end_filter} ||= {};
}

# DEPRECATED!
sub remove_end_filter {
    my $self = shift;
    
    warn "remove_end_filter is DEPRECATED! use filter_off attribute instead";
    
    $self->{end_filter} = {};
    
    return $self;
}

# DEPRECATED!
sub remove_filter {
    my $self = shift;
    
    warn "remove_filter is DEPRECATED! use filter_off attribute instead";
    
    $self->{filter} = {};
    
    return $self;
}

# DEPRECATED!
sub default_filter {
    my $self = shift;
    
    warn "default_filter is DEPRECATED!";
    
    if (@_) {
        my $fname = $_[0];
        if (@_ && !$fname) {
            $self->{default_filter} = undef;
        }
        else {
            croak qq{Filter "$fname" is not registered}
              unless exists $self->filters->{$fname};
            $self->{default_filter} = $self->filters->{$fname};
        }
        
        return $self;
    }
    
    return $self->{default_filter};
}

# DEPRECATED!
has 'filter_check';

1;

=head1 NAME

DBIx::Custom::Result - Result of select statement

=head1 SYNOPSIS

Get the result of a select statement.

    # Result
    my $result = $dbi->select(table => 'books');

Fetch rows into arrays.

    # Fetch a row into array
    while (my $row = $result->fetch) {
        my $author = $row->[0];
        my $title  = $row->[1];
    }
    
    # Fetch only the first row into array
    my $row = $result->fetch_first;
    
    # Fetch multiple rows into array of arrays
    while (my $rows = $result->fetch_multi(5)) {
        my $first_author  = $rows->[0][0];
        my $first_title   = $rows->[0][1];
        my $second_author = $rows->[1][0];
        my $second_title  = $rows->[1][1];
    }
    
    # Fetch all rows into array of arrays
    my $rows = $result->fetch_all;

Fetch rows into hashes.

    # Fetch a row into hash
    while (my $row = $result->fetch_hash) {
        my $title  = $row->{title};
        my $author = $row->{author};
    }
    
    # Fetch only the first row into hash
    my $row = $result->fetch_hash_first;
    
    # Fetch multiple rows into array of hashes
    while (my $rows = $result->fetch_hash_multi(5)) {
        my $first_title   = $rows->[0]{title};
        my $first_author  = $rows->[0]{author};
        my $second_title  = $rows->[1]{title};
        my $second_author = $rows->[1]{author};
    }
    
    # Fetch all rows into array of hashes
    my $rows = $result->fetch_hash_all;

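Filters and the type rule can also be controlled per result before fetching.
A minimal sketch, assuming a filter named C<trim> has been registered with
C<register_filter> of L<DBIx::Custom>:

    # Apply a column filter, then fetch (the "trim" filter name is an assumption)
    $result->filter(title => 'trim');
    my $row = $result->fetch_hash_first;

    # Or skip all filtering for this result
    $result->filter_off(1);
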
=head1 ATTRIBUTES

=head2 C<filter_off> EXPERIMENTAL

    my $filter_off = $result->filter_off;
    $result = $result->filter_off(1);

Turn filtering off.

=head2 C<filters>

    my $filters = $result->filters;
    $result = $result->filters(\%filters);

Registered filters.

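An individual filter code reference can be looked up by name. A small sketch,
assuming a filter named C<trim> has been registered:

    # Code reference of the registered "trim" filter (assumed name)
    my $trim = $result->filters->{trim};
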
=head2 C<sth>

    my $sth = $result->sth;
    $result = $result->sth($sth);

Statement handle of L<DBI>.

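The statement handle can be used directly for anything this class does not
wrap. A small sketch, using the standard L<DBI> C<NAME> attribute:

    # Array reference of column names for the current result set
    my $column_names = $result->sth->{NAME};
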
=head2 C<type_rule_off> EXPERIMENTAL

    my $type_rule_off = $result->type_rule_off;
    $result = $result->type_rule_off(1);

Turn the type rule off.

=head1 METHODS

L<DBIx::Custom::Result> inherits all methods from L<Object::Simple>
and implements the following new ones.

=head2 C<all>

    my $rows = $result->all;

This is an alias for C<fetch_hash_all>.

=head2 C<fetch>

    my $row = $result->fetch;

Fetch a row into an array.

=head2 C<fetch_all>

    my $rows = $result->fetch_all;

Fetch all rows into an array of arrays.

=head2 C<fetch_first>

    my $row = $result->fetch_first;

Fetch only the first row into an array and finish the statement handle.

=head2 C<fetch_hash>

    my $row = $result->fetch_hash;

Fetch a row into a hash.

=head2 C<fetch_hash_all>

    my $rows = $result->fetch_hash_all;

Fetch all rows into an array of hashes.

=head2 C<fetch_hash_first>

    my $row = $result->fetch_hash_first;

Fetch only the first row into a hash and finish the statement handle.

=head2 C<fetch_hash_multi>

    my $rows = $result->fetch_hash_multi(5);

Fetch multiple rows into an array of hashes.
Row count must be specified.

=head2 C<fetch_multi>

    my $rows = $result->fetch_multi(5);

Fetch multiple rows into an array of arrays.
Row count must be specified.

=head2 C<filter>

    $result = $result->filter(title  => 'to_something',
                              author => 'to_something');

    $result = $result->filter([qw/title author/] => 'to_something');

Set per-column filters that are applied when a row is fetched.
These filters override the filters applied by C<apply_filter> of
L<DBIx::Custom> and overwrite C<default_filter>.

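A filter value can also be a code reference instead of a registered filter
name. A minimal sketch (the C<title> column name is just an example):

    # Uppercase the title column while fetching
    $result->filter(title => sub { defined $_[0] ? uc $_[0] : undef });
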
=head2 C<one>

    my $row = $result->one;

This is an alias for C<fetch_hash_first>.

=head2 C<remove_filter> DEPRECATED!

    $result->remove_filter;

Remove the filter. The end filter is not removed.

=head2 C<stash>

    my $stash = $result->stash;
    my $foo = $result->stash->{foo};
    $result->stash->{foo} = $foo;

Stash is a hash reference for saving your own data.

=head2 C<type_rule> EXPERIMENTAL

    $result->type_rule(
        # DATE
        9 => sub { ... },
        # DATETIME or TIMESTAMP
        11 => sub { ... }
    );

This overrides the C<from> section of L<DBIx::Custom>'s C<type_rule>.

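A registered filter name can be used in place of a code reference. A sketch,
assuming a filter named C<to_date> has been registered with C<register_filter>
of L<DBIx::Custom>:

    # 9 is the DATE data type; "to_date" is an assumed filter name
    $result->type_rule(9 => 'to_date');
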
=head2 C<remove_end_filter> DEPRECATED!

    $result->remove_end_filter;

Remove the end filter.

=head2 C<end_filter> DEPRECATED!

    $result = $result->end_filter(title  => 'to_something',
                                  author => 'to_something');

    $result = $result->end_filter([qw/title author/] => 'to_something');

End filters. Each of these filters is executed after the filters applied by
C<apply_filter> of L<DBIx::Custom> or the C<filter> method.

=cut