package DBIx::Custom::Result;
use Object::Simple -base;

use Carp 'croak';
use DBIx::Custom::Util qw/_array_to_hash _subname/;

# Generated accessors: filter registry, filtering/type-rule switches,
# and the underlying DBI statement handle.
has [qw/filters filter_off sth type_rule_off type_rule1_off type_rule2_off/];

# Per-result scratch storage (hash reference), created lazily.
has stash => sub { {} };

# `all` is an alias for fetch_hash_all.
*all = \&fetch_hash_all;
11 | ||
cleanup
|
12 |
sub filter {
    my $self = shift;
    
    # Setter: merge new per-column filters into the existing ones.
    if (@_) {
        # Accept a hash reference, a flat list, or an array reference
        # ([qw/col1 col2/] => 'filter_name' pairs).
        my $filter = @_ == 1 ? $_[0] : [@_];
        $filter = _array_to_hash($filter);
        
        # Resolve registered filter names to code references.
        foreach my $column (keys %$filter) {
            my $fname = $filter->{$column};
            if (exists $filter->{$column}
              && defined $fname
              && ref $fname ne 'CODE')
            {
                # Fixed: the original message had a stray double quote
                # after "registered".
                croak qq{Filter "$fname" is not registered } . _subname
                  unless exists $self->filters->{$fname};
                $filter->{$column} = $self->filters->{$fname};
            }
        }
        
        # Merge with any previously set filters (new entries win).
        $self->{filter} = {%{$self->filter}, %$filter};
        
        return $self;
    }
    
    # Getter: current column-to-filter map (lazily initialized).
    return $self->{filter} ||= {};
}
|
41 | ||
packaging one directory
|
42 |
sub fetch {
    my $self = shift;
    
    # Pull the next row as a flat list; end of data returns empty.
    my @row = $self->{sth}->fetchrow_array;
    return unless @row;
    
    # Metadata and filter configuration for this result set.
    my $columns    = $self->{sth}->{NAME};
    my $types      = $self->{sth}->{TYPE};
    my $type_rule1 = $self->type_rule->{from1} || {};
    my $type_rule2 = $self->type_rule->{from2} || {};
    my $filter     = $self->filter;
    my $end_filter = $self->end_filter;
    
    for (my $i = 0; $i < @$columns; $i++) {
        my $column = $columns->[$i];
        
        # Stage 1: type-rule conversion (from1 then from2), keyed by
        # the lower-cased DBI data type, unless switched off.
        my $type_filter1 = $type_rule1->{lc($types->[$i])};
        $row[$i] = $type_filter1->($row[$i])
          if $type_filter1 && !$self->{type_rule_off}
            && !$self->{type_rule1_off};
        my $type_filter2 = $type_rule2->{lc($types->[$i])};
        $row[$i] = $type_filter2->($row[$i])
          if $type_filter2 && !$self->{type_rule_off}
            && !$self->{type_rule2_off};
        
        # Stage 2: per-column (or default) filter, then end filter.
        my $f = $filter->{$column} || $self->{default_filter};
        $row[$i] = $f->($row[$i])
          if $f && !$self->{filter_off};
        $row[$i] = $end_filter->{$column}->($row[$i])
          if $end_filter->{$column} && !$self->{filter_off};
    }
    
    return \@row;
}
|
81 | ||
cleanup
|
82 |
sub fetch_all {
    my $self = shift;
    
    # Drain the statement handle, collecting each row (array ref).
    my @rows;
    while (my $row = $self->fetch) { push @rows, $row }
    
    return \@rows;
}
|
91 | ||
removed reconnect method
|
92 |
sub fetch_first {
    my $self = shift;
    
    # Take only the first row; give up if none remain.
    my $row = $self->fetch;
    return unless $row;
    
    # Release the statement handle early.
    $self->sth->finish;
    
    return $row;
}
|
104 | ||
packaging one directory
|
105 |
sub fetch_hash {
    my $self = shift;
    
    # Pull the next row as an array reference; undef at end of data.
    my $row = $self->{sth}->fetchrow_arrayref;
    return unless $row;
    
    # Metadata and filter configuration for this result set.
    my $hash_row   = {};
    my $filter     = $self->filter;
    my $end_filter = $self->end_filter || {};
    my $columns    = $self->{sth}->{NAME};
    my $types      = $self->{sth}->{TYPE};
    my $type_rule1 = $self->type_rule->{from1} || {};
    my $type_rule2 = $self->type_rule->{from2} || {};
    
    for (my $i = 0; $i < @$columns; $i++) {
        my $column = $columns->[$i];
        $hash_row->{$column} = $row->[$i];
        
        # Stage 1: type-rule conversion (from1 then from2), keyed by
        # the lower-cased DBI data type, unless switched off.
        my $type_filter1 = $type_rule1->{lc($types->[$i])};
        $hash_row->{$column} = $type_filter1->($hash_row->{$column})
          if !$self->{type_rule_off} && !$self->{type_rule1_off}
            && $type_filter1;
        my $type_filter2 = $type_rule2->{lc($types->[$i])};
        $hash_row->{$column} = $type_filter2->($hash_row->{$column})
          if !$self->{type_rule_off} && !$self->{type_rule2_off}
            && $type_filter2;
        
        # Stage 2: per-column (or default) filter, then end filter.
        my $f = $filter->{$column} || $self->{default_filter};
        $hash_row->{$column} = $f->($hash_row->{$column})
          if $f && !$self->{filter_off};
        $hash_row->{$column} = $end_filter->{$column}->($hash_row->{$column})
          if $end_filter->{$column} && !$self->{filter_off};
    }
    
    return $hash_row;
}
146 | ||
cleanup
|
147 |
sub fetch_hash_all {
    my $self = shift;
    
    # Drain the statement handle, collecting each row (hash ref).
    my @rows;
    while (my $row = $self->fetch_hash) { push @rows, $row }
    
    return \@rows;
}
|
156 | ||
removed reconnect method
|
157 |
sub fetch_hash_first {
    my $self = shift;
    
    # Take only the first row as a hash reference.
    my $row = $self->fetch_hash;
    return unless $row;
    
    # Release the statement handle early.
    $self->sth->finish;
    
    return $row;
}
169 | ||
renamed fetch_rows to fetch_...
|
170 |
sub fetch_hash_multi {
    my ($self, $count) = @_;
    
    # A positive row count is mandatory.
    croak 'Row count must be specified ' . _subname
      unless $count;
    
    # Collect up to $count rows as hash references.
    my $rows = [];
    for my $i (1 .. $count) {
        my $row = $self->fetch_hash;
        last unless $row;
        push @$rows, $row;
    }
    
    # An empty batch signals exhaustion by returning nothing.
    return unless @$rows;
    return $rows;
}
186 | ||
cleanup
|
187 |
sub fetch_multi {
    my ($self, $count) = @_;
    
    # Row count not specified (typo "specifed" fixed).
    croak 'Row count must be specified ' . _subname
      unless $count;
    
    # Collect up to $count rows as array references.
    my $rows = [];
    for my $i (1 .. $count) {
        my $row = $self->fetch;
        last unless $row;
        push @$rows, $row;
    }
    
    # An empty batch signals exhaustion by returning nothing.
    return unless @$rows;
    return $rows;
}
205 | ||
- added DBIx::Custom::Result...
|
206 |
# `one` is an alias for fetch_hash_first.
*one = \&fetch_hash_first;
207 | ||
added EXPERIMENTAL DBIx::Cus...
|
208 |
sub type_rule {
    my $self = shift;
    
    # Setter: replace the type rule (hash reference or flat list).
    if (@_) {
        my $type_rule = ref $_[0] eq 'HASH' ? $_[0] : {@_};
        
        # Normalize the from1/from2 sections and resolve filter names.
        foreach my $i (1 .. 2) {
            $type_rule->{"from$i"} = _array_to_hash($type_rule->{"from$i"});
            foreach my $data_type (keys %{$type_rule->{"from$i"} || {}}) {
                # Data-type keys must be lower case (or numeric).
                croak qq{data type of from$i section must be lower case or number}
                  if $data_type =~ /[A-Z]/;
                my $fname = $type_rule->{"from$i"}{$data_type};
                if (defined $fname && ref $fname ne 'CODE') {
                    # Fixed: the original message had a stray double
                    # quote after "registered".
                    croak qq{Filter "$fname" is not registered } . _subname
                      unless exists $self->filters->{$fname};
                    
                    $type_rule->{"from$i"}{$data_type} = $self->filters->{$fname};
                }
            }
        }
        $self->{type_rule} = $type_rule;
        
        return $self;
    }
    
    # Getter: current type rule (empty hash when unset).
    return $self->{type_rule} || {};
}
236 | ||
cleanup
|
237 |
# DEPRECATED!
sub end_filter {
    my $self = shift;
    if (@_) {
        # Accept a hash reference, a flat list, or a single pair value.
        my $end_filter = {};
        if (ref $_[0] eq 'HASH') { $end_filter = $_[0] }
        else {
            $end_filter = _array_to_hash(
                @_ > 1 ? [@_] : $_[0]
            );
        }
        # Resolve registered filter names to code references.
        foreach my $column (keys %$end_filter) {
            my $fname = $end_filter->{$column};
            if (exists $end_filter->{$column}
              && defined $fname
              && ref $fname ne 'CODE')
            {
                # Fixed: the original message had a stray double quote
                # after "registered".
                croak qq{Filter "$fname" is not registered } . _subname
                  unless exists $self->filters->{$fname};
                $end_filter->{$column} = $self->filters->{$fname};
            }
        }
        # Merge with any previously set end filters (new entries win).
        $self->{end_filter} = {%{$self->end_filter}, %$end_filter};
        return $self;
    }
    return $self->{end_filter} ||= {};
}
|
264 | ||
cleanup
|
265 |
# DEPRECATED!
sub remove_end_filter {
    my $self = shift;
    # Clear all end filters; kept only for backward compatibility.
    warn "remove_end_filter is DEPRECATED! use filter_off attribute instead";
    $self->{end_filter} = {};
    return $self;
}
|
272 | ||
cleanup
|
273 |
# DEPRECATED!
sub remove_filter {
    my $self = shift;
    # Clear all column filters; kept only for backward compatibility.
    warn "remove_filter is DEPRECATED! use filter_off attribute instead";
    $self->{filter} = {};
    return $self;
}
|
280 | ||
cleanup
|
281 |
# DEPRECATED!
sub default_filter {
    my $self = shift;
    warn "default_filter is DEPRECATED!";
    
    # Setter: a false name clears the default filter, otherwise the
    # named registered filter becomes the default.
    if (@_) {
        my $fname = $_[0];
        if (@_ && !$fname) {
            $self->{default_filter} = undef;
        }
        else {
            croak qq{Filter "$fname" is not registered}
              unless exists $self->filters->{$fname};
            $self->{default_filter} = $self->filters->{$fname};
        }
        return $self;
    }
    
    # Getter: the current default filter code ref (or undef).
    return $self->{default_filter};
}
|
299 | ||
cleanup
|
300 |
# DEPRECATED!
has 'filter_check';

1;
304 | ||
packaging one directory
|
305 |
=head1 NAME |
306 | ||
cleanup
|
307 |
DBIx::Custom::Result - Result of select statement |
packaging one directory
|
308 | |
update document
|
309 |
=head1 SYNOPSIS |
cleanup
|
310 | |
removed reconnect method
|
311 |
# Result |
cleanup
|
312 |
my $result = $dbi->select(table => 'book'); |
cleanup
|
313 | |
cleanup
|
314 |
# Fetch a row and put it into array reference |
removed reconnect method
|
315 |
while (my $row = $result->fetch) { |
cleanup
|
316 |
my $author = $row->[0]; |
317 |
my $title = $row->[1]; |
|
version 0.0901
|
318 |
} |
319 |
|
|
cleanup
|
320 |
# Fetch only a first row and put it into array reference |
removed reconnect method
|
321 |
my $row = $result->fetch_first; |
322 |
|
|
cleanup
|
323 |
# Fetch all rows and put them into array of array reference |
removed reconnect method
|
324 |
my $rows = $result->fetch_all; |
cleanup
|
325 | |
cleanup
|
326 |
# Fetch a row and put it into hash reference |
removed reconnect method
|
327 |
while (my $row = $result->fetch_hash) { |
cleanup
|
328 |
my $title = $row->{title}; |
329 |
my $author = $row->{author}; |
|
packaging one directory
|
330 |
} |
removed reconnect method
|
331 |
|
cleanup
|
332 |
# Fetch only a first row and put it into hash reference |
removed reconnect method
|
333 |
my $row = $result->fetch_hash_first; |
cleanup
|
334 |
my $row = $result->one; # Same as fetch_hash_first |
removed reconnect method
|
335 |
|
cleanup
|
336 |
# Fetch all rows and put them into array of hash reference |
removed reconnect method
|
337 |
my $rows = $result->fetch_hash_all; |
cleanup
|
338 |
my $rows = $result->all; # Same as fetch_hash_all |
packaging one directory
|
339 | |
update document
|
340 |
=head1 ATTRIBUTES |
packaging one directory
|
341 | |
updated_pod
|
342 |
=head2 C<filter_off> EXPERIMENTAL |
343 | ||
344 |
my $filter_off = $result->filter_off;
|
345 |
$result = $result->filter_off(1); |
|
346 | ||
cleanup
|
347 |
Filtering by C<filter> method is turned off. |
updated_pod
|
348 | |
cleanup
|
349 |
=head2 C<filters> |
350 | ||
351 |
my $filters = $result->filters; |
|
cleanup
|
352 |
$result = $result->filters(\%filters); |
cleanup
|
353 | |
cleanup
|
354 |
Filters. |
cleanup
|
355 | |
356 |
=head2 C<sth> |
|
357 | ||
358 |
my $sth = $result->sth;
|
359 |
$result = $result->sth($sth); |
|
360 | ||
361 |
Statement handle of L<DBI>. |
|
362 | ||
added EXPERIMENTAL execute()...
|
363 |
=head2 C<type_rule_off> EXPERIMENTAL |
364 | ||
365 |
my $type_rule_off = $result->type_rule_off; |
|
366 |
$result = $result->type_rule_off(1); |
|
367 | ||
EXPERIMENTAL type_rule argum...
|
368 |
Turn C<from1> and C<from2> type rule off. |
369 | ||
370 |
=head2 C<type_rule1_off> EXPERIMENTAL |
|
371 | ||
372 |
my $type_rule1_off = $result->type_rule1_off; |
|
373 |
$result = $result->type_rule1_off(1); |
|
374 | ||
375 |
Turn C<from1> type rule off. |
|
376 | ||
377 |
=head2 C<type_rule2_off> EXPERIMENTAL |
|
378 | ||
379 |
my $type_rule2_off = $result->type_rule2_off; |
|
380 |
$result = $result->type_rule2_off(1); |
|
381 | ||
382 |
Turn C<from2> type rule off. |
|
added EXPERIMENTAL execute()...
|
383 | |
update document
|
384 |
=head1 METHODS |
385 | ||
renamed build_query to creat...
|
386 |
L<DBIx::Custom::Result> inherits all methods from L<Object::Simple> |
cleanup
|
387 |
and implements the following new ones. |
packaging one directory
|
388 | |
updated pod
|
389 |
=head2 C<all> |
390 | ||
391 |
my $rows = $result->all; |
|
392 | ||
cleanup
|
393 |
Same as C<fetch_hash_all>. |
updated pod
|
394 | |
removed DBIx::Custom commit ...
|
395 |
=head2 C<fetch> |
packaging one directory
|
396 | |
cleanup
|
397 |
my $row = $result->fetch; |
version 0.0901
|
398 | |
cleanup
|
399 |
Fetch a row and put it into array reference. |
packaging one directory
|
400 | |
removed DBIx::Custom commit ...
|
401 |
=head2 C<fetch_all> |
packaging one directory
|
402 | |
cleanup
|
403 |
my $rows = $result->fetch_all; |
version 0.0901
|
404 | |
cleanup
|
405 |
Fetch all rows and put them into array of array reference. |
packaging one directory
|
406 | |
cleanup
|
407 |
=head2 C<fetch_first> |
408 | ||
409 |
my $row = $result->fetch_first; |
|
410 | ||
cleanup
|
411 |
Fetch only a first row and put it into array reference, |
412 |
and finish statement handle.
|
cleanup
|
413 | |
removed DESTROY method(not b...
|
414 |
=head2 C<fetch_hash> |
packaging one directory
|
415 | |
cleanup
|
416 |
my $row = $result->fetch_hash; |
packaging one directory
|
417 | |
cleanup
|
418 |
Fetch a row and put it into hash reference. |
update document
|
419 | |
cleanup
|
420 |
=head2 C<fetch_hash_all> |
421 | ||
422 |
my $rows = $result->fetch_hash_all; |
|
423 | ||
cleanup
|
424 |
Fetch all rows and put them into array of hash reference. |
cleanup
|
425 | |
removed DBIx::Custom commit ...
|
426 |
=head2 C<fetch_hash_first> |
removed reconnect method
|
427 |
|
cleanup
|
428 |
my $row = $result->fetch_hash_first; |
packaging one directory
|
429 | |
cleanup
|
430 |
Fetch only a first row and put it into hash reference, |
431 |
and finish statement handle.
|
packaging one directory
|
432 | |
removed DESTROY method(not b...
|
433 |
=head2 C<fetch_hash_multi> |
update document
|
434 | |
cleanup
|
435 |
my $rows = $result->fetch_hash_multi(5); |
update document
|
436 |
|
cleanup
|
437 |
Fetch multiple rows and put them into array of hash reference. |
update document
|
438 | |
cleanup
|
439 |
=head2 C<fetch_multi> |
packaging one directory
|
440 | |
cleanup
|
441 |
my $rows = $result->fetch_multi(5); |
442 |
|
|
cleanup
|
443 |
Fetch multiple rows and put them into array of array reference. |
removed DESTROY method(not b...
|
444 | |
cleanup
|
445 |
=head2 C<filter> |
446 | ||
cleanup
|
447 |
$result->filter(title => sub { uc $_[0] }, author => 'to_upper'); |
448 |
$result->filter([qw/title author/] => 'to_upper'); |
|
added experimental DBIx::Cus...
|
449 | |
cleanup
|
450 |
Set filter for column. |
451 |
You can use subroutine or filter name as filter. |
|
- DBIx::Custom Model filter ...
|
452 |
This filter is executed after C<type_rule> filter. |
cleanup
|
453 | |
updated pod
|
454 |
=head2 C<one> |
455 | ||
456 |
my $row = $result->one; |
|
457 | ||
cleanup
|
458 |
Same as C<fetch_hash_first>. |
added experimental DBIx::Cus...
|
459 | |
- removed DEPRECATED DBIx::C...
|
460 |
=head2 C<stash> |
added experimental DBIx::Cus...
|
461 | |
462 |
my $stash = $result->stash; |
|
463 |
my $foo = $result->stash->{foo}; |
|
464 |
$result->stash->{foo} = $foo; |
|
465 | ||
cleanup
|
466 |
Stash is hash reference for data. |
added experimental DBIx::Cus...
|
467 | |
added EXPERIMENTAL DBIx::Cus...
|
468 |
=head2 C<type_rule> EXPERIMENTAL |
cleanup
|
469 |
|
470 |
# Merge type rule |
|
added EXPERIMENTAL DBIx::Cus...
|
471 |
$result->type_rule( |
472 |
# DATE |
|
473 |
9 => sub { ... }, |
|
474 |
# DATETIME or TIMESTAMP |
|
475 |
11 => sub { ... } |
|
476 |
); |
|
477 | ||
cleanup
|
478 |
# Replace type rule(by reference) |
479 |
$result->type_rule([ |
|
480 |
# DATE |
|
481 |
9 => sub { ... }, |
|
482 |
# DATETIME or TIMESTAMP |
|
483 |
11 => sub { ... } |
|
484 |
]); |
|
EXPERIMENTAL type_rule_off i...
|
485 | |
cleanup
|
486 |
This is same as L<DBIx::Custom>'s C<type_rule>'s <from>. |
EXPERIMENTAL type_rule_off i...
|
487 | |
packaging one directory
|
488 |
=cut |