package DBIx::Custom::Result;
use Object::Simple -base;

use Carp 'croak';
use DBIx::Custom::Util qw/_array_to_hash _subname/;

# Attributes: "dbi" holds the owning DBIx::Custom object, "sth" the
# DBI statement handle; "stash" is a per-result hash for user data.
has [qw/dbi sth/],
    stash => sub { {} };

# "all" is an alias for "fetch_hash_all".
*all = \&fetch_hash_all;
# Collect the first column's value of every row and return them
# as an array reference.
sub column {
    my $self = shift;

    # Each row from fetch_all is an array ref; keep element 0 only.
    my @values = map { $_->[0] } @{$self->fetch_all};
    return \@values;
}
20 | ||
cleanup
|
21 |
sub filter { |
22 |
my $self = shift; |
|
cleanup
|
23 |
|
cleanup
|
24 |
# Set |
cleanup
|
25 |
if (@_) { |
26 |
|
|
cleanup
|
27 |
# Convert filter name to subroutine |
28 |
my $filter = @_ == 1 ? $_[0] : [@_]; |
|
29 |
$filter = _array_to_hash($filter); |
|
cleanup
|
30 |
for my $column (keys %$filter) { |
cleanup
|
31 |
my $fname = $filter->{$column}; |
fix bug : filter can't over...
|
32 |
if (exists $filter->{$column} |
33 |
&& defined $fname |
|
34 |
&& ref $fname ne 'CODE') |
|
35 |
{ |
|
cleanup
|
36 |
croak qq{Filter "$fname" is not registered" } . _subname |
sub module use DBIx::Custom ...
|
37 |
unless exists $self->dbi->filters->{$fname}; |
38 |
$filter->{$column} = $self->dbi->filters->{$fname}; |
|
cleanup
|
39 |
} |
cleanup
|
40 |
} |
cleanup
|
41 |
|
cleanup
|
42 |
# Merge |
added experimental DBIx::Cus...
|
43 |
$self->{filter} = {%{$self->filter}, %$filter}; |
cleanup
|
44 |
|
45 |
return $self; |
|
cleanup
|
46 |
} |
47 |
|
|
added experimental DBIx::Cus...
|
48 |
return $self->{filter} ||= {}; |
49 |
} |
|
50 | ||
micro optimization
|
51 |
sub fetch { |
52 |
my $self = shift; |
|
53 |
|
|
54 |
# Info |
|
55 |
$self->_cache unless $self->{_cache}; |
|
56 |
|
|
57 |
# Fetch |
|
58 |
my @row = $self->{sth}->fetchrow_array; |
|
59 |
return unless @row; |
|
60 |
|
|
61 |
# Type rule |
|
- fixed bug DBIx::Custom::Re...
|
62 |
if ($self->{type_rule}->{from1} && !$self->{type_rule_off} && !$self->{type_rule1_off}) { |
63 |
my $from = $self->{type_rule}->{from1}; |
|
micro optimization
|
64 |
for my $type (keys %$from) { |
65 |
for my $column (@{$self->{_type_map}->{$type}}) { |
|
66 |
$row[$_] = $from->{$type}->($row[$_]) |
|
- fixed bug DBIx::Custom::Re...
|
67 |
for @{$self->{_pos}{$column} || []}; |
micro optimization
|
68 |
} |
69 |
} |
|
70 |
} |
|
- fixed bug DBIx::Custom::Re...
|
71 |
if ($self->{type_rule}->{from2} && !$self->{type_rule_off} && !$self->{type_rule2_off}) { |
72 |
my $from = $self->{type_rule}->{from2}; |
|
micro optimization
|
73 |
for my $type (keys %$from) { |
74 |
for my $column (@{$self->{_type_map}->{$type}}) { |
|
75 |
$row[$_] = $from->{$type}->($row[$_]) |
|
- fixed bug DBIx::Custom::Re...
|
76 |
for @{$self->{_pos}{$column} || []}; |
micro optimization
|
77 |
} |
78 |
} |
|
79 |
} |
|
80 |
|
|
81 |
# Filter |
|
82 |
if (($self->{filter} || $self->{default_filter}) && !$self->{filter_off}) { |
|
- fixed bug DBIx::Custom::Re...
|
83 |
my @columns = $self->{default_filter} ? keys %{$self->{_columns}} |
84 |
: keys %{$self->{filter}}; |
|
85 |
|
|
86 |
for my $column (@columns) { |
|
87 |
my $filter = exists $self->{filter}->{$column} ? $self->{filter}->{$column} |
|
88 |
: $self->{default_filter}; |
|
89 |
next unless $filter; |
|
90 |
$row[$_] = $filter->($row[$_]) |
|
91 |
for @{$self->{_pos}{$column} || []}; |
|
92 |
} |
|
micro optimization
|
93 |
} |
94 |
if ($self->{end_filter} && !$self->{filter_off}) { |
|
95 |
for my $column (keys %{$self->{end_filter}}) { |
|
- fixed bug DBIx::Custom::Re...
|
96 |
next unless $self->{end_filter}->{$column}; |
micro optimization
|
97 |
$row[$_] = $self->{end_filter}->{$column}->($row[$_]) |
- fixed bug DBIx::Custom::Re...
|
98 |
for @{$self->{_pos}{$column} || []}; |
micro optimization
|
99 |
} |
100 |
} |
|
101 | ||
102 |
return \@row; |
|
103 |
} |
|
104 | ||
105 |
sub fetch_hash { |
|
106 |
my $self = shift; |
|
107 |
|
|
108 |
# Info |
|
109 |
$self->_cache unless $self->{_cache}; |
|
110 |
|
|
111 |
# Fetch |
|
112 |
return unless my $row = $self->{sth}->fetchrow_hashref; |
|
113 |
|
|
114 |
# Type rule |
|
115 |
if ($self->{type_rule}->{from1} && |
|
116 |
!$self->{type_rule_off} && !$self->{type_rule1_off}) |
|
117 |
{ |
|
118 |
my $from = $self->{type_rule}->{from1}; |
|
119 |
for my $type (keys %$from) { |
|
120 |
$from->{$type} and $row->{$_} = $from->{$type}->($row->{$_}) |
|
121 |
for @{$self->{_type_map}->{$type}}; |
|
122 |
} |
|
123 |
} |
|
124 |
if ($self->{type_rule}->{from2} && |
|
125 |
!$self->{type_rule_off} && !$self->{type_rule2_off}) |
|
126 |
{ |
|
127 |
my $from = $self->{type_rule}->{from2}; |
|
128 |
for my $type (keys %{$self->{type_rule}->{from2}}) { |
|
129 |
$from->{$type} and $row->{$_} = $from->{$type}->($row->{$_}) |
|
130 |
for @{$self->{_type_map}->{$type}}; |
|
131 |
} |
|
132 |
} |
|
133 |
# Filter |
|
134 |
if (($self->{filter} || $self->{default_filter}) && |
|
135 |
!$self->{filter_off}) |
|
136 |
{ |
|
137 |
my @columns = $self->{default_filter} ? keys %{$self->{_columns}} |
|
138 |
: keys %{$self->{filter}}; |
|
139 |
|
|
140 |
for my $column (@columns) { |
|
141 |
next unless exists $row->{$column}; |
|
142 |
my $filter = exists $self->{filter}->{$column} ? $self->{filter}->{$column} |
|
143 |
: $self->{default_filter}; |
|
144 |
$row->{$column} = $filter->($row->{$column}) if $filter; |
|
145 |
} |
|
146 |
} |
|
147 |
if ($self->{end_filter} && !$self->{filter_off}) { |
|
148 |
exists $self->{_columns}{$_} && $self->{end_filter}->{$_} and |
|
149 |
$row->{$_} = $self->{end_filter}->{$_}->($row->{$_}) |
|
150 |
for keys %{$self->{end_filter}}; |
|
151 |
} |
|
152 |
$row; |
|
153 |
} |
|
154 | ||
cleanup
|
155 |
sub fetch_all { |
156 |
my $self = shift; |
|
157 |
|
|
158 |
# Fetch all rows |
|
159 |
my $rows = []; |
|
cleanup
|
160 |
while(my $row = $self->fetch) { push @$rows, $row} |
161 |
|
|
cleanup
|
162 |
return $rows; |
163 |
} |
|
164 | ||
165 |
sub fetch_hash_all { |
|
166 |
my $self = shift; |
|
167 |
|
|
168 |
# Fetch all rows as hash |
|
169 |
my $rows = []; |
|
cleanup
|
170 |
while(my $row = $self->fetch_hash) { push @$rows, $row } |
cleanup
|
171 |
|
172 |
return $rows; |
|
173 |
} |
|
174 | ||
- renamed DBIx::Custom::Resu...
|
175 |
sub fetch_hash_one { |
packaging one directory
|
176 |
my $self = shift; |
177 |
|
|
178 |
# Fetch hash |
|
179 |
my $row = $self->fetch_hash; |
|
180 |
return unless $row; |
|
181 |
|
|
182 |
# Finish statement handle |
|
some changed
|
183 |
$self->sth->finish; |
packaging one directory
|
184 |
|
removed reconnect method
|
185 |
return $row; |
packaging one directory
|
186 |
} |
187 | ||
renamed fetch_rows to fetch_...
|
188 |
sub fetch_hash_multi { |
packaging one directory
|
189 |
my ($self, $count) = @_; |
190 |
|
|
cleanup
|
191 |
# Fetch multiple rows |
cleanup
|
192 |
croak 'Row count must be specified ' . _subname |
packaging one directory
|
193 |
unless $count; |
- fixed bug DBIx::Custom::Re...
|
194 |
|
195 |
return if $self->{_finished}; |
|
196 | ||
packaging one directory
|
197 |
my $rows = []; |
198 |
for (my $i = 0; $i < $count; $i++) { |
|
removed reconnect method
|
199 |
my $row = $self->fetch_hash; |
- fixed bug DBIx::Custom::Re...
|
200 |
unless ($row) { |
201 |
$self->{_finished} = 1; |
|
202 |
last; |
|
203 |
} |
|
removed reconnect method
|
204 |
push @$rows, $row; |
packaging one directory
|
205 |
} |
206 |
|
|
207 |
return unless @$rows; |
|
removed reconnect method
|
208 |
return $rows; |
packaging one directory
|
209 |
} |
210 | ||
cleanup
|
211 |
sub fetch_multi { |
212 |
my ($self, $count) = @_; |
|
packaging one directory
|
213 |
|
cleanup
|
214 |
# Row count not specifed |
cleanup
|
215 |
croak 'Row count must be specified ' . _subname |
cleanup
|
216 |
unless $count; |
217 |
|
|
- fixed bug DBIx::Custom::Re...
|
218 |
return if $self->{_finished}; |
219 |
|
|
cleanup
|
220 |
# Fetch multi rows |
packaging one directory
|
221 |
my $rows = []; |
cleanup
|
222 |
for (my $i = 0; $i < $count; $i++) { |
223 |
my $row = $self->fetch; |
|
- fixed bug DBIx::Custom::Re...
|
224 |
unless ($row) { |
225 |
$self->{_finished} = 1; |
|
226 |
last; |
|
227 |
} |
|
removed reconnect method
|
228 |
push @$rows, $row; |
packaging one directory
|
229 |
} |
changed argument of tag proc...
|
230 |
|
cleanup
|
231 |
return unless @$rows; |
removed reconnect method
|
232 |
return $rows; |
packaging one directory
|
233 |
} |
234 | ||
- renamed DBIx::Custom::Resu...
|
235 | |
236 |
sub fetch_one { |
|
237 |
my $self = shift; |
|
238 |
|
|
239 |
# Fetch |
|
240 |
my $row = $self->fetch; |
|
241 |
return unless $row; |
|
242 |
|
|
243 |
# Finish statement handle |
|
244 |
$self->sth->finish; |
|
245 |
|
|
246 |
return $row; |
|
247 |
} |
|
248 | ||
added EXPERIMENTAL DBIx::Cus...
|
249 |
sub header { shift->sth->{NAME} } |
250 | ||
- renamed DBIx::Custom::Resu...
|
251 |
*one = \&fetch_hash_one; |
# Get or set the result-level type rule.
# Setter accepts a hash or a hash reference with "from1"/"from2"
# sections mapping lower-case data types to filters (code reference
# or name registered on $self->dbi); returns $self. Getter returns
# the type rule hash reference (empty hash when unset).
sub type_rule {
    my $self = shift;
    
    if (@_) {
        my $type_rule = ref $_[0] eq 'HASH' ? $_[0] : {@_};

        # Normalize and resolve the "from1" and "from2" sections.
        for my $i (1 .. 2) {
            $type_rule->{"from$i"} = _array_to_hash($type_rule->{"from$i"});
            for my $data_type (keys %{$type_rule->{"from$i"} || {}}) {
                croak qq{data type of from$i section must be lower case or number}
                  if $data_type =~ /[A-Z]/;
                my $fname = $type_rule->{"from$i"}{$data_type};
                if (defined $fname && ref $fname ne 'CODE') {
                    # Fixed: stray double quote removed from the message.
                    croak qq{Filter "$fname" is not registered } . _subname
                      unless exists $self->dbi->filters->{$fname};
                    
                    $type_rule->{"from$i"}{$data_type} = $self->dbi->filters->{$fname};
                }
            }
        }
        $self->{type_rule} = $type_rule;
        
        return $self;
    }
    
    return $self->{type_rule} || {};
}
281 | ||
- changed EXPERIMENTAL DBIx:...
|
282 |
sub type_rule_off { |
283 |
my $self = shift; |
|
284 |
$self->{type_rule_off} = 1; |
|
285 |
return $self; |
|
286 |
} |
|
287 | ||
288 |
sub type_rule_on { |
|
289 |
my $self = shift; |
|
290 |
$self->{type_rule_off} = 0; |
|
291 |
return $self; |
|
292 |
} |
|
293 | ||
294 |
sub type_rule1_off { |
|
295 |
my $self = shift; |
|
296 |
$self->{type_rule1_off} = 1; |
|
297 |
return $self; |
|
298 |
} |
|
299 | ||
300 |
sub type_rule1_on { |
|
301 |
my $self = shift; |
|
302 |
$self->{type_rule1_off} = 0; |
|
303 |
return $self; |
|
304 |
} |
|
305 | ||
306 |
sub type_rule2_off { |
|
307 |
my $self = shift; |
|
308 |
$self->{type_rule2_off} = 1; |
|
309 |
return $self; |
|
310 |
} |
|
311 | ||
312 |
sub type_rule2_on { |
|
313 |
my $self = shift; |
|
314 |
$self->{type_rule2_off} = 0; |
|
315 |
return $self; |
|
316 |
} |
|
317 | ||
- added EXPERIMENTAL DBIx::C...
|
318 |
sub value { |
319 |
my $self = shift; |
|
- renamed DBIx::Custom::Resu...
|
320 |
my $row = $self->fetch_one; |
- added EXPERIMENTAL DBIx::C...
|
321 |
my $value = $row ? $row->[0] : undef; |
322 |
return $value; |
|
323 |
} |
|
324 | ||
- fixed bug DBIx::Custom::Re...
|
325 |
sub _cache { |
326 |
my $self = shift; |
|
327 |
$self->{_type_map} = {}; |
|
328 |
$self->{_pos} = {}; |
|
329 |
$self->{_columns} = {}; |
|
330 |
for (my $i = 0; $i < @{$self->{sth}->{NAME}}; $i++) { |
|
331 |
my $type = lc $self->{sth}{TYPE}[$i]; |
|
332 |
my $name = $self->{sth}{NAME}[$i]; |
|
333 |
$self->{_type_map}{$type} ||= []; |
|
334 |
push @{$self->{_type_map}{$type}}, $name; |
|
335 |
$self->{_pos}{$name} ||= []; |
|
336 |
push @{$self->{_pos}{$name}}, $i; |
|
337 |
$self->{_columns}{$name} = 1; |
|
338 |
} |
|
339 |
$self->{_cache} = 1; |
|
340 |
} |
|
341 | ||
- renamed DBIx::Custom::Resu...
|
342 |
# DEPRECATED! |
343 |
sub fetch_hash_first { |
|
344 |
my $self = shift; |
|
345 |
warn "DBIx::Custom::Result::fetch_hash_first is DEPRECATED! use fetch_hash_one instead"; |
|
346 |
return $self->fetch_hash_one(@_); |
|
347 |
} |
|
348 | ||
349 |
# DEPRECATED! |
|
350 |
sub fetch_first { |
|
351 |
my $self = shift; |
|
352 |
warn "DBIx::Custom::Result::fetch_first is DEPRECATED! use fetch_one instead"; |
|
353 |
return $self->fetch_one(@_); |
|
354 |
} |
|
355 | ||
- DBIx::Custom::Result filte...
|
356 |
# DEPRECATED! |
357 |
sub filter_off { |
|
358 |
warn "filter_off method is DEPRECATED!"; |
|
359 |
my $self = shift; |
|
360 |
$self->{filter_off} = 1; |
|
361 |
return $self; |
|
362 |
} |
|
363 | ||
364 |
# DEPRECATED! |
|
365 |
sub filter_on { |
|
366 |
warn "filter_on method is DEPRECATED!"; |
|
367 |
my $self = shift; |
|
368 |
$self->{filter_off} = 0; |
|
369 |
return $self; |
|
370 |
} |
|
371 | ||
cleanup
|
372 |
# DEPRECATED! |
373 |
sub end_filter { |
|
- added EXPERIMENTAL order m...
|
374 |
warn "end_filter method is DEPRECATED!"; |
cleanup
|
375 |
my $self = shift; |
376 |
if (@_) { |
|
377 |
my $end_filter = {}; |
|
cleanup
|
378 |
if (ref $_[0] eq 'HASH') { $end_filter = $_[0] } |
379 |
else { |
|
cleanup
|
380 |
$end_filter = _array_to_hash( |
381 |
@_ > 1 ? [@_] : $_[0] |
|
382 |
); |
|
383 |
} |
|
cleanup
|
384 |
for my $column (keys %$end_filter) { |
cleanup
|
385 |
my $fname = $end_filter->{$column}; |
386 |
if (exists $end_filter->{$column} |
|
387 |
&& defined $fname |
|
388 |
&& ref $fname ne 'CODE') |
|
389 |
{ |
|
390 |
croak qq{Filter "$fname" is not registered" } . _subname |
|
sub module use DBIx::Custom ...
|
391 |
unless exists $self->dbi->filters->{$fname}; |
392 |
$end_filter->{$column} = $self->dbi->filters->{$fname}; |
|
cleanup
|
393 |
} |
394 |
} |
|
395 |
$self->{end_filter} = {%{$self->end_filter}, %$end_filter}; |
|
396 |
return $self; |
|
397 |
} |
|
398 |
return $self->{end_filter} ||= {}; |
|
399 |
} |
|
# DEPRECATED! Clear all end filters. Returns $self.
sub remove_end_filter {
    warn "remove_end_filter is DEPRECATED!";
    my ($self) = @_;
    $self->{end_filter} = {};
    return $self;
}
|
# DEPRECATED! Clear all column filters. Returns $self.
sub remove_filter {
    warn "remove_filter is DEPRECATED!";
    my ($self) = @_;
    $self->{filter} = {};
    return $self;
}
|
# DEPRECATED! Get or set the default column filter by registered
# filter name; a false name clears it. Returns $self as setter,
# the current default filter as getter.
sub default_filter {
    warn "default_filter is DEPRECATED!";
    my $self = shift;

    # Getter when called without arguments.
    return $self->{default_filter} unless @_;

    my $fname = $_[0];
    if (!$fname) {
        # A false name clears the default filter.
        $self->{default_filter} = undef;
    }
    else {
        croak qq{Filter "$fname" is not registered}
          unless exists $self->dbi->filters->{$fname};
        $self->{default_filter} = $self->dbi->filters->{$fname};
    }
    return $self;
}
|
# DEPRECATED!
has 'filter_check';

# A Perl module must end with a true value.
1;
436 | ||
packaging one directory
|
437 |
=head1 NAME |
438 | ||
cleanup
|
439 |
DBIx::Custom::Result - Result of select statement |
packaging one directory
|
440 | |
update document
|
441 |
=head1 SYNOPSIS |
cleanup
|
442 | |
removed reconnect method
|
443 |
# Result |
cleanup
|
444 |
my $result = $dbi->select(table => 'book'); |
cleanup
|
445 | |
cleanup
|
446 |
# Fetch a row and put it into array reference |
removed reconnect method
|
447 |
while (my $row = $result->fetch) { |
cleanup
|
448 |
my $author = $row->[0]; |
449 |
my $title = $row->[1]; |
|
version 0.0901
|
450 |
} |
451 |
|
|
cleanup
|
452 |
# Fetch only a first row and put it into array reference |
- renamed DBIx::Custom::Resu...
|
453 |
my $row = $result->fetch_one; |
removed reconnect method
|
454 |
|
cleanup
|
455 |
# Fetch all rows and put them into array of array reference |
removed reconnect method
|
456 |
my $rows = $result->fetch_all; |
cleanup
|
457 | |
cleanup
|
458 |
# Fetch a row and put it into hash reference |
removed reconnect method
|
459 |
while (my $row = $result->fetch_hash) { |
cleanup
|
460 |
my $title = $row->{title}; |
461 |
my $author = $row->{author}; |
|
packaging one directory
|
462 |
} |
removed reconnect method
|
463 |
|
cleanup
|
464 |
# Fetch only a first row and put it into hash reference |
- renamed DBIx::Custom::Resu...
|
465 |
my $row = $result->fetch_hash_one; |
466 |
my $row = $result->one; # Alias for "fetch_hash_one" |
|
removed reconnect method
|
467 |
|
cleanup
|
468 |
# Fetch all rows and put them into array of hash reference |
removed reconnect method
|
469 |
my $rows = $result->fetch_hash_all; |
- renamed DBIx::Custom::Resu...
|
470 |
my $rows = $result->all; # Alias for "fetch_hash_all" |
packaging one directory
|
471 | |
update document
|
472 |
=head1 ATTRIBUTES |
packaging one directory
|
473 | |
sub module use DBIx::Custom ...
|
474 |
=head2 C<dbi> |
cleanup
|
475 | |
sub module use DBIx::Custom ...
|
476 |
my $dbi = $result->dbi; |
477 |
$result = $result->dbi($dbi); |
|
cleanup
|
478 | |
sub module use DBIx::Custom ...
|
479 |
L<DBIx::Custom> object. |
cleanup
|
480 | |
481 |
=head2 C<sth> |
|
482 | ||
483 |
    my $sth = $result->sth
|
484 |
$result = $result->sth($sth); |
|
485 | ||
486 |
Statement handle of L<DBI>. |
|
487 | ||
update document
|
488 |
=head1 METHODS |
489 | ||
renamed build_query to creat...
|
490 |
L<DBIx::Custom::Result> inherits all methods from L<Object::Simple> |
cleanup
|
491 |
and implements the following new ones. |
packaging one directory
|
492 | |
updated pod
|
493 |
=head2 C<all> |
494 | ||
495 |
my $rows = $result->all; |
|
496 | ||
cleanup
|
497 |
Same as C<fetch_hash_all>. |
updated pod
|
498 | |
- added EXPERIMENTAL DBIx::C...
|
499 |
=head2 C<column> EXPERIMENTAL |
500 | ||
501 |
my $column = $result->column; |
|
502 | ||
503 |
Get all values of the first column.
|
504 | ||
505 |
my $names = $dbi->select('name', table => 'book')->column; |
|
506 | ||
removed DBIx::Custom commit ...
|
507 |
=head2 C<fetch> |
packaging one directory
|
508 | |
cleanup
|
509 |
my $row = $result->fetch; |
version 0.0901
|
510 | |
cleanup
|
511 |
Fetch a row and put it into array reference. |
packaging one directory
|
512 | |
removed DBIx::Custom commit ...
|
513 |
=head2 C<fetch_all> |
packaging one directory
|
514 | |
cleanup
|
515 |
my $rows = $result->fetch_all; |
version 0.0901
|
516 | |
cleanup
|
517 |
Fetch all rows and put them into array of array reference. |
packaging one directory
|
518 | |
- renamed DBIx::Custom::Resu...
|
519 |
=head2 C<fetch_one> |
cleanup
|
520 | |
- renamed DBIx::Custom::Resu...
|
521 |
my $row = $result->fetch_one; |
cleanup
|
522 | |
cleanup
|
523 |
Fetch only a first row and put it into array reference, |
524 |
and finish the statement handle.
|
cleanup
|
525 | |
removed DESTROY method(not b...
|
526 |
=head2 C<fetch_hash> |
packaging one directory
|
527 | |
cleanup
|
528 |
my $row = $result->fetch_hash; |
packaging one directory
|
529 | |
cleanup
|
530 |
Fetch a row and put it into hash reference. |
update document
|
531 | |
cleanup
|
532 |
=head2 C<fetch_hash_all> |
533 | ||
534 |
my $rows = $result->fetch_hash_all; |
|
535 | ||
cleanup
|
536 |
Fetch all rows and put them into array of hash reference. |
cleanup
|
537 | |
- renamed DBIx::Custom::Resu...
|
538 |
=head2 C<fetch_hash_one> |
removed reconnect method
|
539 |
|
- renamed DBIx::Custom::Resu...
|
540 |
my $row = $result->fetch_hash_one; |
packaging one directory
|
541 | |
cleanup
|
542 |
Fetch only a first row and put it into hash reference, |
543 |
and finish the statement handle.
|
packaging one directory
|
544 | |
removed DESTROY method(not b...
|
545 |
=head2 C<fetch_hash_multi> |
update document
|
546 | |
cleanup
|
547 |
my $rows = $result->fetch_hash_multi(5); |
update document
|
548 |
|
cleanup
|
549 |
Fetch multiple rows and put them into array of hash reference. |
update document
|
550 | |
cleanup
|
551 |
=head2 C<fetch_multi> |
packaging one directory
|
552 | |
cleanup
|
553 |
my $rows = $result->fetch_multi(5); |
554 |
|
|
cleanup
|
555 |
Fetch multiple rows and put them into array of array reference. |
removed DESTROY method(not b...
|
556 | |
cleanup
|
557 |
=head2 C<filter> |
558 | ||
cleanup
|
559 |
$result->filter(title => sub { uc $_[0] }, author => 'to_upper'); |
560 |
$result->filter([qw/title author/] => 'to_upper'); |
|
added experimental DBIx::Cus...
|
561 | |
cleanup
|
562 |
Set filter for column. |
563 |
You can use subroutine or filter name as filter. |
|
- DBIx::Custom Model filter ...
|
564 |
This filter is executed after C<type_rule> filter. |
cleanup
|
565 | |
- removed EXPERIMENTAL statu...
|
566 |
=head2 C<header> |
added EXPERIMENTAL DBIx::Cus...
|
567 | |
568 |
my $header = $result->header; |
|
569 | ||
570 |
Get header column names. |
|
571 | ||
updated pod
|
572 |
=head2 C<one> |
573 | ||
574 |
my $row = $result->one; |
|
575 | ||
- renamed DBIx::Custom::Resu...
|
576 |
Alias for C<fetch_hash_one>. |
added experimental DBIx::Cus...
|
577 | |
- removed DEPRECATED DBIx::C...
|
578 |
=head2 C<stash> |
added experimental DBIx::Cus...
|
579 | |
580 |
my $stash = $result->stash; |
|
581 |
my $foo = $result->stash->{foo}; |
|
582 |
$result->stash->{foo} = $foo; |
|
583 | ||
- added EXPERIMENTAL DBIx::C...
|
584 |
Stash is hash reference to save some data. |
added experimental DBIx::Cus...
|
585 | |
- removed EXPERIMENTAL flag ...
|
586 |
=head2 C<type_rule> |
cleanup
|
587 |
|
588 |
# Merge type rule |
|
added EXPERIMENTAL DBIx::Cus...
|
589 |
$result->type_rule( |
590 |
# DATE |
|
591 |
9 => sub { ... }, |
|
592 |
# DATETIME or TIMESTAMP |
|
593 |
11 => sub { ... } |
|
594 |
); |
|
595 | ||
cleanup
|
596 |
# Replace type rule(by reference) |
597 |
$result->type_rule([ |
|
598 |
# DATE |
|
599 |
9 => sub { ... }, |
|
600 |
# DATETIME or TIMESTAMP |
|
601 |
11 => sub { ... } |
|
602 |
]); |
|
EXPERIMENTAL type_rule_off i...
|
603 | |
cleanup
|
604 |
This is same as L<DBIx::Custom>'s C<type_rule>'s <from>. |
EXPERIMENTAL type_rule_off i...
|
605 | |
- removed EXPERIMENTAL flag ...
|
606 |
=head2 C<type_rule_off> |
- changed EXPERIMENTAL DBIx:...
|
607 | |
608 |
$result = $result->type_rule_off; |
|
609 | ||
610 |
Turn C<from1> and C<from2> type rule off. |
|
611 |
By default, type rule is on. |
|
612 | ||
- removed EXPERIMENTAL flag ...
|
613 |
=head2 C<type_rule_on> |
- changed EXPERIMENTAL DBIx:...
|
614 | |
615 |
$result = $result->type_rule_on; |
|
616 | ||
617 |
Turn C<from1> and C<from2> type rule on. |
|
618 |
By default, type rule is on. |
|
619 | ||
- removed EXPERIMENTAL flag ...
|
620 |
=head2 C<type_rule1_off> |
- changed EXPERIMENTAL DBIx:...
|
621 | |
622 |
$result = $result->type_rule1_off; |
|
623 | ||
624 |
Turn C<from1> type rule off. |
|
625 |
By default, type rule is on. |
|
626 | ||
- removed EXPERIMENTAL flag ...
|
627 |
=head2 C<type_rule1_on> |
- changed EXPERIMENTAL DBIx:...
|
628 | |
629 |
$result = $result->type_rule1_on; |
|
630 | ||
631 |
Turn C<from1> type rule on. |
|
632 |
By default, type rule is on. |
|
633 | ||
- removed EXPERIMENTAL flag ...
|
634 |
=head2 C<type_rule2_off> |
- changed EXPERIMENTAL DBIx:...
|
635 | |
636 |
$result = $result->type_rule2_off; |
|
637 | ||
638 |
Turn C<from2> type rule off. |
|
639 |
By default, type rule is on. |
|
640 | ||
- removed EXPERIMENTAL flag ...
|
641 |
=head2 C<type_rule2_on> |
- changed EXPERIMENTAL DBIx:...
|
642 | |
643 |
$result = $result->type_rule2_on; |
|
644 | ||
645 |
Turn C<from2> type rule on. |
|
646 |
By default, type rule is on. |
|
647 | ||
- added EXPERIMENTAL DBIx::C...
|
648 |
=head2 C<value> EXPERIMENTAL |
649 | ||
650 |
my $value = $result->value; |
|
651 | ||
652 |
Get first column's first value. |
|
653 | ||
654 |
my $count = $dbi->select('count(*)')->value; |
|
655 | ||
packaging one directory
|
656 |
=cut |