use OpenILS::Utils::CStoreEditor q/:funcs/;
use OpenSRF::Utils::Cache;
use Encode;
+use Email::Send;
+use Email::Simple;
use OpenSRF::Utils::Logger qw/:logger/;
-
-use OpenSRF::Utils::JSON;
-
-use Time::HiRes qw(time);
+use Time::HiRes qw(time sleep);
use OpenSRF::EX qw(:try);
use Digest::MD5 qw(md5_hex);
my( $self, $client, $id ) = @_;
return undef unless defined $id;
- if(ref($id) and ref($id) == 'ARRAY') {
+ if(ref($id) and ref($id) eq 'ARRAY') {
return _records_to_mods( @$id );
}
my $mods_list = _records_to_mods( $id );
}
sub multiclass_query {
- my($self, $conn, $arghash, $query, $docache) = @_;
-
- $logger->debug("initial search query => $query");
- my $orig_query = $query;
-
- $query =~ s/\+/ /go;
- $query =~ s/^\s+//go;
-
- # convert convenience classes (e.g. kw for keyword) to the full class name
- # ensure that the convenience class isn't part of a word (e.g. 'playhouse')
- $query =~ s/(^|\s)kw(:|\|)/$1keyword$2/go;
- $query =~ s/(^|\s)ti(:|\|)/$1title$2/go;
- $query =~ s/(^|\s)au(:|\|)/$1author$2/go;
- $query =~ s/(^|\s)su(:|\|)/$1subject$2/go;
- $query =~ s/(^|\s)se(:|\|)/$1series$2/go;
- $query =~ s/(^|\s)name(:|\|)/$1author$2/og;
-
- $logger->debug("cleansed query string => $query");
- my $search = {};
-
- my $simple_class_re = qr/((?:\w+(?:\|\w+)?):[^:]+?)$/;
- my $class_list_re = qr/(?:keyword|title|author|subject|series)/;
- my $modifier_list_re = qr/(?:site|dir|sort|lang|available|preflib)/;
-
- my $tmp_value = '';
- while ($query =~ s/$simple_class_re//so) {
-
- my $qpart = $1;
- my $where = index($qpart,':');
- my $type = substr($qpart, 0, $where++);
- my $value = substr($qpart, $where);
-
- if ($type !~ /^(?:$class_list_re|$modifier_list_re)/o) {
- $tmp_value = "$qpart $tmp_value";
- next;
- }
-
- if ($type =~ /$class_list_re/o ) {
- $value .= $tmp_value;
- $tmp_value = '';
- }
-
- next unless $type and $value;
-
- $value =~ s/^\s*//og;
- $value =~ s/\s*$//og;
- $type = 'sort_dir' if $type eq 'dir';
-
- if($type eq 'site') {
- # 'site' is the org shortname. when using this, we also want
- # to search at the requested org's depth
- my $e = new_editor();
- if(my $org = $e->search_actor_org_unit({shortname => $value})->[0]) {
- $arghash->{org_unit} = $org->id if $org;
- $arghash->{depth} = $e->retrieve_actor_org_unit_type($org->ou_type)->depth;
- } else {
- $logger->warn("'site:' query used on invalid org shortname: $value ... ignoring");
- }
- } elsif($type eq 'pref_ou') {
- # 'pref_ou' is the preferred org shortname.
- my $e = new_editor();
- if(my $org = $e->search_actor_org_unit({shortname => $value})->[0]) {
- $arghash->{pref_ou} = $org->id if $org;
- } else {
- $logger->warn("'pref_ou:' query used on invalid org shortname: $value ... ignoring");
- }
-
- } elsif($type eq 'available') {
- # limit to available
- $arghash->{available} = 1 unless $value eq 'false' or $value eq '0';
-
- } elsif($type eq 'lang') {
- # collect languages into an array of languages
- $arghash->{language} = [] unless $arghash->{language};
- push(@{$arghash->{language}}, $value);
-
- } elsif($type =~ /^sort/o) {
- # sort and sort_dir modifiers
- $arghash->{$type} = $value;
-
- } else {
- # append the search term to the term under construction
- $search->{$type} = {} unless $search->{$type};
- $search->{$type}->{term} =
- ($search->{$type}->{term}) ? $search->{$type}->{term} . " $value" : $value;
- }
- }
-
- $query .= " $tmp_value";
- $query =~ s/\s+/ /go;
- $query =~ s/^\s+//go;
- $query =~ s/\s+$//go;
-
- my $type = $arghash->{default_class} || 'keyword';
- $type = ($type eq '-') ? 'keyword' : $type;
- $type = ($type !~ /^(title|author|keyword|subject|series)(?:\|\w+)?$/o) ? 'keyword' : $type;
+ # arghash only really supports limit/offset anymore
+ my($self, $conn, $arghash, $query, $docache, $phys_loc) = @_;
- if($query) {
- # This is the front part of the string before any special tokens were
- # parsed OR colon-separated strings that do not denote a class.
- # Add this data to the default search class
- $search->{$type} = {} unless $search->{$type};
- $search->{$type}->{term} =
- ($search->{$type}->{term}) ? $search->{$type}->{term} . " $query" : $query;
+ if ($query) {
+ $query =~ s/\+/ /go;
+ $query =~ s/^\s+//go;
+ $query =~ s/\s+/ /go;
+ $arghash->{query} = $query
}
- my $real_search = $arghash->{searches} = { $type => { term => $orig_query } };
-
- # capture the original limit because the search method alters the limit internally
- my $ol = $arghash->{limit};
-
- my $sclient = OpenSRF::Utils::SettingsClient->new;
-
- (my $method = $self->api_name) =~ s/\.query//o;
-
- $method =~ s/multiclass/multiclass.staged/
- if $sclient->config_value(apps => 'open-ils.search',
- app_settings => 'use_staged_search') =~ /true/i;
-
- # XXX This stops the session locale from doing the right thing.
- # XXX Revisit this and have it translate to a lang instead of a locale.
- #$arghash->{preferred_language} = $U->get_org_locale($arghash->{org_unit})
- # unless $arghash->{preferred_language};
-
- $method = $self->method_lookup($method);
- my ($data) = $method->run($arghash, $docache);
- $arghash->{searches} = $search if (!$data->{complex_query});
+ $logger->debug("initial search query => $query") if $query;
- $arghash->{limit} = $ol if $ol;
- $data->{compiled_search} = $arghash;
- $data->{query} = $orig_query;
+ (my $method = $self->api_name) =~ s/\.query/.staged/o;
+ return $self->method_lookup($method)->dispatch($arghash, $docache, $phys_loc);
- $logger->info("compiled search is " . OpenSRF::Utils::JSON->perl2JSON($arghash));
-
- return $data;
}
__PACKAGE__->register_method(
$$searchhash{searches}{keyword}{term} = $$args{search}{keyword} if $$args{search}{keyword};
$$searchhash{searches}{'identifier|isbn'}{term} = $$args{search}{isbn} if $$args{search}{isbn};
$$searchhash{searches}{'identifier|issn'}{term} = $$args{search}{issn} if $$args{search}{issn};
+ $$searchhash{searches}{'identifier|upc'}{term} = $$args{search}{upc} if $$args{search}{upc};
$$searchhash{searches}{keyword}{term} .= join ' ', $$searchhash{searches}{keyword}{term}, $$args{search}{tcn} if $$args{search}{tcn};
$$searchhash{searches}{keyword}{term} .= join ' ', $$searchhash{searches}{keyword}{term}, $$args{search}{publisher} if $$args{search}{publisher};
$$searchhash{searches}{keyword}{term} .= join ' ', $$searchhash{searches}{keyword}{term}, $$args{search}{pubdate} if $$args{search}{pubdate};
$$searchhash{searches}{keyword}{term} .= join ' ', $$searchhash{searches}{keyword}{term}, $$args{search}{item_type} if $$args{search}{item_type};
- my ($list) = $self->method_lookup('open-ils.search.biblio.multiclass.staged')->run( $searchhash );
+ my $method = 'open-ils.search.biblio.multiclass.staged';
+ $method .= '.staff' if $self->api_name =~ /staff$/;
+
+ my ($list) = $self->method_lookup($method)->run( $searchhash );
if ($list->{count} > 0 and @{$list->{ids}}) {
$result->{count} = $list->{count};
signature => q/The .staff search includes hidden bibs, hidden items and bibs with no items. Otherwise, @see open-ils.search.biblio.multiclass.staged/
);
+my $estimation_strategy;
sub staged_search {
- my($self, $conn, $search_hash, $docache) = @_;
+ my($self, $conn, $search_hash, $docache, $phys_loc) = @_;
+
+ $phys_loc ||= $U->get_org_tree->id;
my $IAmMetabib = ($self->api_name =~ /metabib/) ? 1 : 0;
$method .= '.staff' if $self->api_name =~ /staff$/;
$method .= '.atomic';
- return {count => 0} unless (
- $search_hash and
- $search_hash->{searches} and
- scalar( keys %{$search_hash->{searches}} ));
+ if (!$search_hash->{query}) {
+ return {count => 0} unless (
+ $search_hash and
+ $search_hash->{searches} and
+ scalar( keys %{$search_hash->{searches}} ));
+ }
my $search_duration;
my $user_offset = $search_hash->{offset} || 0; # user-specified offset
$search_hash->{core_limit} = $superpage_size * $max_superpages;
# Set the configured estimation strategy, defaults to 'inclusion'.
- my $estimation_strategy = OpenSRF::Utils::SettingsClient
- ->new
- ->config_value(
- apps => 'open-ils.search', app_settings => 'estimation_strategy'
- ) || 'inclusion';
+ unless ($estimation_strategy) {
+ $estimation_strategy = OpenSRF::Utils::SettingsClient
+ ->new
+ ->config_value(
+ apps => 'open-ils.search', app_settings => 'estimation_strategy'
+ ) || 'inclusion';
+ }
$search_hash->{estimation_strategy} = $estimation_strategy;
# pull any existing results from the cache
# fulfill the user-specified limit and offset
my $all_results = [];
my $page; # current superpage
- my $est_hit_count = 0;
my $current_page_summary = {};
my $global_summary = {checked => 0, visible => 0, excluded => 0, deleted => 0, total => 0};
- my $is_real_hit_count = 0;
my $new_ids = [];
for($page = 0; $page < $max_superpages; $page++) {
# retrieve the window of results from the database
$logger->debug("staged search: fetching results from the database");
$search_hash->{skip_check} = $page * $superpage_size;
+ $search_hash->{return_query} = $page == 0 ? 1 : 0;
+
my $start = time;
$results = $U->storagereq($method, %$search_hash);
$search_duration = time - $start;
$logger->info("staged search: DB call took $search_duration seconds and returned ".scalar(@$results)." rows, including summary");
- my $hc = $summary->{estimated_hit_count} || $summary->{visible};
- if($hc == 0) {
- $logger->info("search returned 0 results: duration=$search_duration: params=".
- OpenSRF::Utils::JSON->perl2JSON($search_hash));
- }
-
# Create backwards-compatible result structures
if($IAmMetabib) {
- $results = [map {[$_->{id}, $_->{rel}, $_->{record}]} @$results];
+ $results = [map {[$_->{id}, $_->{badges}, $_->{popularity}, $_->{rel}, $_->{record}]} @$results];
} else {
- $results = [map {[$_->{id}]} @$results];
+ $results = [map {[$_->{id}, $_->{badges}, $_->{popularity}]} @$results];
}
push @$new_ids, grep {defined($_)} map {$_->[0]} @$results;
my $current_count = scalar(@$all_results);
- $est_hit_count = $summary->{estimated_hit_count} || $summary->{visible}
- if $page == 0;
-
- $logger->debug("staged search: located $current_count, with estimated hits=".
- ($summary->{estimated_hit_count} || "none") .
- " : visible=" . ($summary->{visible} || "none") . ", checked=" .
- ($summary->{checked} || "none")
- );
-
- if (defined($summary->{estimated_hit_count})) {
- foreach (qw/ checked visible excluded deleted /) {
- $global_summary->{$_} += $summary->{$_};
+ if ($page == 0) { # all summaries are the same, just get the first
+ for (keys %$summary) {
+ $global_summary->{$_} = $summary->{$_};
}
- $global_summary->{total} = $summary->{total};
}
# we've found all the possible hits
- last if $current_count == $summary->{visible}
- and not defined $summary->{estimated_hit_count};
+ last if $current_count == $summary->{visible};
# we've found enough results to satisfy the requested limit/offset
last if $current_count >= ($user_limit + $user_offset);
# we've scanned all possible hits
- if($summary->{checked} < $superpage_size) {
- $est_hit_count = scalar(@$all_results);
- # we have all possible results in hand, so we know the final hit count
- $is_real_hit_count = 1;
- last;
- }
+ last if($summary->{checked} < $superpage_size);
}
# Let other backends grab our data now that we're done.
$cache->put_cache($key, $cache_data, $cache_timeout);
}
- my @results = grep {defined $_} @$all_results[$user_offset..($user_offset + $user_limit - 1)];
+ my $setting_names = [ qw/
+ opac.did_you_mean.max_suggestions
+ opac.did_you_mean.low_result_threshold
+ search.symspell.min_suggestion_use_threshold
+ search.symspell.soundex.weight
+ search.symspell.pg_trgm.weight
+ search.symspell.keyboard_distance.weight/ ];
+ my %suggest_settings = $U->ou_ancestor_setting_batch_insecure(
+ $phys_loc, $setting_names
+ );
- # refine the estimate if we have more than one superpage
- if ($page > 0 and not $is_real_hit_count) {
- if ($global_summary->{checked} >= $global_summary->{total}) {
- $est_hit_count = $global_summary->{visible};
- } else {
- my $updated_hit_count = $U->storagereq(
- 'open-ils.storage.fts_paging_estimate',
- $global_summary->{checked},
- $global_summary->{visible},
- $global_summary->{excluded},
- $global_summary->{deleted},
- $global_summary->{total}
- );
- $est_hit_count = $updated_hit_count->{$estimation_strategy};
+ # Defaults...
+ $suggest_settings{$_} ||= {value=>undef} for @$setting_names;
+
+ # Pull this one off the front, it's not used for the function call
+ my $max_suggestions_setting = shift @$setting_names;
+ my $sugg_low_thresh_setting = shift @$setting_names;
+ $max_suggestions_setting = $suggest_settings{$max_suggestions_setting}{value} // -1;
+ my $suggest_low_threshold = $suggest_settings{$sugg_low_thresh_setting}{value} || 0;
+
+ if ($global_summary->{visible} <= $suggest_low_threshold and $max_suggestions_setting != 0) {
+ # For now, we're doing one-class/one-term suggestions only
+ my ($class, $term) = one_class_one_term($global_summary->{query_struct});
+ if ($class && $term) { # check for suggestions!
+ my $suggestion_verbosity = 4;
+ if ($max_suggestions_setting == -1) { # special value that means "only best suggestion, and not always"
+ $max_suggestions_setting = 1;
+ $suggestion_verbosity = 0;
+ }
+
+ my @settings_params = map { $suggest_settings{$_}{value} } @$setting_names;
+ my $suggs = new_editor()->json_query({
+ from => [
+ 'search.symspell_lookup',
+ $term, $class,
+ $suggestion_verbosity,
+ 1, # case transfer
+ @settings_params
+ ],
+ limit => $max_suggestions_setting
+ });
+ if (@$suggs and $$suggs[0]{suggestion} ne $term) {
+ $global_summary->{suggestions}{'one_class_one_term'} = {
+ class => $class,
+ term => $term,
+ suggestions => $suggs
+ };
+ }
}
}
+ my @results = grep {defined $_} @$all_results[$user_offset..($user_offset + $user_limit - 1)];
+
$conn->respond_complete(
{
- count => $est_hit_count,
+ global_summary => $global_summary,
+ count => $global_summary->{visible},
core_limit => $search_hash->{core_limit},
+ superpage => $page,
superpage_size => $search_hash->{check_limit},
superpage_summary => $current_page_summary,
facet_key => $facet_key,
}
);
- cache_facets($facet_key, $new_ids, $IAmMetabib, $ignore_facet_classes) if $docache;
+ $logger->info("Completed canonicalized search is: $$global_summary{canonicalized_query}");
+
+ return cache_facets($facet_key, $new_ids, $IAmMetabib, $ignore_facet_classes) if $docache;
+}
+
+# Walk a canonicalized query structure and detect the degenerate case of a
+# single search class with a single term (e.g. "keyword: dog").  Returns
+# ($class, $term); either may be undef when the query is more complex.
+# Used by staged_search to decide whether symspell "did you mean"
+# suggestions can be generated for the search.
+sub one_class_one_term {
+    my $qstruct = shift;
+    my $node = $$qstruct{children};
+
+    my $class = undef;
+    my $term = undef;
+    while ($node) {
+        # Stop descending if this level has OR'd children, more than one
+        # AND'd child, or an explicit field list -- any of those means the
+        # query is not a simple one-class/one-term search.
+        last if (
+            $$node{'|'}
+            or @{$$node{'&'}} != 1
+            or ($$node{'&'}[0]{fields} and @{$$node{'&'}[0]{fields}} > 0)
+        );
+
+        # Remember the first class seen; the term appears at the leaf.
+        $class ||= $$node{'&'}[0]{class};
+        $term ||= $$node{'&'}[0]{content};
+
+        # Once a term is found we are at the leaf; stop.
+        last if ($term);
+
+        $node = $$node{'&'}[0]{children};
+    }
+
+    return ($class, $term);
+}
+
+# Stream highlighted display-field data for each requested record.
+# $highlight_map is a hashref keyed by tsquery text, each value an arrayref
+# of display field IDs; it is folded into an SQL hstore expression passed to
+# the DB function search.highlight_display_fields.  Responds once per record
+# via $conn and completes immediately when no records are given.
+sub fetch_display_fields {
+    my $self = shift;
+    my $conn = shift;
+    my $highlight_map = shift;
+    my @records = @_;
+
+    unless (@records) {
+        $conn->respond_complete;
+        return;
+    }
+
+    # Build "hstore((tsq)::TEXT,'f1,f2') || hstore(...)" from the map.
+    # NOTE(review): $tsq and the field list are interpolated directly into
+    # the query string; assumes callers pass trusted values -- confirm.
+    my $hl_map_string = "";
+    if (ref($highlight_map) =~ /HASH/) {
+        for my $tsq (keys %$highlight_map) {
+            my $field_list = join(',', @{$$highlight_map{$tsq}});
+            $hl_map_string .= ' || ' if $hl_map_string;
+            $hl_map_string .= "hstore(($tsq)\:\:TEXT,'$field_list')";
+        }
+    }
+
+    my $e = new_editor();
+
+    for my $record ( @records ) {
+        # Skip empty record IDs, and do nothing at all without a map.
+        next unless ($record && $hl_map_string);
+        $conn->respond(
+            $e->json_query(
+                {from => ['search.highlight_display_fields', $record, $hl_map_string]}
+            )
+        );
+    }
return undef;
}
+__PACKAGE__->register_method(
+ method => 'fetch_display_fields',
+ api_name => 'open-ils.search.fetch.metabib.display_field.highlight',
+ stream => 1
+);
+
sub tag_circulated_records {
my ($auth, $results, $metabib) = @_;
my $query = {
select => { acn => [{ column => 'record', alias => 'tagme' }] },
- from => { acp => 'acn' },
- where => { id => { in => { from => ['action.usr_visible_circ_copies', $e->requestor->id] } } },
+ from => { auch => { acp => { join => 'acn' }} },
+ where => { usr => $e->requestor->id },
distinct => 1
};
if ($metabib) {
$query = {
- select => { mmsm => [{ column => 'metarecord', alias => 'tagme' }] },
- from => 'mmsm',
+ select => { mmrsm => [{ column => 'metarecord', alias => 'tagme' }] },
+ from => 'mmrsm',
where => { source => { in => $query } },
distinct => 1
};
return undef unless ($key and $key =~ /_facets$/);
+ eval {
+ local $SIG{ALRM} = sub {die};
+ alarm(10); # we'll sleep for as much as 10s
+ do {
+ die if $cache->get_cache($key . '_COMPLETE');
+ } while (sleep(0.05));
+ alarm(0);
+ };
+ alarm(0);
+
my $blob = $cache->get_cache($key) || {};
my $facets = {};
return undef unless (@$results);
- # The query we're constructing
- #
- # select mfae.field as id,
- # mfae.value,
- # count(distinct mmrsm.appropriate-id-field )
- # from metabib.facet_entry mfae
- # join metabib.metarecord_sourc_map mmrsm on (mfae.source = mmrsm.source)
- # where mmrsm.appropriate-id-field in IDLIST
- # group by 1,2;
-
- my $count_field = $metabib ? 'metarecord' : 'source';
+ my $facets_function = $metabib ? 'search.facets_for_metarecord_set'
+ : 'search.facets_for_record_set';
+ my $results_str = '{' . join(',', @$results) . '}';
+ my $ignore_str = ref($ignore) ? '{' . join(',', @$ignore) . '}'
+ : '{}';
my $query = {
- select => {
- mfae => [ { column => 'field', alias => 'id'}, 'value' ],
- mmrsm => [{
- transform => 'count',
- distinct => 1,
- column => $count_field,
- alias => 'count',
- aggregate => 1
- }]
- },
- from => {
- mfae => {
- mmrsm => { field => 'source', fkey => 'source' },
- cmf => { field => 'id', fkey => 'field' }
- }
- },
- where => {
- '+mmrsm' => { $count_field => $results },
- '+cmf' => { facet_field => 't' }
- }
+ from => [ $facets_function, $ignore_str, $results_str ]
};
- $query->{where}->{'+cmf'}->{field_class} = {'not in' => $ignore}
- if ref($ignore) and @$ignore > 0;
-
my $facets = OpenILS::Utils::CStoreEditor->new->json_query($query, {substream => 1});
for my $facet (@$facets) {
$logger->info("facet compilation: cached with key=$key");
$cache->put_cache($key, $data, $cache_timeout);
+ $cache->put_cache($key.'_COMPLETE', 1, $cache_timeout);
}
sub cache_staged_search_page {
}
__PACKAGE__->register_method(
+ method => "send_event_email_output",
+ api_name => "open-ils.search.biblio.record.email.send_output",
+);
+# Email the stored template_output of an action_trigger.event to its
+# addressee.  Authorization is either a valid auth token OR a correct
+# captcha answer: the expected answer is cached under md5_hex($capkey).
+# Returns 1 on successful send, 0 on send failure, undef when the event
+# or its output cannot be found.
+sub send_event_email_output {
+    my($self, $client, $auth, $event_id, $capkey, $capanswer) = @_;
+    return undef unless $event_id;
+
+    my $captcha_pass = 0;
+    my $real_answer;
+    if ($capkey) {
+        $real_answer = $cache->get_cache(md5_hex($capkey));
+        # A cache miss (expired or bogus key) leaves $real_answer undef;
+        # require both sides defined so "undef eq undef" can never count
+        # as a captcha pass.
+        $captcha_pass++ if (defined($real_answer) and defined($capanswer) and $real_answer eq $capanswer);
+    }
+
+    my $e = new_editor(authtoken => $auth);
+    return $e->die_event unless $captcha_pass || $e->checkauth;
+
+    my $event = $e->retrieve_action_trigger_event([$event_id,{flesh => 1, flesh_fields => { atev => ['template_output']}}]);
+    return undef unless ($event and $event->template_output);
+
+    my $smtp = OpenSRF::Utils::SettingsClient
+        ->new
+        ->config_value('email_notify', 'smtp_server');
+
+    my $sender = Email::Send->new({mailer => 'SMTP'});
+    $sender->mailer_args([Host => $smtp]);
+
+    my $stat;
+    my $err;
+
+    my $email = Email::Simple->new($event->template_output->data);
+
+    # MIME-encode any non-ASCII header content so the message is RFC-safe.
+    for my $hfield (qw/From To Subject Bcc Cc Reply-To Sender/) {
+        my @headers = $email->header($hfield);
+        $email->header_set($hfield => map { encode("MIME-Header", $_) } @headers) if ($headers[0]);
+    }
+
+    $email->header_set('MIME-Version' => '1.0');
+    $email->header_set('Content-Type' => "text/plain; charset=UTF-8");
+    $email->header_set('Content-Transfer-Encoding' => '8bit');
+
+    try {
+        $stat = $sender->send($email);
+    } catch Error with {
+        $err = $stat = shift;
+        $logger->error("send_event_email_output: Email failed with error: $err");
+    };
+
+    if( !$err and $stat and $stat->type eq 'success' ) {
+        $logger->info("send_event_email_output: successfully sent email");
+        return 1;
+    } else {
+        $logger->warn("send_event_email_output: unable to send email: ".Dumper($stat));
+        return 0;
+    }
+}
+
+__PACKAGE__->register_method(
+ method => "format_biblio_record_entry",
+ api_name => "open-ils.search.biblio.record.print.preview",
+);
+
+__PACKAGE__->register_method(
+ method => "format_biblio_record_entry",
+ api_name => "open-ils.search.biblio.record.email.preview",
+);
+
+__PACKAGE__->register_method(
method => "format_biblio_record_entry",
api_name => "open-ils.search.biblio.record.print",
signature => {
desc => 'Returns a printable version of the specified bib record',
params => [
{ desc => 'Biblio record entry ID or array of IDs', type => 'number' },
+ { desc => 'Context library for holdings, if applicable', type => 'number' },
+ { desc => 'Sort order, if applicable', type => 'string' },
+ { desc => 'Sort direction, if applicable', type => 'string' },
+ { desc => 'Definition Group Member id', type => 'number' },
],
return => {
desc => q/An action_trigger.event object or error event./,
signature => {
desc => 'Emails an A/T templated version of the specified bib records to the authorized user',
params => [
- { desc => 'Authentication token', type => 'string'},
+ { desc => 'Authentication token', type => 'string'},
{ desc => 'Biblio record entry ID or array of IDs', type => 'number' },
+ { desc => 'Context library for holdings, if applicable', type => 'number' },
+ { desc => 'Sort order, if applicable', type => 'string' },
+ { desc => 'Sort direction, if applicable', type => 'string' },
+ { desc => 'Definition Group Member id', type => 'number' },
+ { desc => 'Whether to bypass auth due to captcha', type => 'bool' },
+ { desc => 'Email address, if none for the user', type => 'string' },
+ { desc => 'Subject, if customized', type => 'string' },
],
return => {
desc => q/Undefined on success, otherwise an error event./,
);
sub format_biblio_record_entry {
- my($self, $conn, $arg1, $arg2) = @_;
+ my ($self, $conn) = splice @_, 0, 2;
my $for_print = ($self->api_name =~ /print/);
my $for_email = ($self->api_name =~ /email/);
+ my $preview = ($self->api_name =~ /preview/);
+
+ my ($auth, $captcha_pass, $email, $subject);
+ if ($for_email) {
+ $auth = shift @_;
+ ($captcha_pass, $email, $subject) = splice @_, -3, 3;
+ }
+ my ($bib_id, $holdings_context_org, $bib_sort, $sort_dir, $group_member) = @_;
+ $holdings_context_org ||= $U->get_org_tree->id;
+ $bib_sort ||= 'author';
+ $sort_dir ||= 'ascending';
- my $e; my $auth; my $bib_id; my $context_org;
+ my $e; my $event_context_org; my $type = 'brief';
if ($for_print) {
- $bib_id = $arg1;
- $context_org = $arg2 || $U->get_org_tree->id;
+ $event_context_org = $holdings_context_org;
$e = new_editor(xact => 1);
} elsif ($for_email) {
- $auth = $arg1;
- $bib_id = $arg2;
$e = new_editor(authtoken => $auth, xact => 1);
- return $e->die_event unless $e->checkauth;
- $context_org = $e->requestor->home_ou;
+ return $e->die_event unless $captcha_pass || $e->checkauth;
+ $event_context_org = $e->requestor ? $e->requestor->home_ou : $holdings_context_org;
+ $email ||= $e->requestor ? $e->requestor->email : '';
+ }
+
+ if ($group_member) {
+ $group_member = $e->retrieve_action_trigger_event_def_group_member($group_member);
+ if ($group_member and $U->is_true($group_member->holdings)) {
+ $type = 'full';
+ }
}
+ $holdings_context_org = $e->retrieve_actor_org_unit($holdings_context_org);
+
my $bib_ids;
if (ref $bib_id ne 'ARRAY') {
$bib_ids = [ $bib_id ];
$bucket->btype('temp');
$bucket->name('format_biblio_record_entry ' . $U->create_uuid_string);
if ($for_email) {
- $bucket->owner($e->requestor)
+ $bucket->owner($e->requestor || 1)
} else {
$bucket->owner(1);
}
$e->commit;
+ my $usr_data = {
+ type => $type,
+ email => $email,
+ subject => $subject,
+ context_org => $holdings_context_org->shortname,
+ sort_by => $bib_sort,
+ sort_dir => $sort_dir,
+ preview => $preview
+ };
+
if ($for_print) {
- return $U->fire_object_event(undef, 'biblio.format.record_entry.print', [ $bucket ], $context_org);
+ return $U->fire_object_event(undef, 'biblio.format.record_entry.print', [ $bucket ], $event_context_org, undef, [ $usr_data ]);
} elsif ($for_email) {
- $U->create_events_for_hook('biblio.format.record_entry.email', $bucket, $context_org, undef, undef, 1);
+ return $U->fire_object_event(undef, 'biblio.format.record_entry.email', [ $bucket ], $event_context_org, undef, [ $usr_data ])
+ if ($preview);
+
+ $U->create_events_for_hook('biblio.format.record_entry.email', $bucket, $event_context_org, undef, $usr_data, 1);
}
return undef;
'See perldoc ' . __PACKAGE__ . ' for more detail.',
type => 'object'
},
- {desc => 'limit (optional)', type => 'number'},
- {desc => 'offset (optional)', type => 'number'}
+ {desc => 'timeout (optional)', type => 'number'}
],
return => {
desc => 'Results object like: { "count": $i, "ids": [...] }',
);
}
-=head3 open-ils.search.biblio.marc (arghash, limit, offset)
+=head3 open-ils.search.biblio.marc (arghash, timeout)
As elsewhere the arghash is the required argument, and must be a hashref. The keys are:
=cut
# FIXME: that example above isn't actually tested.
+# FIXME: sort and limit added. item_type not tested yet.
# TODO: docache option?
sub marc_search {
- my( $self, $conn, $args, $limit, $offset, $timeout ) = @_;
+ my( $self, $conn, $args, $timeout ) = @_;
my $method = 'open-ils.storage.biblio.full_rec.multi_search';
$method .= ".staff" if $self->api_name =~ /staff/;
$method .= ".atomic";
- $limit ||= 10; # FIXME: what about $args->{limit} ?
- $offset ||= 0; # FIXME: what about $args->{offset} ?
+ my $limit = $args->{limit} || 10;
+ my $offset = $args->{offset} || 0;
# allow caller to pass in a call timeout since MARC searches
# can take longer than the default 60-second timeout.
if($resp and $recs = $resp->content) {
put_cache($ckey, scalar(@$recs), $recs);
- $recs = [ @$recs[$offset..($offset + ($limit - 1))] ];
} else {
$recs = [];
}
},
distinct => 1,
}
- }
+ },
+ deleted => 'f'
},
order_by =>[{class=>'bmp', field=>'label_sortkey'}]
};
deleted => 'f'
},
"+ccs" => { holdable => 't' },
- "+acpl" => { holdable => 't' }
+ "+acpl" => { holdable => 't', deleted => 'f' }
}
};
return [ map { ($U->is_true($_->location->opac_visible) && $U->is_true($_->status->opac_visible)) ? ($_->id) : () } @$copies ];
}
+__PACKAGE__->register_method(
+ method => 'bib_copies',
+ api_name => 'open-ils.search.bib.copies',
+ stream => 1
+);
+__PACKAGE__->register_method(
+ method => 'bib_copies',
+ api_name => 'open-ils.search.bib.copies.staff',
+ stream => 1
+);
+
+# Stream copy rows for a bib record, scoped to an org unit (with depth),
+# paged by limit/offset, with $pref_ou copies forced into the result set.
+# The .staff variant of the API relaxes OPAC-visibility filtering inside
+# mk_copy_query.  Results are relayed verbatim from cstore json_query.
+sub bib_copies {
+    my ($self, $client, $rec_id, $org, $depth, $limit, $offset, $pref_ou) = @_;
+    my $is_staff = ($self->api_name =~ /staff/);
+
+    my $cstore = OpenSRF::AppSession->create('open-ils.cstore');
+    my $req = $cstore->request(
+        'open-ils.cstore.json_query', mk_copy_query(
+            $rec_id, $org, $depth, $limit, $offset, $pref_ou, $is_staff));
+
+    # Relay each copy row to the caller as it arrives.
+    my $resp;
+    while ($resp = $req->recv) {
+        $client->respond($resp->content);
+    }
+
+    return undef;
+}
+
+# TODO: this comes almost directly from WWW/EGCatLoader/Record.pm
+# Refactor to share
+# Build the json_query used by bib_copies: a basic OPAC copy query for
+# $rec_id, optionally restricted to copies whose circ_lib falls within the
+# org tree under $org (to $depth), with copies at $pref_ou always included.
+# $base_query lets a caller supply its own starting query instead of
+# $U->basic_opac_copy_query.  Returns the (possibly modified) query hashref.
+sub mk_copy_query {
+    my $rec_id = shift;
+    my $org = shift;
+    my $depth = shift;
+    my $copy_limit = shift;
+    my $copy_offset = shift;
+    my $pref_ou = shift;
+    my $is_staff = shift;
+    my $base_query = shift;
+
+    my $query = $base_query || $U->basic_opac_copy_query(
+        $rec_id, undef, undef, $copy_limit, $copy_offset, $is_staff
+    );
+
+    if ($org) { # TODO: root org test
+        # no need to add the org join filter if we're not actually filtering
+        # Join acp -> aou and keep only circ_libs among the descendants of
+        # $org (depth-limited via actor.org_unit_descendants).
+        $query->{from}->{acp}->[1] = { aou => {
+            fkey => 'circ_lib',
+            field => 'id',
+            filter => {
+                id => {
+                    in => {
+                        select => {aou => [{
+                            column => 'id',
+                            transform => 'actor.org_unit_descendants',
+                            result_field => 'id',
+                            params => [$depth]
+                        }]},
+                        from => 'aou',
+                        where => {id => $org}
+                    }
+                }
+            }
+        }};
+
+        if ($pref_ou) {
+            # Make sure the pref OU is included in the results
+            # Rewrite the plain "id in (...)" filter as an OR with the
+            # preferred org so its copies survive the scoping filter.
+            my $in = $query->{from}->{acp}->[1]->{aou}->{filter}->{id}->{in};
+            delete $query->{from}->{acp}->[1]->{aou}->{filter}->{id};
+            $query->{from}->{acp}->[1]->{aou}->{filter}->{'-or'} = [
+                {id => {in => $in}},
+                {id => $pref_ou}
+            ];
+        }
+    };
+
+    # Unsure if we want these in the shared function, leaving here for now
+    # Rank org units first (pref/search org ordering) ...
+    unshift(@{$query->{order_by}},
+        { class => "aou", field => 'id',
+          transform => 'evergreen.rank_ou', params => [$org, $pref_ou]
+        }
+    );
+    # ... then break ties by copy rank.
+    push(@{$query->{order_by}},
+        { class => "acp", field => 'id',
+          transform => 'evergreen.rank_cp'
+        }
+    );
+
+    return $query;
+}
+
+__PACKAGE__->register_method(
+ method => 'record_urls',
+ api_name => 'open-ils.search.biblio.record.resource_urls.retrieve',
+ argc => 1,
+ stream => 1,
+ signature => {
+ desc => q/Returns bib record 856 URL content./,
+ params => [
+ {desc => 'Context org unit ID', type => 'number'},
+ {desc => 'Record ID or Array of Record IDs', type => 'number or array'}
+ ],
+ return => {
+ desc => 'Stream of URL objects, one collection object per record',
+ type => 'object'
+ }
+ }
+);
+
+# Stream one {id, urls} object per requested record, collecting resource
+# URLs from two sources: org-scoped located URIs (asset.uri via
+# evergreen.located_uris_as_uris) and 856/ind1=4 fields parsed straight out
+# of the MARC.  Returns an event object if a bib cannot be retrieved.
+sub record_urls {
+    my ($self, $client, $org_id, $record_ids) = @_;
+
+    # Accept a single ID or an arrayref of IDs.
+    $record_ids = [$record_ids] unless ref $record_ids eq 'ARRAY';
+
+    my $e = new_editor();
+
+    for my $record_id (@$record_ids) {
+
+        my @urls;
+
+        # Start with scoped located URIs
+        my $uris = $e->json_query({
+            from => ['evergreen.located_uris_as_uris', $record_id, $org_id]});
+
+        for my $uri (@$uris) {
+            push(@urls, {
+                href => $uri->{href},
+                label => $uri->{label},
+                note => $uri->{use_restriction}
+            });
+        }
+
+        # Logic copied from TPAC misc_utils.tts
+        my $bib = $e->retrieve_biblio_record_entry($record_id)
+            or return $e->event;
+
+        my $marc_doc = $U->marc_xml_to_doc($bib->marc);
+
+        for my $node ($marc_doc->findnodes('//*[@tag="856" and @ind1="4"]')) {
+
+            # asset.uri's are handled above; skip 856s that carry the
+            # located-URI subfields ($9, $w, $n).
+            next if $node->findnodes('./*[@code="9" or @code="w" or @code="n"]');
+
+            my ($label) = $node->findnodes('./*[@code="y"]');
+            my ($notes) = $node->findnodes('./*[@code="z" or @code="3"]');
+
+            my $first = 1;
+            for my $href_node ($node->findnodes('./*[@code="u"]')) {
+                next unless $href_node;
+
+                # it's possible for multiple $u's to exist within 1 856 tag.
+                # in that case, honor the label/notes data for the first $u, but
+                # leave any subsequent $u's as unadorned href's.
+                # use href/label/note keys to be consistent with the located
+                # URI entries above.
+
+                my $href = $href_node->textContent;
+                push(@urls, {
+                    href => $href,
+                    label => ($first && $label) ? $label->textContent : $href,
+                    note => ($first && $notes) ? $notes->textContent : '',
+                    ind2 => $node->getAttribute('ind2')
+                });
+                $first = 0;
+            }
+        }
+
+        $client->respond({id => $record_id, urls => \@urls});
+    }
+
+    return undef;
+}
+
+__PACKAGE__->register_method(
+ method => 'catalog_record_summary',
+ api_name => 'open-ils.search.biblio.record.catalog_summary',
+ stream => 1,
+ max_bundle_count => 1,
+ signature => {
+ desc => 'Stream of record data suitable for catalog display',
+ params => [
+ {desc => 'Context org unit ID', type => 'number'},
+ {desc => 'Array of Record IDs', type => 'array'}
+ ],
+ return => {
+ desc => q/
+ Stream of record summary objects including id, record,
+ hold_count, copy_counts, display (metabib display
+ fields), attributes (metabib record attrs), plus
+ metabib_id and metabib_records for the metabib variant.
+ /
+ }
+ }
+);
+__PACKAGE__->register_method(
+ method => 'catalog_record_summary',
+ api_name => 'open-ils.search.biblio.record.catalog_summary.staff',
+ stream => 1,
+ max_bundle_count => 1,
+ signature => q/see open-ils.search.biblio.record.catalog_summary/
+);
+__PACKAGE__->register_method(
+ method => 'catalog_record_summary',
+ api_name => 'open-ils.search.biblio.metabib.catalog_summary',
+ stream => 1,
+ max_bundle_count => 1,
+ signature => q/see open-ils.search.biblio.record.catalog_summary/
+);
+
+__PACKAGE__->register_method(
+ method => 'catalog_record_summary',
+ api_name => 'open-ils.search.biblio.metabib.catalog_summary.staff',
+ stream => 1,
+ max_bundle_count => 1,
+ signature => q/see open-ils.search.biblio.record.catalog_summary/
+);
+
+
+# Stream a catalog-display summary object per record (or metarecord for the
+# .metabib variants): base summary, copy_counts, first_call_number,
+# hold_count, and -- when $options->{flesh_copies} is set -- representative
+# copies.  The .staff variants select the staff copy-count API.
+sub catalog_record_summary {
+    my ($self, $client, $org_id, $record_ids, $options) = @_;
+    my $e = new_editor();
+    $options ||= {};
+
+    my $is_meta = ($self->api_name =~ /metabib/);
+    my $is_staff = ($self->api_name =~ /staff/);
+
+    # Hold counts come from open-ils.circ; pick the (meta)record variant.
+    my $holds_method = $is_meta ?
+        'open-ils.circ.mmr.holds.count' :
+        'open-ils.circ.bre.holds.count';
+
+    my $copy_method = $is_meta ?
+        'open-ils.search.biblio.metarecord.copy_count':
+        'open-ils.search.biblio.record.copy_count';
+
+    $copy_method .= '.staff' if $is_staff;
+
+    $copy_method = $self->method_lookup($copy_method); # local method
+
+    for my $rec_id (@$record_ids) {
+
+        my $response = $is_meta ?
+            get_one_metarecord_summary($self, $e, $org_id, $rec_id) :
+            get_one_record_summary($self, $e, $org_id, $rec_id);
+
+        # method_lookup returns atomically; unwrap the single result.
+        ($response->{copy_counts}) = $copy_method->run($org_id, $rec_id);
+
+        $response->{first_call_number} = get_first_call_number(
+            $e, $rec_id, $org_id, $is_staff, $is_meta, $options);
+
+        $response->{hold_count} =
+            $U->simplereq('open-ils.circ', $holds_method, $rec_id);
+
+        if ($options->{flesh_copies}) {
+            $response->{copies} = get_representative_copies(
+                $e, $rec_id, $org_id, $is_staff, $is_meta, $options);
+        }
+
+        $client->respond($response);
+    }
+
+    return undef;
+}
+
+# Returns a snapshot of copy information for a given record or metarecord,
+# sorted by pref org and search org.
+#
+# Parameters: CStoreEditor, record/metarecord ID, context org ID,
+# staff flag, metabib flag, and an options hash which may carry
+# copy_limit, copy_depth, and pref_ou.
+# Returns an arrayref of hashes (call number label/prefix/suffix,
+# circ lib shortname, copy status and location); [] when nothing found.
+sub get_representative_copies {
+    my ($e, $rec_id, $org_id, $is_staff, $is_meta, $options) = @_;
+
+    # NOTE(review): assumes callers supply copy_limit; an undef limit
+    # would interpolate as an empty value in $limits below -- confirm.
+    my $limit = $options->{copy_limit};
+    my $copy_depth = $options->{copy_depth};
+    my $pref_ou = $options->{pref_ou};
+
+    my $org_tree = $U->get_org_tree;
+    if (!$org_id) { $org_id = $org_tree->id; } # default to the tree root
+    my $org = $U->find_org($org_tree, $org_id);
+
+    return [] unless $org;
+
+    # The unapi feed functions return MARCXML with embedded holdings.
+    my $func = 'unapi.biblio_record_entry_feed';
+    my $includes = '{holdings_xml,acp,acnp,acns}';
+    my $limits = "acn=>$limit,acp=>$limit";
+
+    if ($is_meta) {
+        $func = 'unapi.metabib_virtual_record_feed';
+        $includes = '{holdings_xml,acp,acnp,acns,mmr.unapi}';
+        $limits .= ",bre=>$limit";
+    }
+
+    my $xml_query = $e->json_query({from => [
+        $func, '{'.$rec_id.'}', 'marcxml',
+        $includes, $org->shortname, $copy_depth, $limits,
+        undef, undef, undef, undef, undef,
+        undef, undef, undef, $pref_ou
+    ]})->[0];
+
+    # An empty result set would otherwise lead to method calls on undef.
+    my $xml = $xml_query ? $xml_query->{$func} : undef;
+    return [] unless $xml;
+
+    my $doc = XML::LibXML->new->parse_string($xml);
+
+    my $copies = [];
+    for my $volume ($doc->documentElement->findnodes('//*[local-name()="volume"]')) {
+        my $label = $volume->getAttribute('label');
+
+        # Prefix/suffix elements may be absent from the feed; guard
+        # against dereferencing undef instead of crashing mid-stream.
+        my $prefix_node = $volume->getElementsByTagName('call_number_prefix')->[0];
+        my $suffix_node = $volume->getElementsByTagName('call_number_suffix')->[0];
+        my $prefix = $prefix_node ? $prefix_node->getAttribute('label') : '';
+        my $suffix = $suffix_node ? $suffix_node->getAttribute('label') : '';
+
+        my $copies_node = $volume->findnodes('./*[local-name()="copies"]')->[0];
+        next unless $copies_node;
+
+        for my $copy ($copies_node->findnodes('./*[local-name()="copy"]')) {
+
+            my $status = $copy->getElementsByTagName('status')->[0]->textContent;
+            my $location = $copy->getElementsByTagName('location')->[0]->textContent;
+            my $circ_lib_sn = $copy->getElementsByTagName('circ_lib')->[0]->getAttribute('shortname');
+
+            push(@$copies, {
+                call_number_label => $label,
+                call_number_prefix_label => $prefix,
+                call_number_suffix_label => $suffix,
+                circ_lib_sn => $circ_lib_sn,
+                copy_status => $status,
+                copy_location => $location
+            });
+        }
+    }
+
+    return $copies;
+}
+
+# Return the first (representative) call number entry for a record by
+# reusing get_representative_copies() with a copy limit of one.
+# The caller's $options hash is restored to exactly its prior state.
+sub get_first_call_number {
+    my ($e, $rec_id, $org_id, $is_staff, $is_meta, $options) = @_;
+
+    # Remember whether the caller supplied a limit at all, so we do not
+    # leave a spurious copy_limit => undef key behind afterwards.
+    my $had_limit = exists $options->{copy_limit};
+    my $limit = $options->{copy_limit};
+    $options->{copy_limit} = 1;
+
+    my $copies = get_representative_copies(
+        $e, $rec_id, $org_id, $is_staff, $is_meta, $options);
+
+    if ($had_limit) {
+        $options->{copy_limit} = $limit;
+    } else {
+        delete $options->{copy_limit};
+    }
+
+    return $copies->[0];
+}
+
+# Fetch the located-URI / 856 URL list for one bib record by delegating
+# to the public resource_urls API in this same service.
+sub get_one_rec_urls {
+    my ($self, $e, $org_id, $bib_id) = @_;
+
+    my $method = $self->method_lookup(
+        'open-ils.search.biblio.record.resource_urls.retrieve');
+
+    my ($result) = $method->run($org_id, $bib_id);
+
+    return $result->{urls};
+}
+
+# Start with a bib summary and augment the data with additional
+# metarecord content.
+#
+# Builds the summary from the metarecord's master record, then layers
+# in the metarecord ID, the full list of constituent record IDs, and
+# the merged record attributes of the non-master constituents.
+sub get_one_metarecord_summary {
+    my ($self, $e, $org_id, $rec_id) = @_;
+
+    my $meta = $e->retrieve_metabib_metarecord($rec_id) or return {};
+    my $maps = $e->search_metabib_metarecord_source_map({metarecord => $rec_id});
+
+    my $bre_id = $meta->master_record;
+
+    # get_one_record_summary() already populates {urls} for $bre_id, so
+    # no separate get_one_rec_urls() call is needed here.
+    my $response = get_one_record_summary($self, $e, $org_id, $bre_id);
+
+    $response->{metabib_id} = $rec_id;
+    $response->{metabib_records} = [map {$_->source} @$maps];
+
+    my @other_bibs = map {$_->source} grep {$_->source != $bre_id} @$maps;
+
+    # Augment the record attributes with those of all of the records
+    # linked to this metarecord.
+    if (@other_bibs) {
+        my $attrs = $e->search_metabib_record_attr_flat({id => \@other_bibs});
+
+        my $attributes = $response->{attributes};
+
+        for my $attr (@$attrs) {
+            $attributes->{$attr->attr} = [] unless $attributes->{$attr->attr};
+            push(@{$attributes->{$attr->attr}}, $attr->value) # avoid dupes
+                unless grep {$_ eq $attr->value} @{$attributes->{$attr->attr}};
+        }
+    }
+
+    return $response;
+}
+
+# Build the base summary for one bib record: the fleshed record entry
+# (with bulky fields stripped), its display fields, its deduplicated
+# record attributes, and its URL list.  Returns {} if the record is
+# not found.
+sub get_one_record_summary {
+    my ($self, $e, $org_id, $rec_id) = @_;
+
+    my $bre = $e->retrieve_biblio_record_entry([$rec_id, {
+        flesh => 1,
+        flesh_fields => {
+            bre => [qw/compressed_display_entries mattrs creator editor/]
+        }
+    }]) or return {};
+
+    # Compressed display fields arrive packaged as JSON strings.
+    my %display;
+    for my $entry (@{$bre->compressed_display_entries}) {
+        $display{$entry->name} = OpenSRF::Utils::JSON->JSON2perl($entry->value);
+    }
+
+    # Any 'mraf' attribute can repeat, so collect the unique values per
+    # attribute first, then array-ify them all.
+    my %seen;
+    for my $attr (@{$bre->mattrs}) {
+        $seen{$attr->attr}{$attr->value} = 1; # avoid dupes
+    }
+    my %attributes = map { $_ => [keys %{$seen{$_}}] } keys %seen;
+
+    # Strip the bulky fields before shipping the record to the caller.
+    $bre->clear_marc;
+    $bre->clear_mattrs;
+    $bre->clear_compressed_display_entries;
+
+    return {
+        id => $rec_id,
+        record => $bre,
+        display => \%display,
+        attributes => \%attributes,
+        urls => get_one_rec_urls($self, $e, $org_id, $rec_id)
+    };
+}
+
1;