Skip to content

Commit a54254f

Browse files
authored
Merge pull request #235 from MITLibraries/use-76-cache-primo
Cache Primo queries
2 parents ce948fc + 75b451c commit a54254f

File tree

1 file changed

+46
-35
lines changed

1 file changed

+46
-35
lines changed

app/controllers/search_controller.rb

Lines changed: 46 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,9 @@ def results
1717

1818
# Determine which tab to load - default to primo unless gdt is enabled
1919
@active_tab = if Flipflop.enabled?(:gdt)
20-
'gdt' # Keep existing GDT behavior unchanged
20+
'gdt' # Keep existing GDT behavior unchanged
2121
else
22-
params[:tab] || 'primo' # Default to primo for new tabbed interface
22+
params[:tab] || 'primo' # Default to primo for new tabbed interface
2323
end
2424
@enhanced_query = Enhancer.new(params).enhanced_query
2525

@@ -43,7 +43,7 @@ def results
4343
def load_gdt_results
4444
query = QueryBuilder.new(@enhanced_query).query
4545

46-
response = cache_timdex_query(query)
46+
response = query_timdex(query)
4747

4848
# Handle errors
4949
@errors = extract_errors(response)
@@ -53,30 +53,28 @@ def load_gdt_results
5353
end
5454

5555
def load_primo_results
  # Runs the (cached) Primo search and normalizes the response for the view.
  # Any failure is converted into user-facing errors via handle_primo_errors.
  primo_response = query_primo
  @results = NormalizePrimoResults.new(primo_response, @enhanced_query[:q]).normalize

  # Enhanced pagination using cached response
  if @results.present?
    # Fall back to the local result count when the API omits the total.
    total_hits = primo_response.dig('info', 'total') || @results.count

    # Coerce to integers: enhanced-query values may arrive as request-param
    # strings, and string arithmetic below would raise (surfacing as a search
    # error instead of pagination). `.to_i` is a no-op for Integers.
    per_page = (@enhanced_query[:per_page] || 20).to_i
    current_page = (@enhanced_query[:page] || 1).to_i

    @pagination = {
      hits: total_hits,
      start: ((current_page - 1) * per_page) + 1,
      end: [current_page * per_page, total_hits].min
    }
  end
rescue StandardError => e
  @errors = handle_primo_errors(e)
end
7674

7775
def load_timdex_results
7876
query = QueryBuilder.new(@enhanced_query).query
79-
response = cache_timdex_query(query)
77+
response = query_timdex(query)
8078

8179
@errors = extract_errors(response)
8280
@pagination = Analyzer.new(@enhanced_query, response).pagination if @errors.nil?
@@ -87,31 +85,44 @@ def active_filters
8785
ENV.fetch('ACTIVE_FILTERS', '').split(',').map(&:strip)
8886
end
8987

90-
def cache_timdex_query(query)
91-
# Create cache key for this query
92-
# Sorting query hash to ensure consistent key generation regardless of the parameter order
93-
sorted_query = query.sort_by { |k, v| k.to_sym }.to_h
94-
cache_key = Digest::MD5.hexdigest(sorted_query.to_s)
95-
96-
# builder hands off to wrapper which returns raw results here
97-
# We are using two different caches to allow for Geo and USE to be cached separately. This ensures we don't have
98-
# cache key collision for these two different query types. In practice, the likelihood of this happening is low,
99-
# as the query parameters are different for each type and they won't often be run with the same cache backend other
100-
# than locally, but this is a safeguard.
101-
# The response type is a GraphQL::Client::Response, which is not directly serializable, so we convert it to a hash.
88+
def query_timdex(query)
  # Cache keys are derived from the query contents to avoid naming collisions.
  cache_key = generate_cache_key(query)

  # Builder hands off to wrapper which returns raw results here. The active
  # tab is appended to the cache key so GDT and TIMDEX results never collide
  # even when the query hashes are identical.
  Rails.cache.fetch("#{cache_key}/#{@active_tab}", expires_in: 12.hours) do
    raw =
      case @active_tab
      when 'gdt'
        execute_geospatial_query(query)
      when 'timdex'
        TimdexBase::Client.query(TimdexSearch::BaseQuery, variables: query)
      end

    # NOTE(review): `raw` is nil for any other tab value, which would raise
    # NoMethodError below — confirm callers only reach this with 'gdt'/'timdex'.
    # The response type is a GraphQL::Client::Response, which is not directly
    # serializable, so we convert it to a hash before caching.
    {
      data: raw.data.to_h,
      errors: raw.errors.details.to_h
    }
  end
end
114108

109+
def query_primo
  # Cache keys are derived from the enhanced query to avoid naming collisions;
  # the '/primo' suffix keeps Primo entries separate from the TIMDEX caches.
  cache_key = generate_cache_key(@enhanced_query)

  Rails.cache.fetch("#{cache_key}/primo", expires_in: 12.hours) do
    search_client = PrimoSearch.new
    hits_per_page = @enhanced_query[:per_page] || 20
    search_client.search(@enhanced_query[:q], hits_per_page)
  end
end
119+
120+
# Builds a deterministic cache key for a query hash.
#
# Sorting the pairs ensures consistent key generation regardless of parameter
# order. Sorting on String (rather than Symbol) avoids a NoMethodError for any
# non-symbolizable key (e.g. an Integer) while producing the same ordering for
# String/Symbol keys — Symbol#<=> compares via #to_s — so previously generated
# cache keys remain valid.
def generate_cache_key(query)
  sorted_query = query.sort_by { |k, _v| k.to_s }.to_h
  Digest::MD5.hexdigest(sorted_query.to_s)
end
125+
115126
def execute_geospatial_query(query)
116127
if query['geobox'] == 'true' && query[:geodistance] == 'true'
117128
TimdexBase::Client.query(TimdexSearch::AllQuery, variables: query)
@@ -261,7 +272,7 @@ def validate_geobox_values!
261272

262273
def handle_primo_errors(error)
263274
Rails.logger.error("Primo search error: #{error.message}")
264-
275+
265276
if error.is_a?(ArgumentError)
266277
[{ 'message' => 'Primo search is not properly configured.' }]
267278
elsif error.is_a?(HTTP::TimeoutError)

0 commit comments

Comments
 (0)