App Search API

This document includes examples for the different API requests. For more information, refer to the Elastic App Search Documentation, particularly the API Reference.
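The examples below assume an existing client instance. A minimal setup sketch, where the host URL and API key values are placeholders:

# Instantiate an App Search client
require 'elastic-enterprise-search'

client = Elastic::EnterpriseSearch::AppSearch::Client.new(
  host: 'https://my-deployment.ent.europe-west1.gcp.cloud.es.io',
  http_auth: 'private-api-key'
)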

Engines

# Create an engine
client.create_engine(body: { name: 'videogames' })

# List all engines
client.list_engines

# Get an engine
client.engine('videogames')

# Delete an engine
client.delete_engine('videogames')

Meta engines

# Create a meta engine
body = {
  name: engine_name,
  type: 'meta',
  source_engines: [ 'books', 'videogames' ]
}
client.create_engine(body: body)

# Add a source engine to a meta engine
client.add_meta_engine_source(meta_engine_name, source_engines: ['engine1', 'engine2'])

# Remove a source engine from a meta engine
client.delete_meta_engine_source(meta_engine_name, source_engines: ['engine1', 'engine2'])

Documents

engine_name = 'videogames'
document = {
  id: 'Mr1064',
  name: 'Super Luigi 64',
  body: 'A classic 3D videogame'
}

# Index documents
client.index_documents(engine_name, documents: document)
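# The response body lists each document id with any indexing errors, e.g.
# [{ 'id' => 'Mr1064', 'errors' => [] }]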

# List documents
client.list_documents(engine_name)

# Get documents by ID
client.documents(engine_name, document_ids: [id1, id2])

# Delete a document
client.delete_documents(engine_name, document_ids: [document_id])

# Update a document
client.put_documents(engine_name, documents: [{ id: document_id, key: value }])

Search

# Single Search
query = {
  body: {
    query: 'luigi'
  }
}

client.search(engine_name, query)
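
Responses expose the parsed JSON body via response.body (see also the Search Explain example below). A short sketch of iterating over results, assuming each returned field follows the standard App Search shape with a raw key:

# Read results from a search response
response = client.search(engine_name, query)
response.body['results'].each do |result|
  puts result['name']['raw']
end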

# Search with multiple filters
client.search('parks', {
  body: {
    query: "olympic",
    filters: {
      all: [
        { states: "Washington" },
        { world_heritage_site: "true" }
      ]
    }
  }
})

# Multi query search
queries = [{ query: 'Nicanor' }, { query: 'Iain' }]

client.multi_search(engine_name, body: queries)

Synonym Sets

# Create a synonym set
client.create_synonym_set(engine_name, body: { synonyms: ['synonym1', 'synonym2'] })

# List synonym sets
client.list_synonym_sets(engine_name)

# Retrieve a synonym set by ID
client.synonym_set(engine_name, synonym_set_id: 'id')

# Update a synonym set by ID
client.put_synonym_set(engine_name, synonym_set_id: 'id', body: { synonyms: ['synonym2', 'synonym3'] })

# Delete a synonym set
client.delete_synonym_set(engine_name, synonym_set_id: 'id')

Curations

# Create a curation
client.create_curation(
  engine_name,
  body: {
    queries: ['query1'],
    promoted: ['doc-id1'],
    hidden: ['doc-id2']
  }
)
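# The response body contains the id of the new curation, e.g.
# { 'id' => 'cur-id' }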

# Retrieve a curation by ID
client.curation(engine_name, curation_id: 'cur-id')

# Update an existing curation
client.put_curation(
  engine_name,
  curation_id: 'cur-id',
  body: {
    queries: ['query1'],
    promoted: ['doc-id2'],
    hidden: ['doc-id1']
  }
)

# List existing curations
client.list_curations(engine_name)

# Delete a curation
client.delete_curation(engine_name, curation_id: 'cur-id')

API Keys

# Create API Key
body = {
  name: name,
  type: 'private',
  read: true,
  write: true,
  access_all_engines: true
}
response = client.create_api_key(body: body)

# Get the details of an API Key
client.api_key(api_key_name: name)

# List API Keys
client.list_api_keys

# Update an API Key
body = { name: name, type: 'private', read: true, write: true, engines: ['test'] }
client.put_api_key(api_key_name: name, body: body)

# Delete an API Key
client.delete_api_key(api_key_name: name)

Web Crawler

See the Web Crawler API reference for more information.

# Create a crawler domain
body = { name: 'https://www.elastic.co' }
client.create_crawler_domain(engine_name, body: body)

# Get crawler domain information
client.crawler_domain(engine_name, domain_id: domain_id)

# List crawler domains
client.list_crawler_domains(engine_name)

# Update a crawler domain
body = { name: 'https://www.wikipedia.org' }
client.put_crawler_domain(engine_name, domain_id: domain_id, body: body)

# Delete a crawler domain
client.delete_crawler_domain(engine_name, domain_id: domain_id)

# Create a crawler crawl request
client.create_crawler_crawl_request(engine_name)

# Retrieve a crawl request
client.crawler_crawl_request(engine_name, crawl_request_id: request_id)

# Retrieve active crawl request details
client.crawler_active_crawl_request(engine_name)

# List crawl requests
client.list_crawler_crawl_requests(engine_name)

# Cancel an active crawl request, stopping a running crawl if needed
client.delete_crawler_active_crawl_request(engine_name)

# Set a crawler crawl schedule
body = { frequency: 1, unit: 'day' }
client.put_crawler_crawl_schedule(engine_name, body: body)

# Retrieve crawler crawl schedule
client.crawler_crawl_schedule(engine_name)

# Delete a crawler crawl schedule
client.delete_crawler_crawl_schedule(engine_name)

# Create a crawler entry point
client.create_crawler_entry_point(engine_name, domain_id: domain_id, body: { value: '/elastic-stack' })

# Update a crawler entry point
client.put_crawler_entry_point(
  engine_name,
  domain_id: domain_id,
  entry_point_id: entry_point_id,
  body: { value: '/enterprise-search' }
)

# Validate a URL
client.crawler_url_validation_result(engine_name, url: name)

# Extract content from a URL
client.crawler_url_extraction_result(engine_name, url: name)

# Retrieve tracing history for a crawler URL
client.crawler_url_tracing_result(engine_name, url: name)

# Delete a crawler entry point
client.delete_crawler_entry_point(
  engine_name,
  domain_id: domain_id,
  entry_point_id: entry_point_id
)

# Retrieve crawler metrics
client.crawler_metrics

# Retrieve crawler configuration overview
client.crawler_overview(engine_name)

# Create a crawler sitemap
body = { url: 'https://www.elastic.co/sitemap.xml' }
client.create_crawler_sitemap(engine_name, domain_id: domain_id, body: body)

# Update a crawler sitemap
body = { url: 'https://www.elastic.co/sitemap2.xml' }
client.put_crawler_sitemap(engine_name, domain_id: domain_id, sitemap_id: sitemap_id, body: body)

# Delete a crawler sitemap
client.delete_crawler_sitemap(engine_name, domain_id: domain_id, sitemap_id: sitemap_id)

# Create a crawler crawl rule
body = { order: 1, policy: 'allow', rule: 'contains', pattern: '/stack' }
client.create_crawler_crawl_rule(engine_name, domain_id: domain_id, body: body)

# Update a crawler crawl rule
body = { order: 2, policy: 'allow', rule: 'begins', pattern: '/stack' }
client.put_crawler_crawl_rule(engine_name, domain_id: domain_id, crawl_rule_id: rule_id, body: body)

# Delete a crawler crawl rule
client.delete_crawler_crawl_rule(engine_name, domain_id: domain_id, crawl_rule_id: rule_id)

# Create a process crawl
client.create_crawler_process_crawl(engine_name, body: { dry_run: true })

# Retrieve a process crawl
client.crawler_process_crawl(engine_name, process_crawl_id: id)

# Retrieve denied URLs for a process crawl
client.denied_urls(engine_name, process_crawl_id: id)

# List process crawls
client.list_crawler_process_crawls(engine_name)

# View denied URLs for a process crawl
client.crawler_process_crawl_denied_urls(engine_name, process_crawl_id: id)

Adaptive Relevance Suggestions

# Update an adaptive relevance suggestion
body = [{ query: 'forest', type: 'curation', status: 'applied' }]
client.put_adaptive_relevance_suggestions(engine_name, body: body)

# Retrieve an adaptive relevance suggestion
client.adaptive_relevance_suggestions(engine_name, search_suggestion_query: 'test')

# List adaptive relevance suggestions
client.list_adaptive_relevance_suggestions(engine_name)

Adaptive Relevance Settings

# Show the settings for an engine
client.adaptive_relevance_settings(engine_name)

# Update relevance settings
body = {
  curation: { enabled: true }
}
client.put_adaptive_relevance_settings(engine_name, body: body)

# Refresh adaptive relevance update process
client.refresh_adaptive_relevance_update_process(engine_name, adaptive_relevance_suggestion_type: 'curation')

Elasticsearch Search

Submit an Elasticsearch search request to the document indices that power an App Search engine and retrieve the results. This API works with both regular engines and meta engines. See Elasticsearch search API for App Search for more information and requirements.

es_request = { query: { bool: { must: { term: { title: 'test' } } } } }
client.search_es_search(engine_name, body: es_request)

Search Explain

Submit a search request and retrieve the Elasticsearch query that App Search generates for it.

response = client.search_explain(engine_name, body: { query: 'test' })
response.body['query_string']
# => "GET enterprise-search-engine-app-search-explain/_search"

Other API Endpoints

# Count analytics - Returns the number of clicks and total number of queries over a period
client.count_analytics(engine_name)

# Schema - Retrieve current schema for the engine
client.schema(engine_name)

# Update schema for an engine
client.put_schema(engine_name, schema: { field: 'type' })

# Logs - The API Log displays API request and response data at the Engine level
client.api_logs(engine_name, from_date: Date.new(2020, 10, 1), to_date: Date.new(2020, 11, 5))

# Queries Analytics - Returns queries analytics by usage count
client.top_queries_analytics(engine_name)

# Clicks Analytics - Returns the number of clicks received by each document, in descending order
client.top_clicks_analytics(engine_name, query: {})

# Search Settings - Returns current search settings for an engine
client.search_settings(engine_name)

# Update Search Settings (the body below is illustrative; it weights matches on the name field)
body = { search_fields: { name: { weight: 3 } } }
client.put_search_settings(engine_name, body: body)

# Reset search settings
# Warning: This means your settings are wiped! Back them up!
client.reset_search_settings(engine_name)

# Click - Send data about clicked results
client.log_clickthrough(engine_name, body: { query: 'query', document_id: 'doc-id' })

# Query Suggestion - Provide relevant query suggestions for incomplete queries
client.query_suggestion(engine_name, query: 'incomplete_query')