App Search API

This document includes examples for different API requests. Please refer to the Elastic App Search Documentation for more information, particularly the API Reference.


# Create an engine
client.create_engine(body: { name: 'videogames' })

# List all engines
client.list_engines

# Get an engine
client.engine('videogames')

# Delete an engine
client.delete_engine('videogames')

Meta engines

# Create a meta engine:
body = {
  name: engine_name,
  type: 'meta',
  source_engines: [ 'books', 'videogames' ]
}
client.create_engine(body: body)

# Add a source engine to a meta engine:
client.add_meta_engine_source(meta_engine_name, source_engines: ['engine1', 'engine2'])

# Remove a source engine from a meta engine:
client.delete_meta_engine_source(meta_engine_name, source_engines: ['engine1', 'engine2'])


engine_name = 'videogames'
document = {
  id: 'Mr1064',
  name: 'Super Luigi 64',
  body: 'A classic 3D videogame'
}

# Index documents
client.index_documents(engine_name, documents: document)

# List documents
client.list_documents(engine_name)

# Get document(s) by ID:
client.documents(engine_name, document_ids: [id1, id2])

# Delete a document
client.delete_documents(engine_name, document_ids: [document_id])

# Update a document
client.put_documents(engine_name, documents: [{ id: document_id, key: value }])


# Single Search
query = {
  body: {
    query: 'luigi'
  }
}
client.search(engine_name, query)

# Search with multiple filters
client.search('parks', {
  body: {
    query: "olympic",
    filters: {
      all: [
        { states: "Washington" },
        { world_heritage_site: "true" }
      ]
    }
  }
})

# Multi query search
queries = [{ query: 'Nicanor' }, { query: 'Iain' }]

client.multi_search(engine_name, body: queries)

Synonym Sets

# Create a synonym set
client.create_synonym_set(engine_name, body: { synonyms: ['synonym1', 'synonym2'] })

# List synonym sets
client.list_synonym_sets(engine_name)

# Retrieve a synonym set by id
client.synonym_set(engine_name, synonym_set_id: 'id')

# Update a synonym set by id
client.put_synonym_set(engine_name, synonym_set_id: 'id', body: { synonyms: ['synonym2', 'synonym3'] })

# Delete a synonym set
client.delete_synonym_set(engine_name, synonym_set_id: id)


# Create a curation
client.create_curation(
  engine_name,
  body: {
    queries: ['query1'],
    promoted: ['doc-id1'],
    hidden: ['doc-id2']
  }
)

# Retrieve a curation by id:
client.curation(engine_name, curation_id: 'cur-id')

# Update an existing curation:
client.put_curation(
  engine_name,
  curation_id: 'cur-id',
  body: {
    queries: ['query1'],
    promoted: ['doc-id2'],
    hidden: ['doc-id1']
  }
)

# List existing curations:
client.list_curations(engine_name)

# Delete a curation:
client.delete_curation(engine_name, curation_id: 'cur-id')

API Key

# Create API Key
body = {
  name: name,
  type: 'private',
  read: true,
  write: true,
  access_all_engines: true
}
response = client.create_api_key(body: body)

# Get the details of an API Key
client.api_key(api_key_name: name)

# List API Keys
client.list_api_keys

# Update an API Key
body = { name: name, type: 'private', read: true, write: true, engines: ['test'] }
client.put_api_key(api_key_name: name, body: body)

# Delete an API Key
client.delete_api_key(api_key_name: name)

Web Crawler

See Web Crawler API reference for more information.

# Create a crawler domain
body = { name: '' }
client.create_crawler_domain(engine_name, body: body)

# Get crawler domain information
client.crawler_domain(engine_name, domain_id: domain_id)

# List crawler domains
client.list_crawler_domains(engine_name)

# Update a crawler domain
body = { name: '' }
client.put_crawler_domain(engine_name, domain_id: domain_id, domain: body)

# Delete a crawler domain
client.delete_crawler_domain(engine_name, domain_id: domain_id)

# Create a crawler crawl request
client.create_crawler_crawl_request(engine_name)

# Retrieve a crawl request
client.crawler_crawl_request(engine_name, crawl_request_id: request_id)

# Retrieve active crawl request details
client.crawler_active_crawl_request(engine_name)

# List crawl requests
client.list_crawler_crawl_requests(engine_name)

# Delete an active crawl request
client.delete_crawler_active_crawl_request(engine_name)

# Set a crawler crawl schedule
body = { frequency: 1, unit: 'day' }
client.put_crawler_crawl_schedule(engine_name, body: body)

# Retrieve crawler crawl schedule
client.crawler_crawl_schedule(engine_name)

# Delete a crawler crawl schedule
client.delete_crawler_crawl_schedule(engine_name)

# Create a crawler entry point
client.create_crawler_entry_point(engine_name, domain_id: domain_id, body: { value: '/elastic-stack' })

# Update a crawler entry point
client.put_crawler_entry_point(
  engine_name,
  domain_id: domain_id,
  entry_point_id: entry_point_id,
  body: { value: '/enterprise-search' }
)

# Validate a URL
client.crawler_url_validation_result(engine_name, url: name)

# Extract content from a URL
client.crawler_url_extraction_result(engine_name, url: name)

# Retrieve tracing history for a crawler URL
client.crawler_url_tracing_result(engine_name, url: name)

# Delete a crawler entry point
client.delete_crawler_entry_point(
  engine_name,
  domain_id: domain_id,
  entry_point_id: entry_point_id
)

# Retrieve crawler metrics
client.crawler_metrics

# Retrieve crawler configuration overview
client.crawler_overview(engine_name)

# Create a crawler sitemap
body = { url: '' }
client.create_crawler_sitemap(engine_name, domain_id: domain_id, body: body)

# Update a crawler sitemap
body = { url: '' }
client.put_crawler_sitemap(engine_name, domain_id: domain_id, sitemap_id: sitemap_id, body: body)

# Delete a crawler sitemap
client.delete_crawler_sitemap(engine_name, domain_id: domain_id, sitemap_id: sitemap_id)

# Create a crawler crawl rule
body = { order: 1, policy: 'allow', rule: 'contains', pattern: '/stack' }
client.create_crawler_crawl_rule(engine_name, domain_id: domain_id, body: body)

# Update a crawler crawl rule
body = { order: 2, policy: 'allow', rule: 'begins', pattern: '/stack' }
client.put_crawler_crawl_rule(engine_name, domain_id: domain_id, crawl_rule_id: rule_id, body: body)

# Delete a crawler crawl rule
client.delete_crawler_crawl_rule(engine_name, domain_id: domain_id, crawl_rule_id: rule_id)

# Create a process crawl
client.create_crawler_process_crawl(engine_name, body: { dry_run: true })

# Retrieve a process crawl
client.crawler_process_crawl(engine_name, process_crawl_id: id)

# Retrieve denied URLs for a process crawl
client.denied_urls(engine_name, process_crawl_id: id)

# List process crawls
client.list_crawler_process_crawls(engine_name)

# View denied urls for Process Crawl
client.crawler_process_crawl_denied_urls(engine_name, process_crawl_id: id)

# Cancel an active crawl request, stopping a running crawl if needed.
client.delete_crawler_active_crawl_request(engine_name)

Adaptive Relevance Suggestions

# Update an adaptive relevance suggestion
body = [{ query: 'forest', type: 'curation', status: 'applied' }]
client.put_adaptive_relevance_suggestions(engine_name, body: body)

# Retrieve an adaptive relevance suggestion
client.adaptive_relevance_suggestions(engine_name, search_suggestion_query: 'test')

# List adaptive relevance suggestions
client.list_adaptive_relevance_suggestions(engine_name)

Adaptive Relevance Settings

# Show the settings for an engine
client.adaptive_relevance_settings(engine_name)

# Update relevance settings
body = {
  curation: { enabled: true }
}
client.put_adaptive_relevance_settings(engine_name, body: body)

# Refresh adaptive relevance update process
client.refresh_adaptive_relevance_update_process(engine_name, adaptive_relevance_suggestion_type: 'curation')

Elasticsearch Search

Submit an Elasticsearch search request to the document indices that power an App Search engine and retrieve the results. You can use this API with regular engines and meta engines. See Elasticsearch search API for App Search for more information and requirements for using this API.

# Build a raw Elasticsearch request body and run it against the engine's indices
es_request = { query: { bool: { must: { term: { title: 'test' } } } } }
client.search_es_search(engine_name, body: es_request)

Search Explain

Submit a search and retrieve an Elasticsearch query.

# Returns the Elasticsearch query that an App Search query would generate
response = client.search_explain(engine_name, body: { query: 'test' })
# => "GET enterprise-search-engine-app-search-explain/_search"

Other API Endpoints

# Count analytics - Returns the number of clicks and total number of queries over a period
client.count_analytics(engine_name)

# Schema - Retrieve current schema for the engine
client.schema(engine_name)

# Update schema for an engine
client.put_schema(engine_name, schema: { field: 'type' })

# Logs - The API Log displays API request and response data at the Engine level
client.api_logs(engine_name, from_date: Date.new(2021, 10, 1), to_date: Date.new(2021, 11, 5))

# Queries Analytics - Returns queries analytics by usage count
client.top_queries_analytics(engine_name)

# Clicks Analytics - Returns the number of clicks received by a document in descending order
client.top_clicks_analytics(engine_name, query: {})

# Search Settings - Returns current search settings for an engine
client.search_settings(engine_name)

# Update Search Settings
body = { search_fields: { title: { weight: 1 } } }
client.put_search_settings(engine_name, body: body)

# Reset search settings
# Warning: This means your settings are wiped! Back them up!
client.reset_search_settings(engine_name)

# Click - Send data about clicked results
client.log_clickthrough(engine_name, body: { query: 'query', document_id: 'doc-id' })

# Query Suggestion - Provide relevant query suggestions for incomplete queries
client.query_suggestion(engine_name, query: 'incomplete_query')