Search

Advanced search is enabled.
Showing 1 - 20 of 38 code results for `Sidekiq path:elastic` in
GitLab.org / GitLab
  end

  it "searches notes", :sidekiq_inline do
    project = create :project, :public
    issue = create :issue, project: project
    end

    it 'finds issues', :sidekiq_inline do
      create(:issue, project: project, title: 'Test searching for an issue')
      ensure_elasticsearch_index!

      it 'only indexes enabled projects' do
        Sidekiq::Testing.inline! do
          create :project, path: 'test_two', description: 'awesome project'
          create :project
  end

  it "searches merge requests", :sidekiq_might_not_need_inline do
    project = create :project, :public, :repository
      expect(Elastic::ProcessInitialBookkeepingService).to receive(:backfill_projects!).with(project1, project2)

      Sidekiq::Testing.fake! do
        described_class.new.execute(project_ids: [project1.id, project2.id], namespace_ids: [3, 4])
      end
  end

  it 'returns the record if a single project was passed', :sidekiq_might_not_need_inline do
    result = described_class.elastic_search(
      'test',
    stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)

    Sidekiq::Testing.inline! do
      project.wiki.create_page("index_page", "Bla bla term1")
      project.wiki.create_page("omega_page", "Bla bla term2")
  end

  it "searches milestones", :sidekiq_might_not_need_inline do
    project = create :project
  end

  context 'issues search', :sidekiq_inline do
    let!(:project) { create(:project, :public, group: group) }
    let!(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
The Elasticsearch integration depends on an external indexer. We ship an [indexer written in Go](https://gitlab.com/gitlab-org/gitlab-elasticsearch-indexer). The user must trigger the initial indexing via a Rake task but, after this is done, GitLab itself will trigger reindexing when required via `after_` callbacks on create, update, and destroy that are inherited from [`/ee/app/models/concerns/elastic/application_versioned_search.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/app/models/concerns/elastic/application_versioned_search.rb).

After initial indexing is complete, create, update, and delete operations for all models except projects (see [#207494](https://gitlab.com/gitlab-org/gitlab/-/issues/207494)) are tracked in a Redis [`ZSET`](https://redis.io/topics/data-types#sorted-sets). A regular `sidekiq-cron` `ElasticIndexBulkCronWorker` processes this queue, updating many Elasticsearch documents at a time with the [Bulk Request API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html).

Search queries are generated by the concerns found in [`ee/app/models/concerns/elastic`](https://gitlab.com/gitlab-org/gitlab/tree/master/ee/app/models/concerns/elastic). These concerns are also in charge of access control, and have been a historic source of security bugs so please pay close attention to them!

  def index!(project)
    Sidekiq::Testing.inline! do
      project.repository.index_commits_and_blobs

      before do
        Sidekiq::Testing.disable! do
          project1
          project2
| `Maximum file size indexed`                           | See [the explanation in instance limits.](../administration/instance_limits.md#maximum-file-size-indexed). |
| `Maximum field length`                                | See [the explanation in instance limits.](../administration/instance_limits.md#maximum-field-length). |
| `Maximum bulk request size (MiB)` | The Maximum Bulk Request size is used by the GitLab Golang-based indexer processes and indicates how much data it ought to collect (and store in memory) in a given indexing process before submitting the payload to Elasticsearch’s Bulk API. This setting should be used with the Bulk request concurrency setting (see below) and needs to accommodate the resource constraints of both the Elasticsearch host(s) and the host(s) running the GitLab Golang-based indexer either from the `gitlab-rake` command or the Sidekiq tasks. |
| `Bulk request concurrency`                            | The Bulk request concurrency indicates how many of the GitLab Golang-based indexer processes (or threads) can run in parallel to collect data to subsequently submit to Elasticsearch’s Bulk API. This increases indexing performance, but fills the Elasticsearch bulk requests queue faster. This setting should be used together with the Maximum bulk request size setting (see above) and needs to accommodate the resource constraints of both the Elasticsearch host(s) and the host(s) running the GitLab Golang-based indexer either from the `gitlab-rake` command or the Sidekiq tasks. |
| `Client request timeout` | Elasticsearch HTTP client request timeout value in seconds. `0` means using the system default timeout value, which depends on the libraries that GitLab application is built upon. |
  end

  describe "search", :sidekiq_inline do
    let_it_be(:project) { create(:project, :public, :repository, :wiki_repo) }
    let_it_be(:private_project) { create(:project, :repository, :wiki_repo) }

  it "searches issues", :aggregate_failures do
    Sidekiq::Testing.inline! do
      create :issue, title: 'bla-bla term1', project: project
      create :issue, description: 'bla-bla term2', project: project

  def up
    Sidekiq.redis do |conn|
      conn.del "queue:elastic_batch_project_indexer"
    end
  include Gitlab::ExclusiveLeaseHelpers

  sidekiq_options retry: false

  feature_category :global_search
  include ApplicationWorker

  sidekiq_options retry: 2
  feature_category :global_search
  include ApplicationWorker

  sidekiq_options retry: 2
  feature_category :global_search
  urgency :throttled

  feature_category :global_search
  sidekiq_options retry: 2
  loggable_arguments 1