Class: Fluent::Plugin::GithubActivitiesInput

Inherits:
Input
  • Object
show all
Defined in:
lib/fluent/plugin/in_github-activities.rb

Constant Summary collapse

DEFAULT_BASE_TAG =
"github-activity"
DEFAULT_CLIENTS =
4
DEFAULT_STORAGE_TYPE =
"local"

Instance Method Summary collapse

Instance Method Details

#configure(conf) ⇒ Object

Raises:

  • (Fluent::ConfigError)


51
52
53
54
55
56
57
58
59
60
61
62
63
64
# File 'lib/fluent/plugin/in_github-activities.rb', line 51

# Configures the plugin from the parsed Fluentd configuration.
#
# Normalizes the base tag, merges users loaded from the configured
# users-list source, derives the number of crawler clients, scales the
# polling interval, and creates the position storage.
#
# @param conf [Fluent::Config::Element] the plugin configuration
# @raise [Fluent::ConfigError] unless exactly one <storage> section is defined
def configure(conf)
  super

  # Strip a trailing dot so emitted tags are always "#{@base_tag}.#{suffix}".
  @base_tag = @base_tag.sub(/\.\z/, "")
  @users += load_users_list
  # Never run more clients than there are users to watch.
  @n_clients = [@clients, @users.size].min
  # Each client polls independently; scaling the per-client interval by the
  # client count presumably keeps the aggregate request rate constant.
  @interval = @interval * @n_clients
  raise Fluent::ConfigError, "You can define only one <storage> section" unless @storage_configs.size == 1
  storage_section = @storage_configs.first
  storage_config = storage_section.corresponding_config_element
  @pos_storage = storage_create(usage: storage_section.usage,
                                conf: storage_config,
                                default_type: DEFAULT_STORAGE_TYPE)
end

#shutdown ⇒ Object



107
108
109
110
111
112
113
114
# File 'lib/fluent/plugin/in_github-activities.rb', line 107

# Blocks until the request queue has drained, then stops every crawler
# and delegates to the superclass shutdown.
def shutdown
  loop do
    break if @request_queue.empty?
    log.debug(queue_size: @request_queue.size)
    sleep(@interval)
  end
  @crawlers.each do |crawler|
    crawler.stop
  end
  super
end

#start ⇒ Object



66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
# File 'lib/fluent/plugin/in_github-activities.rb', line 66

# Starts the plugin: seeds the request queue with the initial per-user
# requests and spawns @n_clients worker threads that process requests
# until their crawler stops running.
def start
  super

  @request_queue = Queue.new
  @crawlers = []

  users_manager_params = {
    users: @users,
    pos_storage: @pos_storage,
  }
  users_manager = ::Fluent::Plugin::GithubActivities::UsersManager.new(users_manager_params)
  users_manager.generate_initial_requests.each do |request|
    @request_queue.push(request)
  end

  # The options are identical for every crawler, so build the hash once
  # instead of once per worker thread.
  crawler_options = {
    access_token: @access_token,
    watching_users: @users,
    include_commits_from_pull_request: @include_commits_from_pull_request,
    include_foreign_commits: @include_foreign_commits,
    pos_storage: @pos_storage,
    request_queue: @request_queue,
    default_interval: @interval,
    log: log
  }
  @n_clients.times do |n|
    # Create and register each crawler on the plugin thread BEFORE the
    # worker starts. Previously registration happened inside the spawned
    # thread, so #shutdown could run @crawlers.each(&:stop) while some
    # crawlers were still unregistered and miss them.
    crawler = ::Fluent::Plugin::GithubActivities::Crawler.new(crawler_options)
    @crawlers << crawler
    crawler.on_emit = lambda do |tag, record|
      router.emit("#{@base_tag}.#{tag}", Engine.now, record)
    end

    thread_create("in_github_activity_#{n}".to_sym) do
      loop do
        crawler.process_request
        break unless crawler.running
        sleep(crawler.interval_for_next_request)
      end
    end
  end
end