#!/usr/bin/env ruby

#--
# Spoolinger, a generic and lazy spool manager
# Copyright (c) 2012-2018 Marc Dequènes (Duck) <Duck@DuckCorp.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#++

# to allow in-place run for test
$: << File.join(File.dirname(__FILE__), "..", "lib")

require 'spoolinger/config'
require 'spoolinger/logger'
require 'spoolinger/ruby_extra'
require 'eventmachine'
require 'spoolinger/incoming_watcher'
require 'spoolinger/spool_file'
require 'spoolinger/load_check'
require 'pp'


module Spoolinger
  # Program identity, used for banners/version reporting.
  PRODUCT = "Spoolinger"
  # Release version of this daemon.
  VERSION = "0.3.2"

  # Main daemon task: watches a spool directory for incoming files,
  # queues them (FIFO by key) and runs the configured command on each,
  # honoring a concurrency limit and an optional host load check.
  class SpoolingerTask
    # Loads the configuration singleton, sets up logging and compiles
    # the configured regexes. Exits the process (status 1) on invalid
    # configuration.
    def initialize
      @config = Config.instance

      # setup logs
      unless @config.log.nil?
        logger.output_level(@config.log.console_level) unless @config.log.console_level.nil?
        if @config.log.file
          if File.directory? @config.log.file.directory
            logger.log_to_file(File.join(@config.log.file.directory, "spoolinger.log"), @config.log.file.level)
          else
            logger.fatal "Log path does not exist or is not a directory, exiting"
            exit 1
          end
        end
      end

      # compile the configured patterns once, failing fast on bad regexes
      if @config.spooling.file_name_pattern
        begin
          @config.spooling.file_name_pattern = Regexp.new(@config.spooling.file_name_pattern)
        rescue
          logger.fatal "Bad regex for spooling/file_name_pattern setting: %s" % $!.to_s
          exit 1
        end
      end
      if @config.spooling.file_content_patterns
        regex_opts = @config.spooling.file_content_patterns_case_insensitive ? Regexp::IGNORECASE : nil
        @config.spooling.file_content_patterns.map! do |regex|
          begin
            Regexp.new(regex, regex_opts)
          rescue
            # BUGFIX: the template has two %s placeholders, so both the
            # offending regex and the error message must be supplied;
            # a lone string argument made String#% raise ArgumentError
            logger.fatal "Bad regex '%s' for spooling/file_content_patterns setting: %s" % [regex, $!.to_s]
            exit 1
          end
        end
      end

      @spool_files = {}               # file key => SpoolFile queued or being processed
      @spool_queue_keys = []          # FIFO list of keys awaiting scheduling
      @ongoing_processing_count = 0   # number of files currently being processed
      @load_ok = true                 # last known host load status (true = schedulable)
    end

    # Starts the EventMachine reactor, the filesystem watcher and the
    # periodic timers, then enqueues files already present in the spool.
    # Blocks until the reactor stops; exits (status 2) on fatal errors.
    def run
      logger.info "Spoolinger starting…"

      EventMachine.error_handler do |e|
        logger.error "Reactor error: " + e.to_s
        logger.debug e.backtrace.join("\n")
      end

      # max thread pool size and max concurrency should match
      # add one more for load check
      EventMachine.threadpool_size = @config.spooling.max_concurrency + 1

      EventMachine.run do
        # catch incoming files as soon as possible
        @watcher = IncomingWatcher.new(@config.spooling.watch_path) do |path|
          received_file(path)
        end

        @sched_timer = EM.add_periodic_timer(@config.spooling.reschedule_interval) { schedule_processing }
        if @config.load_check.script
          logger.info "Load check activated"
          @load_timer = EM.add_periodic_timer(@config.load_check.interval) { check_load }
        end

        logger.info "Spoolinger started"

        # care about files already there, sorted by name (which must
        # contain a number or timestamp)
        Dir.glob(File.join(@config.spooling.watch_path, "*")).sort.each {|f| received_file(f) }
      end

      logger.info "Spoolinger stopped"
    rescue Errno::EINTR
      # an interrupted syscall is expected while shutting down; only
      # treat it as fatal when no shutdown was requested
      unless @stopping
        logger.debug $!.backtrace.join("\n")
        logger.fatal "Error during processing: " + $!.to_s
        exit 2
      end
    rescue
      logger.debug $!.backtrace.join("\n")
      logger.fatal "Error during processing: " + $!.to_s
      exit 2
    end

    # Initiates a graceful shutdown: stop accepting work, then poll
    # every second until ongoing processing is done (see schedule_stop).
    # Idempotent: extra calls (e.g. repeated signals) are ignored.
    def stop
      return if @stopping

      @stopping = true

      @load_timer.cancel if @load_timer
      @watcher.stop
      @sched_timer.cancel

      EM.add_periodic_timer(1) { schedule_stop }
    end

    private

    # Registers a newly detected spool file, handling duplicate and
    # cancellation semantics, then tries to schedule processing.
    def received_file(filename)
      # reject non-regular files
      return unless File.file?(filename)

      begin
        file = SpoolFile.new(filename, @config.spooling)
      rescue
        # BUGFIX: restore the filename interpolation in the log message
        # (the literal placeholder text was being printed instead)
        logger.warn "Received file '#{filename}' with error: " + $!.to_s
        logger.debug "Crash trace: " + $!.backtrace.join("\n")
        return
      end
      # BUGFIX: same filename interpolation restored here
      logger.debug "New file '#{filename}' with key '#{file.key}'"

      previous_file = @spool_files[file.key]
      msg = "Processing operation for file '#{file.key}'"
      if previous_file
        if @config.spooling.cancel_key.nil? or file.cancel_key == previous_file.cancel_key
          # keep previous one if cancellation mode is off or if they agree
          logger.warn "#{msg} already queued"
        else
          # if they disagree, cancel operation
          logger.info "#{msg} canceled"
          delete_file(previous_file)
        end
        delete_file(file)
      else
        logger.info "#{msg} queued"
        @spool_files[file.key] = file
        @spool_queue_keys << file.key

        # see if it can be scheduled
        schedule_processing
      end
    end

    # Stops the reactor once the watcher is down and no processing is
    # still running; called every second after stop().
    def schedule_stop
      logger.info "Spoolinger stopping…"

      if @watcher.stopped and @ongoing_processing_count == 0
        EventMachine.stop_event_loop
      end
    end

    # Picks the next queued file (FIFO) and defers its processing to a
    # worker thread, honoring the concurrency limit, the host load
    # status and the min/max age settings.
    def schedule_processing

      # nothing to do
      return if @spool_files.empty?

      # program overloaded
      logger.debug "Current processing concurrency: #{@ongoing_processing_count}"
      return unless @ongoing_processing_count < @config.spooling.max_concurrency

      # host overloaded
      return unless @load_ok

      cb = Proc.new {|result| processing_finished(result) }

      # get next without removing from queue
      next_file_key = @spool_queue_keys.first
      next_file = @spool_files[next_file_key]

      now = Time.now.tv_sec
      if @config.spooling.min_age and now - next_file.arrival_ts < @config.spooling.min_age
        logger.debug "File '#{next_file.key}' is too young, not scheduled yet"
        return
      end

      # remove from queue (keep the on-disk file: it is either processed
      # right below or discarded by the max_age check)
      delete_file(next_file, false)

      if @config.spooling.max_age and now - next_file.arrival_ts > @config.spooling.max_age
        logger.info "File '#{next_file.key}' is too old, removed"
        return
      end

      logger.debug "Scheduling processing for file '#{next_file.key}'"
      @ongoing_processing_count += 1
      EventMachine.defer(nil, cb) do
        processing(next_file)
      end
    end

    # Forgets a file from the bookkeeping structures; also deletes it
    # from disk unless with_file is false.
    def delete_file(file, with_file = true)
      @spool_files.delete(file.key)
      @spool_queue_keys.delete(file.key)
      file.delete if with_file
    end

    # Runs the configured command for a file (executed in an EM worker
    # thread) and returns a result hash for processing_finished.
    def processing(file)
      logger.info "Processing file '#{file.key}'"

      command = file.template_replace(@config.spooling.command)

      logger.debug "Running command: " + command
      ok = system(command)

      {:ok => ok, :file => file}
    end

    # Completion callback (runs on the reactor thread): removes the
    # file on success, accounts for the freed worker slot and tries to
    # schedule the next file.
    def processing_finished(result)
      # in case of failure, the file admin can purge the spool or
      # restart the daemon to reschedule them, depending if the
      # failure is transient or not
      if result[:ok]
        logger.info "File '#{result[:file].key}' processed successfully"
        result[:file].delete
      else
        logger.error "File '#{result[:file].key}' processing failed"
      end

      @ongoing_processing_count -= 1

      # a thread is idle now!
      schedule_processing
    end

    # Runs the configured load check script asynchronously and updates
    # @load_ok from its result; at most one check runs at a time.
    def check_load
      return if @check_load_running
      @check_load_running = true

      cb = Proc.new do |result|
        logger.debug "Load check finished"

        # if nil an error occured during the check, then keeping the old value
        unless result.nil?
          @load_ok = result
          logger.debug "Load is: " + (@load_ok ? "OK": "TOO HIGH")
        end

        @check_load_running = false
      end

      begin
        @load_checker = LoadChecker.create(@config.load_check) unless @load_checker
      rescue
        @load_timer.cancel
        @load_timer = nil

        # BUGFIX: release the guard flag so internal state stays
        # consistent even after the check is deactivated
        @check_load_running = false

        logger.error "Load checker initialization failed, deactivated"

        return
      end

      EventMachine.defer(nil, cb) do
        logger.debug "Load check started"

        @load_checker.check
      end
    end

  end
end

# Boot sequence: load configuration, build the task, wire signal
# handlers for graceful shutdown, then enter the reactor loop.
Spoolinger::Config.load
prog = Spoolinger::SpoolingerTask.new

# Both termination signals trigger the same graceful stop.
%w[INT TERM].each do |signal|
  trap(signal) { prog.stop }
end

prog.run
