#!/opt/opsview/orchestrator/venv3/bin/python -u

import sys
import argparse
import getpass
import json
import csv

from datetime import datetime, time, timedelta
from collections import namedtuple, defaultdict

from opsview.common.datastore import DataStore
from opsview.common.util.confighelper import ConfigYamlFileReader, build_config_defaults
from opsview.orchestrator.config import Config
from opsview.orchestrator.resources import get_resource_dir
from opsview.common.database.directdata import PyMySQLDirectProvider

# Row shape for the hostgroup lookup query (SELECT id,name FROM hostgroups).
HostgroupResultRow = namedtuple('HostgroupResultRow', ['id', 'name' ])

class Runner(object):
    """Export notification logs from the datastore to a timestamped CSV in /tmp.

    Optionally resolves a hostgroup name to IDs via the master database, then
    pages through the notification-logs datastore and writes one CSV row per
    notification document.
    """

    # Human-readable labels for numeric host states.
    # NOTE(review): 0 is labelled "OK" rather than the conventional "UP" for
    # hosts — preserved from the original; confirm this is intended output.
    HOST_STATE = {0: "OK", 1: "DOWN", 2: "UNREACHABLE"}

    # Human-readable labels for numeric service states.
    SERVICE_STATE = {0: "OK", 1: "WARNING", 2: "CRITICAL", 3: "UNKNOWN"}

    def __init__(self, config):
        """Cache datastore and database connection settings from the orchestrator config.

        :param config: orchestrator Config object providing notification_logs_store,
            master_database_connection and opsview_database_name
        """
        self._ds_config = config.notification_logs_store
        self._db_connection = config.master_database_connection
        self._db_name = config.opsview_database_name
        self._db = PyMySQLDirectProvider()

    @staticmethod
    def _resolve_window(days, start_date, end_date):
        """Return the (start_time, end_time) datetime pair for the export window.

        :param days: inclusive look-back window in days, used when start_date is absent
        :param start_date: optional 'YYYY/MM/DD' string; start of day is used
        :param end_date: optional 'YYYY/MM/DD' string; end of day is used
        :raises ValueError: if a date string does not match 'YYYY/MM/DD'
        """
        if start_date:
            start_time = datetime.combine(datetime.strptime(start_date, '%Y/%m/%d'), time.min)
        else:
            # The window counts today inclusively, so N days starts N-1 days ago.
            start_time = datetime.combine(datetime.today() - timedelta(days=int(days) - 1), time.min)

        if end_date:
            end_time = datetime.combine(datetime.strptime(end_date, '%Y/%m/%d'), time.max)
        else:
            end_time = datetime.combine(datetime.today(), time.max)

        return start_time, end_time

    def _resolve_hostgroup_ids(self, hostgroup_name):
        """Return the IDs of hostgroups matching *hostgroup_name*; exits(1) if none.

        :param hostgroup_name: exact hostgroup name to look up
        """
        # Single quotes are doubled to neutralise quote break-out in the SQL
        # literal; a parameterized query would be preferable if the provider
        # supports bind variables — TODO confirm PyMySQLDirectProvider API.
        query = (
            "SELECT id,name FROM hostgroups"
            " WHERE name = '{}'".format(hostgroup_name.replace("'", "''"))
        )

        ids = [row.id for row in self._db.query_rows(query, row_type=HostgroupResultRow)]
        if not ids:
            print("FATAL: Hostgroup '{}' not found".format(hostgroup_name))
            sys.exit(1)
        return ids

    @classmethod
    def _format_row(cls, row):
        """Map one notification-log document onto the CSV column order.

        Columns: Timestamp, Hostname, Servicecheck, Status, Type, Output, Contacts.
        :param row: dict decoded from one datastore 'docs' entry
        """
        event_timestamp = datetime.fromtimestamp(row.get('notification_time'))
        csv_row = [event_timestamp.strftime("%Y-%m-%d %H:%M:%S")]

        if row.get("object_type") == "host":
            csv_row.append(row.get("hostname"))
            csv_row.append('n/a')
            csv_row.append(cls.HOST_STATE.get(row.get("state")))
        else:
            # object_name is assumed to be 'hostname::servicename'; a name with
            # extra '::' would spill into additional columns — TODO confirm.
            csv_row += row.get("object_name").split("::")
            csv_row.append(cls.SERVICE_STATE.get(row.get("state")))

        csv_row.append(row.get("notification_type"))
        csv_row.append(row.get("output"))
        # Fill the 'Contacts' header column the original left empty so every
        # row has 7 values. NOTE(review): field name 'contacts' is assumed —
        # confirm against the datastore document schema.
        csv_row.append(row.get("contacts", ""))
        return csv_row

    def run(self, verbose=False, hostgroup_name=False, days=7, start_date=False, end_date=False, page_size=200):
        """Run the export, writing progress to stdout and rows to a CSV in /tmp.

        :param verbose: accepted for CLI compatibility; currently unused
        :param hostgroup_name: optional hostgroup name to restrict the export to
        :param days: look-back window in days when start_date is not given
        :param start_date: optional inclusive start date, 'YYYY/MM/DD'
        :param end_date: optional inclusive end date, 'YYYY/MM/DD'
        :param page_size: number of log documents fetched per datastore query
        """
        ds_channel = DataStore.create_provider_channel(self._ds_config)
        self._db.connect(self._db_connection, database=self._db_name)

        start_time, end_time = self._resolve_window(days, start_date, end_date)

        print('start_time:', start_time.strftime("%Y/%m/%d %H:%M:%S"), flush=True)
        print('end_time:  ', end_time.strftime("%Y/%m/%d %H:%M:%S"), flush=True)

        # Resolve the provided hostgroup name into IDs (empty list = no filter).
        host_group_ids = []
        if hostgroup_name:
            host_group_ids = self._resolve_hostgroup_ids(hostgroup_name)
            print("hostgroup name: {}".format(hostgroup_name), flush=True)

        csv_filename = "/tmp/notifications_{}.csv".format(datetime.now().strftime("%Y%m%d.%H%M%S"))
        csv_header = ['Timestamp', 'Hostname', 'Servicecheck', 'Status', 'Type', 'Output', 'Contacts']

        count = 0
        print("Writing to: {}".format(csv_filename), flush=True)
        print("Starting export at: {}".format(datetime.now()), flush=True)
        export_start = datetime.now()
        print("Processing: {}  ".format(count), end='\r', flush=True)

        # Context manager guarantees the CSV is closed even if a query raises.
        with open(csv_filename, 'w', encoding='UTF8', newline='') as f:
            writer = csv.writer(f, quoting=csv.QUOTE_ALL)
            writer.writerow(csv_header)

            page = 0
            while True:
                page += 1
                # Fetch one page of 'page_size' docs, skipping the pages already seen.
                notification_logs = ds_channel.query_notification_logs(
                        page_size,
                        start_time,
                        (page - 1) * page_size,
                        end_time,
                        None,   # contact_name filter (unused)
                        host_group_ids,
                        None,   # hostnames filter (unused)
                        None,   # servicenames filter (unused)
                        None,   # host_states filter (unused)
                        None)   # service_states filter (unused)

                rows = json.loads(notification_logs).get('docs')

                # An empty page means the datastore is exhausted.
                if not rows:
                    break

                for row in rows:
                    count += 1
                    print("Processing: {}  ".format(count), end='\r', flush=True)
                    writer.writerow(self._format_row(row))

        print(flush=True)
        print("Ending export at: {}".format(datetime.now()), flush=True)
        export_duration = datetime.now() - export_start
        print("Export duration: {}".format(export_duration), flush=True)


def get_args():
    """Build the CLI parser for the export script and return the parsed arguments."""
    # (flags, add_argument keyword options) — one entry per CLI option.
    option_specs = [
        (('-v', '--verbose'), dict(help="Display verbose information.", action='store_true')),
        (('-g', '--hostgroup'), dict(help="Limit to the specified Hostgroup.", required=False, default=None)),
        (('-d', '--days'), dict(help="Limit to the last N days (default: 7).", required=False, default=7)),
        (('-s', '--start_date'), dict(help="Start date for the export (YYYY/MM/DD)", required=False, default=None)),
        (('-e', '--end_date'), dict(help="End date for the export (YYYY/MM/DD)", required=False, default=None)),
        (('-p', '--page_size'), dict(help="Page size to fetch", required=False, default=200, type=int)),
    ]

    parser = argparse.ArgumentParser(description="Export notifications log.")
    for flags, options in option_specs:
        parser.add_argument(*flags, **options)
    return parser.parse_args()

def main():
    """Entry point: load the orchestrator config, parse CLI args, run the export."""
    base_defaults, defaults = build_config_defaults(
        'orchestrator',
        input_dir=get_resource_dir(),
        output_path=Config.get_defaults_file_path(),
    )
    reader = ConfigYamlFileReader(base_defaults, defaults, Config.get_config_file_path())
    config = Config(reader)

    cli = get_args()

    runner = Runner(config)
    runner.run(
        verbose=cli.verbose,
        hostgroup_name=cli.hostgroup,
        days=cli.days,
        start_date=cli.start_date,
        end_date=cli.end_date,
        page_size=cli.page_size,
    )

if __name__ == '__main__':
    try:
        # sys.exit is always available; the builtin exit() is provided by the
        # site module and is not guaranteed under all interpreter invocations.
        sys.exit(main())
    except KeyboardInterrupt:
        # Distinct non-zero status for operator interruption.
        sys.exit(2)