Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

#274 Remove all elasticsearch references

Merged
GitHub User merged 1 commit into Deci-AI:master from deci-ai:feature/LAB-0000_remove_elasticsearch_references
@@ -1,8 +1,6 @@
 torch>=1.9.0
 torch>=1.9.0
 tqdm>=4.57.0
 tqdm>=4.57.0
 boto3>=1.17.15
 boto3>=1.17.15
-elasticsearch==7.15.2
-CMRESHandler>=1.0.0
 jsonschema>=3.2.0
 jsonschema>=3.2.0
 Deprecated>=1.2.11
 Deprecated>=1.2.11
 opencv-python>=4.5.1
 opencv-python>=4.5.1
Discard
@@ -4,15 +4,13 @@ import logging.config
 from super_gradients.common.auto_logging import AutoLoggerConfig
 from super_gradients.common.auto_logging import AutoLoggerConfig
 from super_gradients.common.environment.environment_config import DEFAULT_LOGGING_LEVEL
 from super_gradients.common.environment.environment_config import DEFAULT_LOGGING_LEVEL
 
 
-# Do not remove, it's necessary for the get_logger functionality.
-from cmreslogging.handlers import CMRESHandler
 
 
-
-def get_logger(logger_name: str, training_log_path=None, logs_dir_path=None, log_level=DEFAULT_LOGGING_LEVEL) -> logging.Logger:
-    config_dict = AutoLoggerConfig.generate_config_for_module_name(module_name=logger_name,
-                                                                   training_log_path=training_log_path,
-                                                                   logs_dir_path=logs_dir_path,
-                                                                   log_level=log_level)
+def get_logger(
+    logger_name: str, training_log_path=None, logs_dir_path=None, log_level=DEFAULT_LOGGING_LEVEL
+) -> logging.Logger:
+    config_dict = AutoLoggerConfig.generate_config_for_module_name(
+        module_name=logger_name, training_log_path=training_log_path, logs_dir_path=logs_dir_path, log_level=log_level
+    )
     logging.config.dictConfig(config_dict)
     logging.config.dictConfig(config_dict)
     logger: logging.Logger = logging.getLogger(logger_name)
     logger: logging.Logger = logging.getLogger(logger_name)
     return logger
     return logger
Discard
@@ -3,9 +3,7 @@ import os
 
 
 import pkg_resources
 import pkg_resources
 
 
-from super_gradients.common.aws_connection.aws_secrets_manager_connector import AWSSecretsManagerConnector
-from super_gradients.common.environment import AWS_ENV_NAME
-from super_gradients.common.environment.environment_config import DONT_USE_ELASTICSEARCH_LOGGER, DEFAULT_LOGGING_LEVEL
+from super_gradients.common.environment.environment_config import DEFAULT_LOGGING_LEVEL
 
 
 
 
 class AutoLoggerConfig:
 class AutoLoggerConfig:
@@ -14,8 +12,14 @@ class AutoLoggerConfig:
     """
     """
 
 
     @staticmethod
     @staticmethod
-    def generate_config_for_module_name(module_name, training_log_path=None, log_level=DEFAULT_LOGGING_LEVEL, max_bytes=10485760, logs_dir_path=None,
-                                        handlers_list=None) -> dict:
+    def generate_config_for_module_name(
+        module_name,
+        training_log_path=None,
+        log_level=DEFAULT_LOGGING_LEVEL,
+        max_bytes=10485760,
+        logs_dir_path=None,
+        handlers_list=None,
+    ) -> dict:
         """
         """
         generate_config_for_module_name - Returns a Config Dict For Logging
         generate_config_for_module_name - Returns a Config Dict For Logging
             :param module_name:     The Python Module name to create auto_logging for
             :param module_name:     The Python Module name to create auto_logging for
@@ -33,41 +37,40 @@ class AutoLoggerConfig:
         """
         """
 
 
         # LOADING THE ORIGINAL ROOT CONFIG FILE
         # LOADING THE ORIGINAL ROOT CONFIG FILE
-        conf_file_name = 'auto_logging_conf.json'
-        conf_file_path = os.path.join(pkg_resources.resource_filename('super_gradients', '/common/auto_logging/'),
-                                      conf_file_name)
+        conf_file_name = "auto_logging_conf.json"
+        conf_file_path = os.path.join(
+            pkg_resources.resource_filename("super_gradients", "/common/auto_logging/"), conf_file_name
+        )
 
 
-        with open(conf_file_path, 'r') as logging_configuration_file:
+        with open(conf_file_path, "r") as logging_configuration_file:
             config_dict = json.load(logging_configuration_file)
             config_dict = json.load(logging_configuration_file)
 
 
         # CREATING THE PATH TO THE "HOME" FOLDER WITH THE LOG FILE NAME
         # CREATING THE PATH TO THE "HOME" FOLDER WITH THE LOG FILE NAME
         if not logs_dir_path:
         if not logs_dir_path:
-            log_file_name = module_name + '.log'
+            log_file_name = module_name + ".log"
             user_dir = os.path.expanduser(r"~")
             user_dir = os.path.expanduser(r"~")
-            logs_dir_path = os.path.join(user_dir, 'sg_logs')
+            logs_dir_path = os.path.join(user_dir, "sg_logs")
 
 
         if not os.path.exists(logs_dir_path):
         if not os.path.exists(logs_dir_path):
             try:
             try:
                 os.mkdir(logs_dir_path)
                 os.mkdir(logs_dir_path)
             except Exception as ex:
             except Exception as ex:
-                print('[WARNING] - sg_logs folder was not found and couldn\'t be created from the code - '
-                      'All of the Log output will be sent to Console!' + str(ex))
+                print(
+                    "[WARNING] - sg_logs folder was not found and couldn't be created from the code - "
+                    "All of the Log output will be sent to Console!" + str(ex)
+                )
 
 
             # HANDLERS LIST IS EMPTY AS CONSOLE IS ONLY ROOT HANDLER BECAUSE MODULE LOGGERS PROPAGATE THEIR LOGS UP.
             # HANDLERS LIST IS EMPTY AS CONSOLE IS ONLY ROOT HANDLER BECAUSE MODULE LOGGERS PROPAGATE THEIR LOGS UP.
             handlers_list = []
             handlers_list = []
-            logger = {
-                "level": log_level,
-                "handlers": handlers_list,
-                "propagate": True
-            }
-            config_dict['loggers'][module_name] = logger
+            logger = {"level": log_level, "handlers": handlers_list, "propagate": True}
+            config_dict["loggers"][module_name] = logger
 
 
             return config_dict
             return config_dict
 
 
         log_file_path = os.path.join(logs_dir_path, log_file_name)
         log_file_path = os.path.join(logs_dir_path, log_file_name)
 
 
         # THE ENTRIES TO ADD TO THE ORIGINAL CONFIGURATION
         # THE ENTRIES TO ADD TO THE ORIGINAL CONFIGURATION
-        handler_name = module_name + '_file_handler'
+        handler_name = module_name + "_file_handler"
         file_handler = {
         file_handler = {
             "class": "logging.handlers.RotatingFileHandler",
             "class": "logging.handlers.RotatingFileHandler",
             "level": log_level,
             "level": log_level,
@@ -75,33 +78,19 @@ class AutoLoggerConfig:
             "filename": log_file_path,
             "filename": log_file_path,
             "maxBytes": max_bytes,
             "maxBytes": max_bytes,
             "backupCount": 20,
             "backupCount": 20,
-            "encoding": "utf8"
+            "encoding": "utf8",
         }
         }
 
 
         # CREATING ONLY A FILE HANDLER, CONSOLE IS ONLY ROOT HANDLER AS MODULE LOGGERS PROPAGATE THEIR LOGS UP.
         # CREATING ONLY A FILE HANDLER, CONSOLE IS ONLY ROOT HANDLER AS MODULE LOGGERS PROPAGATE THEIR LOGS UP.
         if handlers_list is None or handlers_list.empty():
         if handlers_list is None or handlers_list.empty():
             handlers_list = [handler_name]
             handlers_list = [handler_name]
 
 
-        logger = {
-            "level": log_level,
-            "handlers": handlers_list,
-            "propagate": True
-        }
+        logger = {"level": log_level, "handlers": handlers_list, "propagate": True}
 
 
         # ADDING THE NEW LOGGER ENTRIES TO THE CONFIG DICT
         # ADDING THE NEW LOGGER ENTRIES TO THE CONFIG DICT
-        config_dict['handlers'][handler_name] = file_handler
-        config_dict['loggers'][module_name] = logger
-        config_dict['root']['handlers'].append(handler_name)
-
-        if DONT_USE_ELASTICSEARCH_LOGGER:
-            return config_dict
-
-        # Creating a ElasticSearch handler
-        elastic_handler, elastic_handler_name = AutoLoggerConfig.configure_elasticsearch_handler(config_dict,
-                                                                                                 module_name)
-        if elastic_handler and elastic_handler_name:
-            handlers_list.append(elastic_handler_name)
-            config_dict['handlers'][elastic_handler_name] = elastic_handler
+        config_dict["handlers"][handler_name] = file_handler
+        config_dict["loggers"][module_name] = logger
+        config_dict["root"]["handlers"].append(handler_name)
 
 
         if training_log_path:
         if training_log_path:
             training_file_handler = {
             training_file_handler = {
@@ -111,64 +100,11 @@ class AutoLoggerConfig:
                 "filename": training_log_path,
                 "filename": training_log_path,
                 "maxBytes": max_bytes,
                 "maxBytes": max_bytes,
                 "backupCount": 20,
                 "backupCount": 20,
-                "encoding": "utf8"
+                "encoding": "utf8",
             }
             }
 
 
             # ALL OF DECI_TRAINER MODULES LOGGERS PROPAGATE UP TO THE ROOT SO THE ADD TRAIN FILE HANDLER FOR THE ROOT.
             # ALL OF DECI_TRAINER MODULES LOGGERS PROPAGATE UP TO THE ROOT SO THE ADD TRAIN FILE HANDLER FOR THE ROOT.
-            config_dict['handlers']["training"] = training_file_handler
-            config_dict['root']['handlers'].append("training")
+            config_dict["handlers"]["training"] = training_file_handler
+            config_dict["root"]["handlers"].append("training")
 
 
         return config_dict
         return config_dict
-
-    @staticmethod
-    def configure_elasticsearch_handler(config_dict: dict, module_name: str):
-        """
-        Configures the ElasticSearch loggeing handler through an matching library.
-        """
-        # Getting the elasticsearch secrets
-        if not AWS_ENV_NAME:
-            return None, None
-
-        try:
-            elastic_secrets = AWSSecretsManagerConnector. \
-                get_secret_values_dict_for_secret_key_properties(env=AWS_ENV_NAME,
-                                                                 secret_name='elasticLogging',
-                                                                 secret_key='ELASTIC')
-
-            # logging_user_name = elastic_secrets['ELASTIC.USERNAME']
-            # logging_user_password = elastic_secrets['ELASTIC.PASSWORD']
-            elastic_host = elastic_secrets['ELASTIC.HOST']
-            elastic_port = int(elastic_secrets['ELASTIC.PORT'])
-            elastic_index_name = elastic_secrets['ELASTIC.DEFAULT_INDEX_NAME']
-            flush_frequency = int(elastic_secrets['ELASTIC.FLUSH_FREQUENCY_SECONDS'])
-
-            # We import here because not everybody may want elasticsearch handler, thus doesn't need CMRESHandler library.
-            from cmreslogging.handlers import CMRESHandler
-            config_dict['handlers']['elasticsearch'] = {
-                "level": "DEBUG",
-                "class": "cmreslogging.handlers.CMRESHandler",
-                "hosts": [
-                    {
-                        "host": elastic_host,
-                        "port": elastic_port
-                    }
-                ],
-                "es_index_name": elastic_index_name,
-                "es_additional_fields": {
-                    "App": "Deci",
-                    "Environment": AWS_ENV_NAME
-                },
-                "auth_type": CMRESHandler.AuthType.NO_AUTH,
-                # "auth_details": [
-                #     logging_user_name,
-                #     logging_user_password
-                # ],
-                "use_ssl": True,
-                "flush_frequency_in_sec": flush_frequency
-            }
-            elastic_handler = config_dict['handlers']['elasticsearch']
-            elastic_handler_name = module_name + '_elastic_handler'
-            return elastic_handler, elastic_handler_name
-        except Exception as e:
-            print(f'Failed to get the elasticsearch logging secrets: {e}')
-            return None, None
Discard
@@ -1,26 +1,25 @@
 import logging
 import logging
 from os import environ
 from os import environ
 
 
-AWS_ENV_NAME = environ.get('ENVIRONMENT_NAME')
+AWS_ENV_NAME = environ.get("ENVIRONMENT_NAME")
 
 
-AWS_ENVIRONMENTS = ['development', 'staging', 'production']
+AWS_ENVIRONMENTS = ["development", "staging", "production"]
 if AWS_ENV_NAME not in AWS_ENVIRONMENTS:
 if AWS_ENV_NAME not in AWS_ENVIRONMENTS:
     if AWS_ENV_NAME is None:
     if AWS_ENV_NAME is None:
         if AWS_ENV_NAME not in AWS_ENVIRONMENTS:
         if AWS_ENV_NAME not in AWS_ENVIRONMENTS:
             print(
             print(
-                f'You did not mention an AWS environment.'
-                f'You can set the environment variable ENVIRONMENT_NAME with one of the values: {",".join(AWS_ENVIRONMENTS)}')
+                f"You did not mention an AWS environment."
+                f'You can set the environment variable ENVIRONMENT_NAME with one of the values: {",".join(AWS_ENVIRONMENTS)}'
+            )
         else:
         else:
             print(
             print(
-                f'Bad AWS environment name: {AWS_ENV_NAME}. Please set an environment variable named ENVIRONMENT_NAME with one of the values: {",".join(AWS_ENVIRONMENTS)}')
-
-# If this flag exists, ElasticSerach handler in every ILogger class (auto-logger) will be disabled.
-# For standalone super_gradients.infra clients.
-DONT_USE_ELASTICSEARCH_LOGGER = environ.get('DONT_USE_ELASTICSEARCH_LOGGER') is not None
+                f'Bad AWS environment name: {AWS_ENV_NAME}. Please set an environment variable named ENVIRONMENT_NAME with one of the values: {",".join(AWS_ENVIRONMENTS)}'
+            )
 
 
 # Controlling the default logging level via environment variable
 # Controlling the default logging level via environment variable
-DEFAULT_LOGGING_LEVEL = environ.get('LOG_LEVEL', 'INFO').upper()
+DEFAULT_LOGGING_LEVEL = environ.get("LOG_LEVEL", "INFO").upper()
 logging.basicConfig(
 logging.basicConfig(
-    level=DEFAULT_LOGGING_LEVEL)  # Set the default level for all libraries - including 3rd party packages
+    level=DEFAULT_LOGGING_LEVEL
+)  # Set the default level for all libraries - including 3rd party packages
 
 
 DDP_LOCAL_RANK = -1
 DDP_LOCAL_RANK = -1
Discard