Commit af5af7eb by arjun.b

logging file added

parent 9079c3c0
......@@ -3,4 +3,7 @@ import uvicorn
from scripts.config.application_config import port_no
if __name__ == "__main__":
uvicorn.run("scripts.services.main:app", port=int(port_no), reload=True)
try:
    # Launch the FastAPI app via uvicorn; reload=True enables dev auto-reload.
    # port_no comes from scripts.config.application_config (cast: it may be a str).
    uvicorn.run("scripts.services.main:app", port=int(port_no), reload=True)
except Exception as e:
    # Best-effort: print the startup failure instead of an unhandled traceback.
    # NOTE(review): consider logging this once the logging module is wired in.
    print(str(e))
from pymongo import MongoClient
import logging
from pymongo import MongoClient
from scripts.config.application_config import mongo_uri
from scripts.utils.mongo_utils import create_collection
......@@ -15,6 +16,7 @@ def db_connect():
print("mongo connect successfully")
return database
except Exception as e:
logging.error("database connection error")
print(str(e))
......
# Logger configuration consumed by scripts/logging/logging.py:
# get_logger() reads logger.level, logger.name and logger.handlers.
# (Nesting reconstructed: the captured copy had its indentation stripped,
# which is invalid YAML.)
logger:
  name: fastapi
  level: DEBUG
  handlers:
    # Rotating file handler: writes <file_path><name>.log
    - type: RotatingFileHandler
      max_bytes: 100000000
      back_up_count: 5
    # Socket handler: ships records to a log server
    - type: SocketHandler
      host: localhost
      port: 8000
    # Plain console output
    - type: StreamHandler
  # NOTE(review): duplicate "name" key — most YAML loaders keep the last
  # value, so the log file becomes "fastapi crud.log". Confirm which name
  # is intended and delete the other.
  name: fastapi crud
import logging
import os
# from logging import StreamHandler
from logging.handlers import RotatingFileHandler, SocketHandler
import yaml
from logging.handlers import RotatingFileHandler
from scripts.config.application_config import file_path
# Reads the YAML logger configuration (e.g. scripts/logging/logger_conf.yml).
def read_configuration(file_name):
    """
    Load a YAML configuration file.

    :param file_name: path of the YAML file to read
    :return: the parsed configuration (typically a dict of constants),
             or None if the file content is not valid YAML
    :raises FileNotFoundError: if the file does not exist (open() is
            outside the try block, so it propagates as before)
    """
    with open(file_name, 'r') as stream:
        try:
            return yaml.safe_load(stream)
        except yaml.YAMLError as e:
            # Narrowed from `except Exception`: only parse errors are
            # expected here; anything else should surface to the caller.
            print(f"Failed to load Configuration. Error: {e}")
            return None
# Load the logger configuration once at import time; the "logger" section
# carries name, level and handler definitions used by get_logger().
config = read_configuration("scripts/logging/logger_conf.yml")
# NOTE(review): read_configuration returns None when the YAML fails to
# parse, which would make this subscript raise TypeError — confirm the
# config file is always present and valid at import time.
logging_config = config["logger"]
def get_logger():
    """
    Build the application logger from the YAML logger configuration.

    Creates one handler per entry in ``logging_config["handlers"]``
    (RotatingFileHandler, SocketHandler, StreamHandler) and attaches each
    to the root logger exactly once. When the config yields no usable
    handler, falls back to a rotating ``ERROR.log`` file handler.

    :return: the configured root logger
    """
    __logger__ = logging.getLogger('')
    # Honour the level declared in logger_conf.yml. The old code
    # immediately overrode it with a hard-coded "ERROR", silencing the
    # configured DEBUG/INFO output.
    __logger__.setLevel(logging_config["level"].upper())
    log_formatter = '%(asctime)s - %(levelname)-6s - [%(threadName)5s:%(funcName)5s():%(lineno)s] - %(message)s'
    time_format = "%Y-%m-%d %H:%M:%S"
    formatter = logging.Formatter(log_formatter, time_format)
    handler_added = False
    for each_handler in logging_config["handlers"]:
        handler_type = each_handler["type"]
        if handler_type == "RotatingFileHandler":
            if not os.path.exists(file_path):
                os.makedirs(file_path)
            log_file = os.path.join(f"{file_path}{logging_config['name']}.log")
            temp_handler = RotatingFileHandler(log_file,
                                               maxBytes=each_handler["max_bytes"],
                                               backupCount=each_handler["back_up_count"])
        elif handler_type == "SocketHandler":
            # Declared in logger_conf.yml but previously ignored.
            temp_handler = SocketHandler(each_handler["host"], each_handler["port"])
        elif handler_type == "StreamHandler":
            # Declared in logger_conf.yml but previously ignored.
            temp_handler = logging.StreamHandler()
        else:
            continue  # unknown handler type: skip it rather than crash
        temp_handler.setFormatter(formatter)
        # Add each handler exactly once. The old code unconditionally
        # re-added the last handler after the loop, so every record was
        # emitted twice through that handler.
        __logger__.addHandler(temp_handler)
        handler_added = True
    if not handler_added:
        # Preserve the old fallback: a rotating ERROR.log file handler
        # used when the config declares no recognised handler.
        if not os.path.exists(file_path):
            os.makedirs(file_path)
        log_file = os.path.join(f"{file_path}ERROR.log")
        fallback_handler = RotatingFileHandler(log_file,
                                               maxBytes=100000000,
                                               backupCount=5)
        fallback_handler.setFormatter(formatter)
        __logger__.addHandler(fallback_handler)
    return __logger__
......
......@@ -6,6 +6,7 @@ from scripts.constants.end_points import EndPoints
from scripts.core.handlers.fastapi_handler import fastAPIHandler
from scripts.database.model import employee_insert, employee_update
import pandas as pd
from scripts.logging.logging import logger
app = FastAPI()
connect(db="employees", host="localhost", port=int(port_connect))
......@@ -24,7 +25,7 @@ async def upload_file(file: UploadFile):
fastAPIHandler.file_upload(file_content)
return {"data": "CSV data uploaded successfully"}
except Exception as e:
logging.error(f'upload file failed:{e}')
logger.error(f'upload file failed:{e}')
print(e)
......@@ -35,7 +36,7 @@ def send_data(emp: employee_insert):
fastAPIHandler.insert_data(emp)
return {"message": "new data has been inserted"}
except Exception as e:
logging.error(f'inserting a document failed {e}')
logger.error(f'inserting a document failed {e}')
# update data
......@@ -45,7 +46,7 @@ def update_data(doc_id: int, details: employee_update):
fastAPIHandler.update_data(doc_id, details)
return {"data": "data updated"}
except Exception as e:
logging.error(f'updating the data failed {e}')
logger.error(f'updating the data failed {e}')
# delete data
......@@ -55,4 +56,4 @@ def delete_data(doc_id: int):
fastAPIHandler.delete_data(doc_id)
return {"data": "data deleted"}
except Exception as e:
logging.error(f'deleting a document failed {e}')
logger.error(f'deleting a document failed {e}')
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment