Commit fcda9046 by ajil.k

updated

parent a5cc28e9
FROM_DATE=2022-01-01 00:00:00.000
TO_DATE=2023-02-08 00:00:00.000
URL=http://192.168.0.220:8080/api/v1/datapoints/query
MONGO_URI=192.168.0.220:2717
PASSWORD=iLensDevMongo783
USER=admin
HIERARCHY_ID=site_107
PROJECT_ID=project_102
# Runtime configuration pulled from environment variables (populated from .env).
# Each value is None when the variable is unset — downstream code must tolerate that.
from_date = os.getenv("FROM_DATE")
to_date = os.getenv("TO_DATE")
url = os.getenv("URL")
mongodb_uri = os.getenv("MONGO_URI")
project_id = os.getenv("PROJECT_ID")
hierarchy_id = os.getenv("HIERARCHY_ID")
# NOTE(review): "USER" is a standard Unix environment variable (the login name),
# so a missing .env entry silently yields the OS user instead of the Mongo
# user — confirm this is intended or rename the variable (e.g. MONGO_USER).
username = os.getenv("USER")
password = os.getenv("PASSWORD")
......@@ -15,6 +15,6 @@ def convert_timestamp(timestamp):
def convert_epoch(epoch):
# Dividing by 1000 to ignore milliseconds
epoch /= 1000
#
# Format the timestamp
timestamp = datetime.fromtimestamp(epoch).strftime('%Y-%m-%d %H:%M:%S')
return timestamp
import json
import pandas as pd
from script.core.handlers.convert_time import convert_epoch
def generate_csv_data(tag_values, response_data):
    """Build and print a per-tag DataFrame from a KairosDB query response.

    :param tag_values: iterable of tag names, one column per tag.
    :param response_data: raw JSON text of the KairosDB response.

    Errors are logged and printed; nothing is returned.
    """
    try:
        response = json.loads(response_data)
        # Extract the [epoch_ms, value] pairs of the first result of the query.
        values = response["queries"][0]["results"][0]["values"]
        timestamps = [convert_epoch(pair[0]) for pair in values]
        readings = [pair[1] for pair in values]
        for tag_name in tag_values:
            # NOTE(review): every tag is paired with the same value series from
            # the single result above — confirm whether each tag should instead
            # map to its own entry in the response.
            dataframe = pd.DataFrame({'Timestamp': timestamps, tag_name: readings})
            print(dataframe)
    except Exception as e:
        from script.logging.logger import logger
        logger.exception(e)
        print("Error when generating data - ", e)
from pymongo import MongoClient
from script.constants.app_config import mongodb_uri, project_id, site_id, site_name, line_id, equipment_id
import pandas as pd
from script.constants.app_config import mongodb_uri, hierarchy_id, username, password
# Single authenticated Mongo client; the earlier unauthenticated
# MongoClient(mongodb_uri) construction was leftover from a previous revision
# and would leak an extra connection pool.
client = MongoClient(mongodb_uri,
                     username=username,
                     password=password)
# Tag/hierarchy metadata lives in the iLens configuration database.
database = client['ilens_configuration']
collection = database['tag_hierarchy']
......@@ -11,16 +14,83 @@ class SendData:
def extract_data():
    """Fetch one row per tag under the configured hierarchy.

    Filters the ``tag_hierarchy`` collection by the parts of HIERARCHY_ID,
    joins each row with its tag document from the ``tags`` collection, and
    returns the result as a pandas DataFrame.

    :return: DataFrame with site/line/equipment ids and names, tag id and
        tag name — or None if the aggregation fails (the error is printed).
    """
    try:
        tag_hierarchy = collection
        # HIERARCHY_ID is a '$'-separated string such as
        # "site_107$line_4901$equipment_6054"; map each part onto the
        # matching filter field.
        query = {}
        for each_hierarchy in hierarchy_id.split('$'):
            if "site_" in each_hierarchy:
                query["site_id"] = each_hierarchy
            elif "line_" in each_hierarchy:
                query["line_id"] = each_hierarchy
            elif "equipment_" in each_hierarchy:
                query["equipment_id"] = each_hierarchy
        pipeline = [
            {"$match": query},
            # Pull the tag document whose id matches this row's parameter_id.
            {"$lookup": {
                "from": "tags",
                "localField": "parameter_id",
                "foreignField": "id",
                "as": "tag_details",
            }},
            {"$unwind": "$tag_details"},
            # Deduplicate: keep only the first document seen for each tag id.
            {"$group": {
                "_id": "$id",
                "first_document": {"$first": "$$ROOT"},
            }},
            {"$replaceRoot": {"newRoot": "$first_document"}},
            # Shape the output rows; drop Mongo's _id.
            {"$project": {
                "site_id": "$site_id",
                "line_id": "$line_id",
                "equipment_id": "$equipment_id",
                "site_name": "$site_name",
                "dept_name": "$dept_name",
                "line_name": "$line_name",
                "equipment_name": "$equipment_name",
                "id": "$id",
                "tag_name": "$tag_details.tag_name",
                "_id": 0,
            }},
        ]
        tag_names = tag_hierarchy.aggregate(pipeline)
        return pd.DataFrame(list(tag_names))
    except Exception as e:
        # NOTE(review): failures only print and implicitly return None;
        # callers must handle a None result.
        print(e)
@staticmethod
def final_dict(dataframe):
try:
final_data = {}
for index, row in dataframe.iterrows():
hierarchy_name = ""
if row["site_name"] is not None:
hierarchy_name = row["site_name"]
if row["line_name"] != "":
hierarchy_name = hierarchy_name + ">" + row["line_name"]
if row["equipment_name"] != "":
hierarchy_name = hierarchy_name + ">" + row["equipment_name"]
if row["dept_name"] != "":
hierarchy_name = hierarchy_name + ">" + row["dept_name"]
if row["tag_name"] != "":
hierarchy_name = hierarchy_name + ":" + row["tag_name"]
new_dict = {row["id"]: hierarchy_name}
final_data.update(new_dict)
return final_data
except Exception as e:
print(e)
import logging
import os
from logging.handlers import RotatingFileHandler
def get_logger():
    """
    Create (or reuse) a rotating file logger writing to temp/logs.log.

    :return: the root logger, at INFO level, with a single
        RotatingFileHandler attached.
    """
    __logger__ = logging.getLogger('')
    __logger__.setLevel(logging.INFO)
    log_formatter = '%(asctime)s - %(levelname)-6s - %(message)s'
    time_format = "%Y-%m-%d %H:%M:%S"
    file_path = "temp/"
    file_name = "logs"
    formatter = logging.Formatter(log_formatter, time_format)
    if not os.path.exists(file_path):
        os.makedirs(file_path)
    # Join directory and file name properly — the previous single-argument
    # os.path.join(f"{file_path}{file_name}.log") was a no-op wrapper.
    log_file = os.path.join(file_path, f"{file_name}.log")
    # Don't stack a duplicate handler each time get_logger() is called.
    already_attached = any(
        isinstance(h, RotatingFileHandler)
        and getattr(h, "baseFilename", "").endswith(f"{file_name}.log")
        for h in __logger__.handlers
    )
    if not already_attached:
        # 1 MB per file with 3 backups; the previous maxBytes=1 with the
        # default backupCount=0 truncated the log on every single record.
        temp_handler = RotatingFileHandler(log_file, maxBytes=1_000_000,
                                           backupCount=3)
        temp_handler.setFormatter(formatter)
        __logger__.addHandler(temp_handler)
    return __logger__


logger = get_logger()
import json
import requests
from script.constants.app_config import from_date, to_date, url
from script.core.handlers.convert_time import convert_timestamp, convert_epoch
from script.core.handlers.convert_time import convert_timestamp
from script.core.handlers.generate_data import generate_csv_data
from script.core.handlers.send_data import SendData
......@@ -10,14 +11,26 @@ def start_service_on_kairosdb():
start_absolute = convert_timestamp(from_date)
end_absolute = convert_timestamp(to_date)
query_url = url
tags = ["site_114$line_4901$equipment_6054$tag_61534"]
mongo_obj = SendData()
df = mongo_obj.extract_data()
tag_value = mongo_obj.final_dict(df)
dicts_key_list = list(tag_value.keys())
query = {
"metrics": [
{
"tags": {
"c3": tags
"c3": dicts_key_list
},
"name": "ilens.live_data.raw",
"group_by": [
{
"name": "tag",
"tags": [
"c3"
]
}
],
"aggregators": [
{
"name": "sum",
......@@ -37,11 +50,9 @@ def start_service_on_kairosdb():
"end_absolute": end_absolute
}
data = json.dumps(query)
response = requests.post(query_url, data=data)
query_data = json.dumps(query)
response = requests.post(query_url, data=query_data)
print(response.text)
data = json.dumps(response.text)
timestamp = convert_epoch(1675693206000)
print(timestamp)
#generate_csv_data(dicts_key_list, response.text)
except Exception as e:
print("Error -", e)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment