Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
K
kairosdb_task
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
ajil.k
kairosdb_task
Commits
fcda9046
Commit
fcda9046
authored
Feb 08, 2023
by
ajil.k
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
updated
parent
a5cc28e9
Hide whitespace changes
Inline
Side-by-side
Showing
7 changed files
with
165 additions
and
26 deletions
+165
-26
.env
+9
-4
script/constants/app_config.py
+5
-0
script/core/handlers/convert_time.py
+1
-1
script/core/handlers/generate_data.py
+21
-0
script/core/handlers/send_data.py
+83
-13
script/logging/logger.py
+27
-0
script/services/app_call.py
+19
-8
No files found.
.env
View file @
fcda9046
FROM_DATE=202
3
-01-01 00:00:00.000
FROM_DATE=202
2
-01-01 00:00:00.000
TO_DATE=2023-02-0
7
00:00:00.000
TO_DATE=2023-02-0
8
00:00:00.000
URL=http://192.168.0.220:8080/api/v1/datapoints/query
URL=http://192.168.0.220:8080/api/v1/datapoints/query
\ No newline at end of file
MONGO_URI=192.168.0.220:2717
PASSWORD=iLensDevMongo783
USER=admin
HIERARCHY_ID=site_107
PROJECT_ID=project_102
script/constants/app_config.py
View file @
fcda9046
...
@@ -7,3 +7,8 @@ from_date = os.getenv("FROM_DATE")
...
@@ -7,3 +7,8 @@ from_date = os.getenv("FROM_DATE")
# Configuration values read from environment variables (populated from .env).
# os.getenv returns None when a variable is unset — downstream code assumes
# these are all populated; verify the .env file defines each key.
to_date = os.getenv("TO_DATE")
url = os.getenv("URL")  # KairosDB datapoints query endpoint
# MongoDB connection settings (used by script/core/handlers/send_data.py).
mongodb_uri = os.getenv("MONGO_URI")
project_id = os.getenv("PROJECT_ID")
hierarchy_id = os.getenv("HIERARCHY_ID")  # '$'-separated hierarchy ids, e.g. "site_107"
username = os.getenv("USER")  # MongoDB credentials
password = os.getenv("PASSWORD")
script/core/handlers/convert_time.py
View file @
fcda9046
...
@@ -15,6 +15,6 @@ def convert_timestamp(timestamp):
...
@@ -15,6 +15,6 @@ def convert_timestamp(timestamp):
def convert_epoch(epoch):
    """Convert an epoch timestamp in milliseconds to a 'YYYY-MM-DD HH:MM:SS' string.

    Uses the local timezone (datetime.fromtimestamp with no tz argument).
    """
    # KairosDB timestamps carry milliseconds; drop them before conversion.
    seconds = epoch / 1000
    return datetime.fromtimestamp(seconds).strftime('%Y-%m-%d %H:%M:%S')
script/core/handlers/generate_data.py
0 → 100644
View file @
fcda9046
import
json
import
pandas
as
pd
from
script.core.handlers.convert_time
import
convert_epoch
def generate_csv_data(tag_values, response_data):
    """Parse a KairosDB query response and print one DataFrame per tag.

    Args:
        tag_values: iterable of tag names; each becomes a DataFrame column name.
        response_data: raw JSON text of a KairosDB /datapoints/query response.

    Only the first result of the first query is read. Every tag is paired
    with the same value series — presumably the caller queries one tag at a
    time; verify against the caller if multiple tags are passed.
    Errors are logged and printed, never raised.
    """
    try:
        payload = json.loads(response_data)
        # Extract the [timestamp, value] pairs from the query result.
        values = payload["queries"][0]["results"][0]["values"]
        converted_epochs = [point[0:1] and convert_epoch(point[0]) for point in values]
        readings = [point[1] for point in values]
        for tag_name in tag_values:
            dataframe = pd.DataFrame({'Timestamp': converted_epochs,
                                      tag_name: readings})
            print(dataframe)
    except Exception as e:
        # Imported lazily to avoid a circular import at module load time.
        from script.logging.logger import logger
        logger.exception(e)
        print("Error when generating data - ", e)
script/core/handlers/send_data.py
View file @
fcda9046
from pymongo import MongoClient
import pandas as pd
from script.constants.app_config import mongodb_uri, hierarchy_id, username, password

# Module-level MongoDB handles created at import time.
# NOTE(review): MongoClient connects lazily — bad credentials or an
# unreachable host only surface on the first actual operation.
client = MongoClient(mongodb_uri, username=username, password=password)
database = client['ilens_configuration']
collection = database['tag_hierarchy']
class SendData:
    """Read tag-hierarchy metadata from MongoDB and map tag ids to display names."""

    @staticmethod
    def extract_data():
        """Query the tag_hierarchy collection and return a pandas DataFrame.

        Declared @staticmethod: the caller invokes it as
        ``SendData().extract_data()``, which would pass ``self`` to a plain
        zero-argument function and raise TypeError.

        Returns:
            pandas.DataFrame with one row per tag (site/line/equipment ids and
            names, dept_name, tag id and tag_name), or None on any error
            (errors are printed, not raised).
        """
        try:
            tag_hierarchy = collection
            # Build the $match filter from the '$'-separated HIERARCHY_ID value.
            query = {}
            for each_hierarchy in hierarchy_id.split('$'):
                if "site_" in each_hierarchy:
                    query["site_id"] = each_hierarchy
                elif "line_" in each_hierarchy:
                    query["line_id"] = each_hierarchy
                elif "equipment_" in each_hierarchy:
                    query["equipment_id"] = each_hierarchy
            pipeline = [
                {"$match": query},
                # Join each hierarchy row with its tag document
                # (tag_hierarchy.parameter_id -> tags.id).
                {'$lookup': {'from': 'tags', 'localField': 'parameter_id',
                             'foreignField': 'id', 'as': 'tag_details'}},
                {'$unwind': '$tag_details'},
                # Keep a single document per tag id.
                {'$group': {'_id': '$id', 'first_document': {'$first': '$$ROOT'}}},
                {'$replaceRoot': {'newRoot': '$first_document'}},
                {'$project': {'site_id': '$site_id', 'line_id': '$line_id',
                              'equipment_id': '$equipment_id',
                              'site_name': '$site_name', 'dept_name': '$dept_name',
                              'line_name': '$line_name',
                              'equipment_name': '$equipment_name', 'id': '$id',
                              'tag_name': '$tag_details.tag_name', '_id': 0}},
            ]
            data = list(tag_hierarchy.aggregate(pipeline))
            return pd.DataFrame(data)
        except Exception as e:
            print(e)

    @staticmethod
    def final_dict(dataframe):
        """Build ``{tag_id: "site>line>equipment>dept:tag"}`` from extract_data's frame.

        Empty name components are skipped; the tag name is appended after a
        colon. Returns None on any error (errors are printed, not raised).
        """
        try:
            final_data = {}
            for _, row in dataframe.iterrows():
                hierarchy_name = ""
                if row["site_name"] is not None:
                    hierarchy_name = row["site_name"]
                if row["line_name"] != "":
                    hierarchy_name = hierarchy_name + ">" + row["line_name"]
                if row["equipment_name"] != "":
                    hierarchy_name = hierarchy_name + ">" + row["equipment_name"]
                if row["dept_name"] != "":
                    hierarchy_name = hierarchy_name + ">" + row["dept_name"]
                if row["tag_name"] != "":
                    hierarchy_name = hierarchy_name + ":" + row["tag_name"]
                final_data[row["id"]] = hierarchy_name
            return final_data
        except Exception as e:
            print(e)
script/logging/logger.py
0 → 100644
View file @
fcda9046
import
logging
import
os
from
logging.handlers
import
RotatingFileHandler
def get_logger():
    """
    Create and return the root logger with a rotating file handler.

    Log lines go to temp/logs.log ('temp/' is created if missing).
    Idempotent: calling it again returns the same logger without stacking
    a second file handler (the original added a duplicate per call,
    producing repeated log lines).
    """
    __logger__ = logging.getLogger('')  # root logger, so module loggers propagate here
    __logger__.setLevel(logging.INFO)
    log_formatter = '%(asctime)s - %(levelname)-6s - %(message)s'
    time_format = "%Y-%m-%d %H:%M:%S"
    file_path = "temp/"
    file_name = "logs"
    formatter = logging.Formatter(log_formatter, time_format)
    if not os.path.exists(file_path):
        os.makedirs(file_path)
    # os.path.join takes separate components; the original passed one
    # pre-concatenated string, making the call a no-op.
    log_file = os.path.join(file_path, f"{file_name}.log")
    # Only attach the file handler once, however many times we are called.
    if not any(isinstance(h, RotatingFileHandler) for h in __logger__.handlers):
        # NOTE(review): maxBytes=1 rotates on every record and, with the
        # default backupCount=0, keeps only the most recent line in the
        # file — confirm this is really the intended retention policy.
        temp_handler = RotatingFileHandler(log_file, maxBytes=1)
        temp_handler.setFormatter(formatter)
        __logger__.addHandler(temp_handler)
    return __logger__


# Shared module-level logger; importing this module configures logging.
logger = get_logger()
script/services/app_call.py
View file @
fcda9046
import
json
import
json
import
requests
import
requests
from
script.constants.app_config
import
from_date
,
to_date
,
url
from
script.constants.app_config
import
from_date
,
to_date
,
url
from
script.core.handlers.convert_time
import
convert_timestamp
,
convert_epoch
from
script.core.handlers.convert_time
import
convert_timestamp
from
script.core.handlers.generate_data
import
generate_csv_data
from
script.core.handlers.send_data
import
SendData
from
script.core.handlers.send_data
import
SendData
...
@@ -10,14 +11,26 @@ def start_service_on_kairosdb():
...
@@ -10,14 +11,26 @@ def start_service_on_kairosdb():
start_absolute
=
convert_timestamp
(
from_date
)
start_absolute
=
convert_timestamp
(
from_date
)
end_absolute
=
convert_timestamp
(
to_date
)
end_absolute
=
convert_timestamp
(
to_date
)
query_url
=
url
query_url
=
url
tags
=
[
"site_114$line_4901$equipment_6054$tag_61534"
]
mongo_obj
=
SendData
()
df
=
mongo_obj
.
extract_data
()
tag_value
=
mongo_obj
.
final_dict
(
df
)
dicts_key_list
=
list
(
tag_value
.
keys
())
query
=
{
query
=
{
"metrics"
:
[
"metrics"
:
[
{
{
"tags"
:
{
"tags"
:
{
"c3"
:
tags
"c3"
:
dicts_key_list
},
},
"name"
:
"ilens.live_data.raw"
,
"name"
:
"ilens.live_data.raw"
,
"group_by"
:
[
{
"name"
:
"tag"
,
"tags"
:
[
"c3"
]
}
],
"aggregators"
:
[
"aggregators"
:
[
{
{
"name"
:
"sum"
,
"name"
:
"sum"
,
...
@@ -37,11 +50,9 @@ def start_service_on_kairosdb():
...
@@ -37,11 +50,9 @@ def start_service_on_kairosdb():
"end_absolute"
:
end_absolute
"end_absolute"
:
end_absolute
}
}
data
=
json
.
dumps
(
query
)
query_
data
=
json
.
dumps
(
query
)
response
=
requests
.
post
(
query_url
,
data
=
data
)
response
=
requests
.
post
(
query_url
,
data
=
query_
data
)
print
(
response
.
text
)
print
(
response
.
text
)
data
=
json
.
dumps
(
response
.
text
)
#generate_csv_data(dicts_key_list, response.text)
timestamp
=
convert_epoch
(
1675693206000
)
print
(
timestamp
)
except
Exception
as
e
:
except
Exception
as
e
:
print
(
"Error -"
,
e
)
print
(
"Error -"
,
e
)
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment