Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
M
Mail-services
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Rakesh Dara
Mail-services
Commits
fa720296
Commit
fa720296
authored
May 12, 2020
by
Rakesh Dara
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Upload New File
parent
cead76ee
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
393 additions
and
0 deletions
+393
-0
glens_client_UI_mail_services/MailServiceB64.py
+393
-0
No files found.
glens_client_UI_mail_services/MailServiceB64.py
0 → 100644
View file @
fa720296
"""
mail service to send average reports and windrose
"""
# -----------------Start of Import statements------------------------ #
import
json
import
datetime
import
MySQLdb
import
requests
import
base64
import
os
import
ast
import
time
from
Logging_Mailer
import
logger
from
ConfigParser
import
SafeConfigParser
from
dateutil
import
parser
import
shutil
# - *- coding: utf- 8 - *-
import
sys
reload
(
sys
)
sys
.
setdefaultencoding
(
"ISO-8859-1"
)
# -----------------end of Import statements------------------------ #
parser_conf
=
SafeConfigParser
()
parser_conf
.
read
(
"mail.conf"
)
mailIds
=
parser_conf
.
get
(
"settings"
,
"recipient"
)
reportName
=
parser_conf
.
get
(
"settings"
,
"reportName"
)
windSpeed
=
parser_conf
.
get
(
"settings"
,
"windSpeed"
)
reportDays
=
parser_conf
.
get
(
"settings"
,
"reportDays"
)
reportType
=
parser_conf
.
get
(
"settings"
,
"reportType"
)
customTimeRange
=
parser_conf
.
get
(
"settings"
,
"customTimeRange"
)
mail_flag
=
parser_conf
.
get
(
"settings"
,
"mailSender"
)
windDirection
=
parser_conf
.
get
(
"settings"
,
"windDirection"
)
UIReportLocation
=
parser_conf
.
get
(
"settings"
,
"UIReportLocation"
)
WindroseReportLocation
=
parser_conf
.
get
(
"settings"
,
"WindroseReportLocation"
)
mailServer
=
parser_conf
.
get
(
"settings"
,
"mailServer"
)
stationName
=
parser_conf
.
get
(
"settings"
,
"stationName"
)
monitoring_station
=
str
(
parser_conf
.
get
(
"settings"
,
"monitoringStation"
))
.
split
(
","
)
site_id
=
parser_conf
.
get
(
"settings"
,
"siteId"
)
site_name
=
parser_conf
.
get
(
"settings"
,
"siteName"
)
ReportArchivePath
=
parser_conf
.
get
(
"settings"
,
"reportArchive"
)
delete_days
=
parser_conf
.
get
(
"settings"
,
"data_archive_day"
)
excel_report
=
parser_conf
.
get
(
"settings"
,
"excelReport"
)
pdf_report
=
parser_conf
.
get
(
"settings"
,
"pdfReport"
)
try
:
config_server
=
parser_conf
.
get
(
"settings"
,
"configServer"
)
pdf_dump_url
=
parser_conf
.
get
(
"settings"
,
"pdfDumpURL"
)
csv_dump_url
=
parser_conf
.
get
(
"settings"
,
"csvDumpURL"
)
averageReportService
=
parser_conf
.
get
(
"settings"
,
"averageReportServiceUrl"
)
except
Exception
as
e
:
logger
.
exception
(
"taking config server as cpcb"
+
str
(
e
.
message
))
config_server
=
"cpcb"
pdf_dump_url
=
"http://localhost:7070/kl/GLensClient/PdfDump"
csv_dump_url
=
"http://localhost:7070/kl/GLensClient/CsvDump"
averageReportService
=
"http://localhost:7070/kl/GLensClient/requestAverageReport"
generateWindRose
=
int
(
parser_conf
.
get
(
"settings"
,
"generate_windRose"
))
averageInterval
=
parser_conf
.
get
(
"settings"
,
"averageInterval"
)
windRoseLocation
=
parser_conf
.
get
(
"settings"
,
"WindroseReportLocation"
)
windRoseServiceUrl
=
parser_conf
.
get
(
"settings"
,
"windRoseServiceUrl"
)
customTime
=
parser_conf
.
get
(
"settings"
,
"customTime"
)
global
from_date
,
to_date
def init_db():
    """
    Open the module-level MySQL connection to the local db_glensclient database.

    Sets the global ``db``; raises MySQLdb's connection error on failure.
    NOTE(review): credentials are hard-coded — consider moving host/user/password
    into mail.conf alongside the other settings.
    """
    global db
    # The original pre-assigned `db = None` and immediately overwrote it;
    # connect() either succeeds or raises, so the dead store is removed.
    db = MySQLdb.connect("127.0.0.1", "root", "root", "db_glensclient")
def del_db():
    """
    Close and release the module-level DB connection opened by init_db().
    """
    global db
    try:
        # BUGFIX: the original only did `del db`, which drops the reference but
        # leaks the open MySQL connection; close it first.
        db.close()
    except Exception as er:
        logger.exception(str(er))
    del db
def generate_average_report(start_date, end_date):
    """
    Build the average-report request, post it to the report service, reshape the
    response into CSV text, and request a CSV dump of it.

    :param start_date: report window start (string, "%Y-%m-%d %H:%M:%S" — set by __main__)
    :param end_date: report window end (same format)
    :return: the generated dump file name on success, None or "" otherwise
    """
    try:
        global db
        # Fixed envelope for the average-report service; bodyContent is filled
        # in further below with the selected parameters and the date window.
        body_json = {
            "bodyContent": dict(averageInterval=averageInterval,
                                normalization=["fld_data"],
                                dataQualityCode="U",
                                userName="User"),
            "mcert": "False",
            "userRole": "Admin",
            "userName": "glensAdmin",
            "siteId": site_id,
            "station_list": monitoring_station,
            "avgFlag": "parameter"
        }
        cursor = db.cursor()
        query = ""
        counter = 0
        # One SELECT per monitoring station, UNIONed together.
        # NOTE(review): the query is built by string concatenation (SQL-injection
        # prone) and there is no space before `and` — it relies on MySQL
        # tolerating "'and"; consider a parameterized query.
        for name in monitoring_station:
            if counter > 0:
                query += " UNION "
            query += "select fld_monitoringtype,fld_monitoringunit,fld_analysername,fld_parametername," \
                     "fld_analyserid,fld_unit,fld_parameterid,fld_parameter,fld_siteId, fld_monitoringlabel " \
                     "from tbl_metadata where " \
                     "fld_monitoringunit = '" + name + "'and fld_siteId = '" + site_id + "'"
            counter += 1
        cursor.execute(query)
        result = cursor.fetchall()
        cursor.close()
        # Map each metadata row (positional) into the JSON shape the service expects.
        parameter_json = []
        for every_item in result:
            temp_json = dict()
            temp_json["monitoringType"] = every_item[0]
            temp_json["monitoringId"] = every_item[1]
            temp_json["analyserName"] = every_item[2]
            temp_json["parameterName"] = every_item[3]
            temp_json["analyserId"] = every_item[4]
            try:
                temp_json["unit"] = every_item[5]
                temp_json["displayUnit"] = every_item[5]
            except Exception as er:
                # Missing unit column — fall back to "NA" for both fields.
                logger.exception(er.message)
                temp_json["unit"] = "NA"
                temp_json["displayUnit"] = "NA"
            temp_json["parameterId"] = every_item[6]
            temp_json["rowId"] = every_item[7]
            temp_json["siteId"] = every_item[8]
            temp_json["monitoringLabel"] = every_item[9]
            parameter_json.append(temp_json)
        body_json["bodyContent"]["selectedParams"] = parameter_json
        body_json["bodyContent"]["fromDate"] = start_date
        body_json["bodyContent"]["toDate"] = end_date
        body_json["bodyContent"]["siteId"] = site_id
        logger.debug("Json being passed to requestAverageReport: ")
        logger.debug(json.dumps(body_json))
        # The service exchanges base64-encoded JSON payloads in both directions.
        average_report_json = base64.b64encode(json.dumps(body_json))
        # average_report_json = body_json
        try:
            response = requests.post(averageReportService, json=average_report_json)
            logger.debug("response from requestAverageReport")
            logger.debug(response.text)
            json_string = base64.b64decode(response.text)
            response_json = json.loads(json_string)
            # Assemble a CSV: header row from headerValues, then one row per
            # data entry. Newlines inside headers become "-", and the micro
            # sign (0xb5) is normalised so "µg/m3" reads "ug/m3".
            final_string = ""
            for value in response_json["headerValues"]:
                try:
                    final_string = final_string + "," + str(value.replace("\n", "-").replace("\xb5g/m3", "ug/m3"))
                except Exception as er:
                    logger.exception(er.message)
            # Drop the leading comma added by the loop above.
            final_string = final_string[1:]
            for value in response_json["data"]:
                counter = 0
                for temp_val in value:
                    if counter == 0:
                        # First column is the timestamp/label — starts a new row.
                        final_string = final_string + "\n" + temp_val + ","
                    else:
                        # Subsequent cells: first element of each value tuple.
                        final_string = final_string + str(temp_val[0]) + ","
                    counter = counter + 1
            # Strip trailing commas at end-of-row, terminate with a newline.
            final_string = final_string.replace(",\n", "\n")
            final_string = final_string[:-1] + "\n"
            final_json = dict()
            # json.dumps + literal_eval round-trip escapes/normalises the CSV text.
            final_json["dataExcel"] = ast.literal_eval(json.dumps(final_string))
            final_json["headerContent"] = {"ReportName": str(site_name),
                                           "ReportType": "Average Report",
                                           "startTime": start_date,
                                           "endTime": end_date,
                                           "DataQualityCode": "U"}
            final_json["fileName"] = "Average Report"
            final_json["userRole"] = "Admin"
            final_json["userName"] = "glensAdmin"
            final_json["siteId"] = site_id
            mcert_advisory = response_json["mcertAdvisory"]
            final_json["mcertAdvisory"] = ast.literal_eval(json.dumps(mcert_advisory))
            logger.debug("PDF Dump_json")
            logger.debug(final_json)
            pdf_request_json = base64.b64encode(json.dumps(final_json))
            # pdf_request_json = final_json
            pdf_response = {}
            # PDF dump path is currently disabled (see pdf_report flag):
            # if pdf_report == "1":
            #     print " in pdf"
            #     pdf_response = requests.post(pdf_dump_url, json=pdf_request_json)
            #     pdf_response = json.loads(pdf_response.text)
            if excel_report == "1":
                pdf_response = requests.post(csv_dump_url, json=pdf_request_json)
                json_string = base64.b64decode(pdf_response.text)
                pdf_response = json.loads(json_string)
                # pdf_response = json.loads(pdf_response.text)
            file_name = ""
            try:
                # When excel_report != "1" pdf_response is still {} and this
                # raises KeyError — file_name stays "".
                file_name = pdf_response["name"]
            except Exception as es:
                logger.exception(str(es.message))
            # Fetching the download URL makes the service materialise the file.
            download_url = "http://localhost:7070/kl/GLensClient/download?name=" + str(file_name)
            requests.get(download_url)
            logger.debug("FileName: " + str(file_name))
            logger.debug(response_json)
            return file_name
        except Exception as er:
            logger.debug("Didn't get the file name")
            logger.exception(er.message)
            return None
    except Exception as es:
        # Outer guard: DB/query failures end up here; implicitly returns None.
        logger.exception(es.message)
def generate_windrose(start_date, end_date):
    """
    Ask the windrose service to render a windrose chart for the given window.

    :param start_date: chart window start (string)
    :param end_date: chart window end (string)
    :return: path to the generated windrose.png, or None when the request fails
    """
    try:
        # Full request payload in one literal; values come from mail.conf.
        payload = {
            "fromDate": start_date,
            "toDate": end_date,
            "ws_parameter": windSpeed,
            "wd_parameter": windDirection,
            "WindroseLocation": windRoseLocation,
            "userName": "",
            "dataQualityCode": "U",
            "siteId": site_id,
            "mcert": "False",
            "chart_type": "windrose",
        }
        reply = requests.post(windRoseServiceUrl, json=payload)
        logger.debug(reply)
        # The service answers with a quoted directory path; strip the quotes
        # and append the fixed image name.
        image_path = reply.text[1:-1] + "\\windrose.png"
        logger.debug(image_path)
        return image_path
    except Exception as es:
        logger.exception(es.message)
def send_mail(data_json):
    """
    Post the report mail payload to the mail server, retrying every 5 minutes
    until the server answers 200.

    :param data_json: mail payload (recipients, attachments, body text)
    :return: None
    """
    logger.debug("Sending mail")
    headers = {'Content-type': 'application/json'}
    while True:
        # BUGFIX: the request must be INSIDE the retry loop. The original
        # posted once before the loop and then re-inspected the same response
        # forever — on any non-200 answer it slept/looped without ever
        # retrying, i.e. an infinite loop that never resent the mail.
        response = requests.post(mailServer, data=json.dumps(data_json), headers=headers)
        logger.debug(response)
        logger.debug("Status Code" + str(response.status_code))
        if int(response.status_code) == 200:
            try:
                logger.debug("success")
                logger.debug(str(response.text))
            except Exception as er:
                # Logging the body can fail on odd encodings; not fatal.
                logger.exception(er)
            logger.debug("File uploaded Successfully to server")
            break
        else:
            logger.debug("File upload to server failed")
            # Back off 5 minutes before the next attempt.
            time.sleep(300)
def generate(start_date, end_date):
    """
    Orchestrate one report run: open the DB, generate the average report,
    optionally render a windrose, attach both (base64) and mail them.

    :param start_date: report window start (string)
    :param end_date: report window end (string)
    :return: None
    """
    init_db()
    data_json = dict()
    data_json["fileObject"] = []
    logger.debug("Generating Average Report")
    report_file_name = generate_average_report(start_date, end_date)
    if generateWindRose == 1:
        try:
            logger.debug("Generating WindRose")
            wind_rose_file = generate_windrose(start_date, end_date)
            # Attach the windrose image base64-encoded.
            with open(wind_rose_file, "rb") as f:
                temp_bytes = f.read()
            encoded_wind_rose = base64.b64encode(temp_bytes)
            data_json["fileObject"].append({"fileName": str(os.path.basename(wind_rose_file)),
                                            "data": encoded_wind_rose})
        except Exception as er:
            # Windrose failure is non-fatal; the report mail still goes out.
            logger.exception(er.message)
    if report_file_name is not None and report_file_name != "":
        encoded_report = ""
        try:
            # The report service wrote the file under UIReportLocation.
            open_pdf_location = UIReportLocation + report_file_name
            with open(open_pdf_location, "rb") as f:
                temp_bytes = f.read()
            encoded_report = base64.b64encode(temp_bytes)
        except Exception as er:
            logger.exception(str(er.message))
        if mail_flag == "1":
            try:
                data_json["fileObject"].append({"fileName": str(os.path.basename(report_file_name)),
                                                "data": encoded_report})
                data_json["siteId"] = site_id
                data_json["configServer"] = config_server
                data_json["category"] = reportName + " " + stationName + " " + str(datetime.datetime.now().date())
                data_json["description"] = "Please find the reports attached with this mail. The report is from " + str(start_date) + " to " + str(end_date) + "."
                data_json["name"] = "Sir"
                data_json["senderName"] = "GLens"
                data_json["email_id"] = mailIds
                # NOTE(review): fileCount stays 1 even when a windrose is also
                # attached (fileObject may hold 2 entries) — confirm the mail
                # server ignores this field or fix it to len(fileObject).
                data_json["fileCount"] = 1
                send_mail(data_json)
            except Exception as er:
                logger.exception(str(er.message))
        del_db()
    else:
        # No report file was produced; nothing to mail.
        logger.debug("No File Created.")
def move_to_archive():
    """
    Move report files older than ``delete_days`` days from UIReportLocation
    into ReportArchivePath, creating the archive folder if needed.

    File-name conventions (timestamps parsed with dateutil):
      * PDFs:  "<prefix>_Average_<timestamp>.pdf"
      * CSVs:  "<prefix>temp<timestamp...>.csv" (first 12 chars of the stamp used)
    """
    try:
        pdf_files = []
        csv_files = []
        # Classify directory entries by extension; anything else is just logged.
        for temp_file_name in os.listdir(UIReportLocation):
            if ".pdf" in temp_file_name:
                pdf_files.append(temp_file_name)
            elif ".csv" in temp_file_name:
                csv_files.append(temp_file_name)
            else:
                logger.debug("Unwanted file type: " + str(temp_file_name))
        pdf_file_list = []
        csv_file_list = []
        now = datetime.datetime.now()
        # Age threshold: delete_days expressed in minutes, then turned into
        # the cut-off timestamp (files dated before it get archived).
        delete_minutes = int(delete_days) * 1440
        delete_date = datetime.timedelta(minutes=int(delete_minutes))
        delete_date = now - delete_date
        for archive_pdf in pdf_files:
            temp_pdf_file = archive_pdf.split("_Average_")[1].split(".pdf")[0]
            pdf_file_object = parser.parse(temp_pdf_file)
            if pdf_file_object < delete_date:
                # Store the FULL path once, here.
                pdf_file_list.append(UIReportLocation + archive_pdf)
        logger.debug("PDF Files to be moved")
        logger.debug(pdf_file_list)
        for archive_csv in csv_files:
            temp_csv_file = archive_csv.split("temp")[1].split(".csv")[0]
            csv_file_object = parser.parse(temp_csv_file[0:12])
            if csv_file_object < delete_date:
                csv_file_list.append(UIReportLocation + archive_csv)
        logger.debug("CSV Files to be moved")
        logger.debug(csv_file_list)
        try:
            if not os.path.exists(ReportArchivePath):
                os.makedirs(ReportArchivePath)
            # BUGFIX: the list entries above already include UIReportLocation;
            # the original prepended it AGAIN here (UIReportLocation + files),
            # producing a doubled, non-existent source path so no file was
            # ever moved.
            for files in pdf_file_list:
                shutil.move(files, ReportArchivePath)
            for files in csv_file_list:
                shutil.move(files, ReportArchivePath)
        except Exception as er:
            logger.exception("Error in moving the files to Archive folder" + str(er))
    except Exception as er:
        logger.exception("Error in moving the files to Archive folder" + str(er))
if __name__ == "__main__":
    try:
        # Compute the global from_date / to_date window according to
        # reportType, then run one report cycle.
        if int(reportDays) != 0:
            if str(reportType).lower() == "moving":
                # Window = [now - reportDays, now], second precision.
                # NOTE(review): str(datetime) contains neither "T" nor "Z",
                # so the two .replace() calls are effectively no-ops.
                from_date = str((datetime.datetime.now() - datetime.timedelta(days=int(reportDays)))).split(".")[0].replace("T", " ").replace("Z", "")
                to_date = str(datetime.datetime.now().date()) + " " + str(datetime.datetime.now().time()).split(".")[0]
            if str(reportType).lower() == "fixed":
                # Window = whole calendar days: reportDays ago 00:00:00
                # through yesterday 23:59:59.
                from_date = datetime.datetime.today() - datetime.timedelta(days=int(reportDays))
                from_date = datetime.datetime.strftime(from_date, "%Y-%m-%d 00:00:00")
                to_date = datetime.datetime.today() - datetime.timedelta(hours=24)
                to_date = datetime.datetime.strftime(to_date, "%Y-%m-%d 23:59:59")
            if str(reportType).lower() == "custom":
                # Window ends today at customTime (config, %H%M%S) and starts
                # reportDays earlier at the same time of day.
                date = datetime.datetime.today()
                date_time_object = datetime.datetime.strptime(customTime, "%H%M%S")
                custom_time = date_time_object.strftime('%H:%M:%S')
                to_date = datetime.datetime.strftime(date, "%Y-%m-%d" + " " + custom_time)
                from_date = datetime.datetime.today() - datetime.timedelta(days=int(reportDays))
                from_date = datetime.datetime.strftime(from_date, "%Y-%m-%d" + " " + custom_time)
        if int(reportDays) == 0:
            # Explicit window from config: "<from>:<to>" with %Y%m%d%H%M%S
            # stamps, reformatted to "%Y-%m-%d %H:%M:%S".
            customTimeRange = customTimeRange.split(":")
            from_date = customTimeRange[0]
            to_date = customTimeRange[1]
            date_time_object = datetime.datetime.strptime(from_date, "%Y%m%d%H%M%S")
            from_date = date_time_object.strftime('%Y-%m-%d %H:%M:%S')
            date_time_object = datetime.datetime.strptime(to_date, "%Y%m%d%H%M%S")
            to_date = date_time_object.strftime('%Y-%m-%d %H:%M:%S')
        # NOTE(review): if reportType matches none of the three branches,
        # from_date/to_date are unbound and this raises NameError (caught below).
        generate(from_date, to_date)
    except Exception as e:
        # e.message is Python 2 only.
        logger.exception(e.message)
\ No newline at end of file
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment