Commit 96429f2a by ajil.k

added

parents
# Default ignored files
/shelf/
/workspace.xml
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (json_pandas)" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/json_pandas.iml" filepath="$PROJECT_DIR$/.idea/json_pandas.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
# importing the necessary library
from scripts.services.pandas_json_operations import start_operations

# Guard the entry point so importing this module does not run the whole
# pipeline as a side effect.
if __name__ == "__main__":
    start_operations()
\ No newline at end of file
[xlsx_file]
excel_path = scripts/utils/task3.xlsx
[json_file]
json_path = scripts/utils/json_file.json
pivot_path = scripts/utils/pivot.json
melt_path = scripts/utils/melted.json
merged_path = scripts/utils/merged.json
\ No newline at end of file
import configparser
# Load all file paths from the project's INI-style configuration.
# NOTE(review): the path is relative, so this assumes the process is
# started from the project root — confirm against the run script.
config = configparser.ConfigParser()
config.read("conf/application.conf")
# Source Excel workbook ([xlsx_file] section).
xlsx_path = config.get('xlsx_file', 'excel_path')
# Output JSON destinations ([json_file] section), one per operation.
json_path = config.get('json_file', 'json_path')
melt_path = config.get('json_file', 'melt_path')
pivot_path = config.get('json_file', 'pivot_path')
merged_path = config.get('json_file', 'merged_path')
import pandas as pd
def create_dataframe(dataframe):
    """Return a new DataFrame holding the first 10 and last 10 rows of *dataframe*.

    The result is re-indexed from 0. If *dataframe* has fewer than 20 rows,
    head() and tail() overlap, so some rows appear twice.

    :param dataframe: source pandas DataFrame
    :return: concatenated DataFrame, or None if concatenation fails
    """
    try:
        first_ten_data = dataframe.head(10)
        last_ten_data = dataframe.tail(10)
        # Concatenate once instead of starting from an empty DataFrame
        # and concatenating twice — same result, fewer copies.
        return pd.concat([first_ten_data, last_ten_data], ignore_index=True)
    except Exception as e:
        # Original printed a tuple: print(("Error: ", e)). Print a plain
        # message instead, and make the failure return value explicit.
        print("Error: ", e)
        return None
import pandas as pd
import json
from scripts.config.application_config import json_path
from scripts.core.handlers.create_new_dataframe import create_dataframe
def create_json_file(path):
    """Read the Excel workbook at *path* and write it out as a JSON file.

    The JSON document has two keys: "header" mapping column0..columnN to
    the real column names, and "body" holding the first/last-10-row subset
    produced by create_dataframe() as a list of records. The output file
    location comes from the module-level ``json_path`` config value.

    :param path: filesystem path of the source Excel file
    :return: True on success, False on any failure
    """
    try:
        # Read the Excel file
        dataframe = pd.read_excel(path)
        # Convert epoch-millisecond timestamps to readable strings.
        dataframe['Timestamp'] = dataframe['Timestamp'].apply(lambda x: pd.to_datetime(x, unit='ms')
                                                              .strftime('%Y-%m-%d %H:%M:%S'))
        new_dataframe = create_dataframe(dataframe)
        # Setting "header" dictionary: column0..columnN -> actual names.
        # Dict comprehension with enumerate replaces the range(len(...))
        # index loop of the original.
        header_dict = {'header': {"column" + str(i): name
                                  for i, name in enumerate(dataframe.columns)}}
        # Setting 'body' dictionary
        data = {"body": new_dataframe.to_dict(orient='records')}
        json_dict = {**header_dict, **data}
        # Create the JSON file (debug print of the full payload removed).
        with open(json_path, "w") as json_file:
            # Write data to the file
            json.dump(json_dict, json_file, indent=4)
        return True
    except Exception as e:
        print("Error: ", e)
        return False
import json
import pandas as pd
from scripts.config.application_config import melt_path
def melt_data(path):
    """Read the Excel workbook at *path*, unpivot the "current" column
    against "Timestamp", and dump the result to the configured melt_path
    JSON file.

    :param path: filesystem path of the source Excel file
    :return: True on success, False on any failure
    """
    try:
        # Load the workbook and render epoch-ms timestamps as strings.
        frame = pd.read_excel(path)
        frame['Timestamp'] = frame['Timestamp'].apply(
            lambda ts: pd.to_datetime(ts, unit='ms').strftime('%Y-%m-%d %H:%M:%S'))
        # Wide -> long: one row per (Timestamp, "current", value) triple.
        melted = frame.melt(id_vars=['Timestamp'], value_vars=["current"])
        payload = {"melt": melted.to_dict(orient='records')}
        with open(melt_path, "w") as out_file:
            json.dump(payload, out_file, indent=4)
        return True
    except Exception as error:
        print("Error:", error)
        return False
# importing libraries
import json
import pandas as pd
from scripts.config.application_config import merged_path
def merge_dataframe(path):
    """Read the Excel workbook at *path*, split it into two frames sharing
    the Timestamp column, merge them back on Timestamp, and write the
    result to the configured merged_path JSON file.

    :param path: filesystem path of the source Excel file
    :return: True on success, False on any failure
    """
    try:
        # Read the Excel file
        dataframe = pd.read_excel(path)
        # Converting Timestamp's datatype into string
        dataframe['Timestamp'] = dataframe['Timestamp'].apply(lambda x: pd.to_datetime(x, unit='ms')
                                                              .strftime('%Y-%m-%d %H:%M:%S'))
        # Creates 2 Dataframes that overlap only on Timestamp
        first_dataframe = dataframe[['Timestamp', 'kW']]
        second_dataframe = dataframe[['Timestamp', 'current']]
        # Merging 2 Dataframes (inner join on Timestamp)
        merged_dataframe = pd.merge(first_dataframe, second_dataframe, on='Timestamp')
        data = {"merged": merged_dataframe.to_dict(orient='records')}
        with open(merged_path, "w") as json_file:
            # Write data to the file
            json.dump(data, json_file, indent=4)
        # Unreachable "return False" after this return removed.
        return True
    except Exception as e:
        print("Error:", e)
        return False
# importing libraries
import json
import pandas as pd
from scripts.config.application_config import pivot_path
def pivot_data(path):
    """Read the Excel workbook at *path*, build a pivot table indexed by
    Timestamp, and write it to the configured pivot_path JSON file.

    :param path: filesystem path of the source Excel file
    :return: True on success, False on any failure
    """
    try:
        # Read the Excel file
        dataframe = pd.read_excel(path)
        # Perform the pivot operation.
        # NOTE(review): ``values=dataframe.head(10)`` passes a DataFrame
        # where pivot_table expects column label(s), and ``columns=[]`` is
        # empty — this looks wrong and likely raises into the except below.
        # Kept as-is pending confirmation of the intended columns.
        pivot_table = dataframe.pivot_table(values=dataframe.head(10), index='Timestamp', columns=[])
        data = {"pivot": pivot_table.to_dict(orient='records')}
        with open(pivot_path, "w") as json_file:
            # Write data to the file
            json.dump(data, json_file, indent=4)
        # Unreachable "return False" after this return removed.
        return True
    except Exception as e:
        print("Error:", e)
        return False
from scripts.core.handlers.json_file import create_json_file
from scripts.config.application_config import xlsx_path
from scripts.core.handlers.melt_datas import melt_data
from scripts.core.handlers.merge_dataframes import merge_dataframe
from scripts.core.handlers.pivot_function import pivot_data
def start_operations():
    """Run every handler in sequence against the configured Excel file:
    JSON parsing, pivot, melt, and merge. Each handler returns a boolean;
    a success message is printed only when its step succeeded.
    """
    try:
        # Table-driven dispatch: (banner, handler, success message).
        steps = (
            ("---------------JSON Parsing---------------",
             create_json_file, "Data stored in json_file.json file"),
            ("-------------------PIVOT-------------------",
             pivot_data, "Pivoted data stored in pivot.json file"),
            ("-------------------MELT-------------------",
             melt_data, "Melted data stored in melted.json file"),
            ("-------------------Merge-------------------",
             merge_dataframe, "Merged data stored in merged.json file"),
        )
        for banner, handler, success_message in steps:
            print(banner)
            if handler(xlsx_path):
                print(success_message)
    except Exception as e:
        print("Error: ", e)
{
"header": {
"column0": "Timestamp",
"column1": "kWh",
"column2": "kVAh",
"column3": "kW",
"column4": "kVA",
"column5": "current"
},
"body": [
{
"Timestamp": "2019-12-01 21:00:00",
"kWh": 46.228215767638176,
"kVAh": 49.759336099588836,
"kW": 183.66897265625,
"kVA": 200.53249609375,
"current": 281.58292388916016
},
{
"Timestamp": "2019-12-01 21:15:00",
"kWh": 45.76348547717498,
"kVAh": 50.2282157676309,
"kW": 183.46255208333332,
"kVA": 200.56794270833333,
"current": 281.617431640625
},
{
"Timestamp": "2019-12-01 21:30:00",
"kWh": 45.76348547718226,
"kVAh": 50.49377593360987,
"kW": 184.88955078125,
"kVA": 201.79479687499997,
"current": 283.0400848388672
},
{
"Timestamp": "2019-12-01 21:45:00",
"kWh": 46.552716320351465,
"kVAh": 50.48294589788566,
"kW": 184.3258984375,
"kVA": 201.42360937499998,
"current": 282.44676971435547
},
{
"Timestamp": "2019-12-01 22:00:00",
"kWh": 46.00458310000249,
"kVAh": 50.13828877951164,
"kW": 182.37034895833335,
"kVA": 199.271421875,
"current": 279.42852783203125
},
{
"Timestamp": "2019-12-01 22:15:00",
"kWh": 46.110687022897764,
"kVAh": 50.63358778625843,
"kW": 184.18944140625,
"kVA": 201.03991015625,
"current": 281.9737777709961
},
{
"Timestamp": "2019-12-01 22:30:00",
"kWh": 46.47655815858161,
"kVAh": 50.83407914363488,
"kW": 186.40050520833333,
"kVA": 203.86802604166667,
"current": 285.9699300130208
},
{
"Timestamp": "2019-12-01 22:45:00",
"kWh": 46.296574167587096,
"kVAh": 50.86875390756177,
"kW": 185.91884374999998,
"kVA": 203.10286718749998,
"current": 285.10118103027344
},
{
"Timestamp": "2019-12-01 23:00:00",
"kWh": 46.59622562891309,
"kVAh": 51.31205402872001,
"kW": 185.94254687499998,
"kVA": 203.36761458333334,
"current": 285.23462931315106
},
{
"Timestamp": "2019-12-01 23:15:00",
"kWh": 46.76348547718226,
"kVAh": 50.514522821576975,
"kW": 185.391484375,
"kVA": 202.44480468749998,
"current": 283.9910583496094
},
{
"Timestamp": "2019-12-02 18:30:00",
"kWh": 46.56323514282849,
"kVAh": 50.29440005487413,
"kW": 184.82915104166668,
"kVA": 202.09330729166666,
"current": 283.2255350748698
},
{
"Timestamp": "2019-12-02 18:45:00",
"kWh": 45.987603305780794,
"kVAh": 50.71900826445926,
"kW": 184.39455078125002,
"kVA": 201.15467187500002,
"current": 281.9291687011719
},
{
"Timestamp": "2019-12-02 19:00:00",
"kWh": 46.71440921193425,
"kVAh": 50.687471904282575,
"kW": 185.46109374999997,
"kVA": 202.85015625,
"current": 284.17213439941406
},
{
"Timestamp": "2019-12-02 19:15:00",
"kWh": 45.83848677928472,
"kVAh": 50.32816541953798,
"kW": 184.31598958333333,
"kVA": 201.36305208333332,
"current": 282.21966552734375
},
{
"Timestamp": "2019-12-02 19:30:00",
"kWh": 46.59999657076696,
"kVAh": 50.61659408114065,
"kW": 184.44642578124999,
"kVA": 201.67296484375,
"current": 282.61204528808594
},
{
"Timestamp": "2019-12-02 19:45:00",
"kWh": 46.07635197695345,
"kVAh": 50.9892150474916,
"kW": 182.8969296875,
"kVA": 199.87789062499996,
"current": 280.1493377685547
},
{
"Timestamp": "2019-12-02 20:00:00",
"kWh": 45.95020746887894,
"kVAh": 50.03734439834079,
"kW": 182.440359375,
"kVA": 199.39240234375,
"current": 279.73744201660156
},
{
"Timestamp": "2019-12-02 20:15:00",
"kWh": 46.498233942598745,
"kVAh": 50.41558931449981,
"kW": 184.44230729166668,
"kVA": 201.8952552083333,
"current": 283.39756266276044
},
{
"Timestamp": "2019-12-02 20:30:00",
"kWh": 45.89180755117559,
"kVAh": 50.27735674359428,
"kW": 183.25395312499998,
"kVA": 200.42523828125,
"current": 281.13245391845703
},
{
"Timestamp": "2019-12-02 20:45:00",
"kWh": 41.18257261411054,
"kVAh": 44.87966804979078,
"kW": 183.2521640625,
"kVA": 200.4773046875,
"current": 281.26131439208984
}
]
}
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment