import pandas as pd
import numpy as np
import urllib.request
import urllib.parse
import psycopg2
import json
from datetime import date
from pandas.io.json import json_normalize
from sqlalchemy import create_engine
from sqlalchemy.engine import URL
# --- PostgreSQL connection settings ---------------------------------------
# NOTE(review): in the original, the `host` and `password` assignments were
# accidentally commented out ("# EDIT THIS LINE host = ..."), which made the
# DATABASE dict below fail with NameError. They are restored as placeholders.
drivername = 'postgresql'
host = '<postgresSQLHost>'          # EDIT THIS LINE: your PostgreSQL host
port = '5432'
username = 'postgres'
password = '<postgreSQLPassword>'   # EDIT THIS LINE: your PostgreSQL password
database = 'postgres'
# Keyword arguments for building the SQLAlchemy engine URL.
DATABASE = {
    'drivername': drivername,
    'host': host,
    'port': port,
    'username': username,
    'password': password,
    'database': database
}
# json_normalize produces dotted column names (e.g. "hourly.time"); dots are
# awkward in SQL identifiers, so rewrite them with underscores.
df_columns = [name.replace('.', '_') for name in df.columns.tolist()]
df.columns = df_columns
df_columns  # notebook-style display of the cleaned column names
# Each "hourly_*" cell in df holds the full array of hourly readings from one
# API response.  For every series: pull the column, stack the per-row arrays
# into one ndarray, transpose it into a single column, and give it its final
# SQL column name.  Two columns are deliberately renamed on the way (the
# "_2m" suffix is dropped from relativehumidity and dewpoint) to match the
# hourly_data table schema.
_HOURLY_RENAMES = [
    ('hourly_time', 'hourly_time'),
    ('hourly_temperature_2m', 'hourly_temperature_2m'),
    ('hourly_relativehumidity_2m', 'hourly_relativehumidity'),
    ('hourly_dewpoint_2m', 'hourly_dewpoint'),
    ('hourly_apparent_temperature', 'hourly_apparent_temperature'),
    ('hourly_cloudcover', 'hourly_cloudcover'),
    ('hourly_rain', 'hourly_rain'),
    ('hourly_pressure_msl', 'hourly_pressure_msl'),
]

_hourly_frames = []
for _source, _target in _HOURLY_RENAMES:
    _frame = pd.DataFrame(np.stack(df[_source])).transpose()
    _frame.columns = [_target]
    _hourly_frames.append(_frame.reset_index(drop=True))

hourly_data = pd.concat(_hourly_frames, axis=1)
hourly_data  # notebook-style display
def create_table_hourly_data():
    """Create the ``hourly_data`` table in PostgreSQL if it does not exist.

    Uses the module-level connection settings (``database``, ``username``,
    ``password``, ``host``, ``port``).  The original version never closed the
    cursor and leaked the connection if ``execute``/``commit`` raised; the
    ``try``/``finally`` plus psycopg2 context managers now guarantee cleanup,
    committing on success and rolling back on error.
    """
    # a) Connect to the PostgreSQL instance through the psycopg2 library.
    conn = psycopg2.connect(database=database, user=username,
                            password=password, host=host, port=port)
    try:
        # b) `with conn` commits on success / rolls back on exception;
        #    `with conn.cursor()` closes the cursor either way.
        with conn:
            with conn.cursor() as cur:
                # c) Create the table with the needed columns and data types.
                cur.execute("CREATE TABLE IF NOT EXISTS hourly_data (hourly_time DATE,hourly_temperature_2m FLOAT,hourly_relativehumidity FLOAT,hourly_dewpoint FLOAT,hourly_apparent_temperature FLOAT,hourly_cloudcover DECIMAL,hourly_rain FLOAT,hourly_pressure_msl FLOAT)")
    finally:
        # d) Always close the connection with the PostgreSQL instance.
        conn.close()
# Create the table (first run only) and bulk-load the hourly data.
create_table_hourly_data()
# NOTE(review): SQLAlchemy 1.4+ forbids instantiating URL(...) directly; the
# documented constructor is URL.create(...).  The `from sqlalchemy.engine
# import URL` path used by this file is itself the 1.4+ location, so
# URL.create is available here.
engine = create_engine(URL.create(**DATABASE))
# if_exists='replace' drops and recreates the table on every run, so the
# CREATE TABLE above only fixes the schema for the very first load.
hourly_data.to_sql("hourly_data", engine, index=False, if_exists='replace')
print("Done")
# The first 15 columns of the flattened response are scalar metadata
# (presumably latitude/longitude, timezone, units, ... — see the CREATE TABLE
# below).  .copy() prevents pandas' SettingWithCopyWarning and keeps the new
# column from (unpredictably) writing back into df through a view.
metadata = df.iloc[:, 0:15].copy()
# Stamp each load with the date the pipeline ran.
metadata['execution_date'] = date.today()
metadata  # notebook-style display
def create_table_metadata():
    """Create the ``metadata`` table in PostgreSQL if it does not exist.

    Uses the module-level connection settings (``database``, ``username``,
    ``password``, ``host``, ``port``).  Mirrors ``create_table_hourly_data``:
    the original leaked the cursor and connection on error, so cleanup is now
    guaranteed via ``try``/``finally`` and the psycopg2 context managers.
    """
    # a) Connect to the PostgreSQL instance through the psycopg2 library.
    conn = psycopg2.connect(database=database, user=username,
                            password=password, host=host, port=port)
    try:
        # b) `with conn` commits on success / rolls back on exception;
        #    `with conn.cursor()` closes the cursor either way.
        with conn:
            with conn.cursor() as cur:
                # c) Create the table with the needed columns and data types.
                cur.execute("CREATE TABLE IF NOT EXISTS metadata (latitude FLOAT,longitude FLOAT,generationtime_ms FLOAT,utc_offset_seconds INT,timezone VARCHAR,timezone_abbreviation VARCHAR,elevation FLOAT,hourly_units_time VARCHAR,hourly_units_temperature_2m VARCHAR)")
    finally:
        # d) Always close the connection with the PostgreSQL instance.
        conn.close()
# Create the table (first run only) and bulk-load the metadata.
create_table_metadata()
# NOTE(review): SQLAlchemy 1.4+ forbids instantiating URL(...) directly —
# use the documented URL.create(...) constructor instead.
engine = create_engine(URL.create(**DATABASE))
# if_exists='replace' drops and recreates the table on every run.
metadata.to_sql("metadata", engine, index=False, if_exists='replace')
print("Done")