I am trying to dump my crawled data to mysql using pandas and to_sql.
I am approaching in two ways 1>
{# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import MySQLdb
from sqlalchemy import create_engine
import os
from pandas.io import sql
import MySQLdb
columns_list= ['persons','c_number','name']
df = pd.DataFrame()
file_name = "/home/*****/****/data.xlsx"
data = pd.read_excel(file_name)
df = df.append(data)
df.columns = columns_list
engine=create_engine('mysql+mysqldb://root:root@localhost:3306/database_name?charset=utf8&use_unicode=1', echo=False)
con = MySQLdb.connect(host="localhost",user="user_name",passwd="password", db="database_name")
df.to_sql(con=con, name='table_name',if_exists='replace',flavor='mysql')}
Here I am getting TypeError: to_sql() got multiple values for keyword argument 'name' error
2>
import numpy as np
import pandas as pd
import MySQLdb
from sqlalchemy import create_engine
import os
columns_list= ['persons','c_number','name']
df = pd.DataFrame()
file_name = "data.xlsx"
data = pd.read_excel(file_name)
df = df.append(data)
df_n = pd.DataFrame()
df = df_n.append(df_new)
df = np.array_split(df, 100)
for x in range(len(df_new)):
print x
engine = create_engine('mysql+mysqldb://user_name:password@localhost:3306/database_name?charset=utf8&use_unicode=1', echo=False)
df[x].to_sql(name='table_name', con=engine, if_exists = 'append', index=False)
engine.dispose()
This way I am able to dump the data but name field I am getting Null in all records.
Is there a way to dump the data using column name "name"