Error:
java.lang.ClassNotFoundException: com.mysql.jdbc.Driver
Solution:
Both the Spark driver and the executor need the MySQL driver on their class paths, so specify:
- spark.driver.extraClassPath = /usr/share/java/mysql-connector-java.jar
- spark.executor.extraClassPath = /usr/share/java/mysql-connector-java.jar
# Initialize SparkSession and SparkContext
from pyspark.sql import SparkSession
from pyspark import SparkContext

# Build a local-mode session (2 cores). The MySQL connector jar is placed on
# both the driver and executor class paths, which the JDBC read below requires.
_builder = SparkSession.builder.master("local[2]").appName("prabhath")
for _key, _value in [
    ("spark.executor.memory", "1g"),
    ("spark.cores.max", "2"),
    ("spark.driver.extraClassPath", "/usr/share/java/mysql-connector-java.jar"),
    ("spark.executor.extraClassPath", "/usr/share/java/mysql-connector-java.jar"),
    ("spark.sql.warehouse.dir", "/Users/jlyang/Spark/spark-warehouse"),
]:
    _builder = _builder.config(_key, _value)
SpSession = _builder.getOrCreate()

# Expose the underlying SparkContext for RDD-level APIs.
SpContext = SpSession.sparkContext
# Read the users_userprofile table from the local MySQL database over JDBC.
# NOTE(review): credentials are hard-coded here ("XXXXX" looks like a redacted
# placeholder) — in real code, load them from configuration or the environment.
demoDf = (
    SpSession.read.format("jdbc")
    .option("url", "jdbc:mysql://localhost:3306/testpraba1")
    .option("driver", "com.mysql.jdbc.Driver")
    .option("dbtable", "users_userprofile")
    .option("user", "root")
    .option("password", "XXXXX")
    .load()
)

# Print the first rows of the loaded DataFrame to stdout.
demoDf.show()
No comments:
Post a Comment