Can anyone help me? I need to run my Redshift stored procedure from an AWS Glue job, but I always get an error on this query. Did I write the query wrong? Please help.
import sys
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
args = getResolvedOptions(sys.argv, ['JOB_NAME'])
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
logger = glueContext.get_logger()
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
# dw-poc-dev spark test
source_jdbc_conf = glueContext.extract_jdbc_conf('glue_connection')
from py4j.java_gateway import java_import
java_import(sc._gateway.jvm,"java.sql.Connection")
java_import(sc._gateway.jvm,"java.sql.DatabaseMetaData")
java_import(sc._gateway.jvm,"java.sql.DriverManager")
java_import(sc._gateway.jvm,"java.sql.SQLException")
conn = sc._gateway.jvm.DriverManager.getConnection(source_jdbc_conf.get('url'),source_jdbc_conf.get('user','username'), source_jdbc_conf.get('password','password'))
# call stored procedure
rs = conn.executeQuery('call ss_schema.sp_weekly_kpi_01_bbbklg()');
conn.close()
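For reference, java.sql.Connection has no executeQuery method; in JDBC you create a Statement from the connection and run the SQL through that, which is probably why the line above errors out. A minimal sketch of that pattern (reusing the conn and the procedure name from the code above, not a confirmed fix):

stmt = conn.createStatement()
# execute() rather than executeQuery(), since this CALL returns no result set
stmt.execute("call ss_schema.sp_weekly_kpi_01_bbbklg()")
stmt.close()
conn.close()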
Thank you for this, but I have run into a new problem. Is this the correct format for calling a stored procedure in Redshift?
stmt = conn.createStatement()
try:
    rs = stmt.executeQuery("call dbo.sp_copy_csv_gz_staging('target_db', 's3_source')")
except:
    print("An exception occurred but proc has run")
conn.close()
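If the goal is to pass the two names in as arguments, one option (my own suggestion, not something confirmed in this thread) is a parameterized CallableStatement via conn.prepareCall, which avoids the quoting problem entirely and lets the real error surface instead of being swallowed by a bare except. A sketch, assuming the Redshift JDBC driver accepts the standard "call proc(?, ?)" syntax and reusing the procedure name and values from the snippet above:

cstmt = conn.prepareCall("call dbo.sp_copy_csv_gz_staging(?, ?)")
cstmt.setString(1, "target_db")   # literal values taken from the snippet above
cstmt.setString(2, "s3_source")
try:
    cstmt.execute()
except Exception as e:
    # print the underlying JDBC error rather than hiding it
    print("Stored procedure call failed: {}".format(e))
finally:
    cstmt.close()
    conn.close()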