Scnfinal
=============
Day 1
=============
System.setProperty("hadoop.home.dir", "C:\\hadoop")
println("================Started1============")
day1.show()
val finaldf = addColumnIndex(spark,day1).withColumn("batchid",lit(1))
.select("id","tdate","custnno","amt","state","batchid")
finaldf.show()
finaldf.write.format("snowflake")
.option("sfURL","https://eogjppo-
wl54107.snowflakecomputing.com")
.option("sfAccount","eogjppo")
.option("sfUser","zeyobronanalytics66")
.option("sfPassword","Zeyo@908")
.option("sfDatabase","zeyodb")
.option("sfSchema","PUBLIC")
.option("sfRole","ACCOUNTADMIN")
.option("sfWarehouse","COMPUTE_WH")
.option("dbtable","dtab")
.save()
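The same Snowflake connection options are repeated verbatim for every read and write in these notes. As a sketch (reusing the credentials above), they can be collected once in a Map and passed with .options(...), which both DataFrameReader and DataFrameWriter accept:

// Shared Snowflake connection options (values copied from the notes above).
val sfOptions = Map(
  "sfURL" -> "https://eogjppo-wl54107.snowflakecomputing.com",
  "sfAccount" -> "eogjppo",
  "sfUser" -> "zeyobronanalytics66",
  "sfPassword" -> "Zeyo@908",
  "sfDatabase" -> "zeyodb",
  "sfSchema" -> "PUBLIC",
  "sfRole" -> "ACCOUNTADMIN",
  "sfWarehouse" -> "COMPUTE_WH"
)

// Equivalent Day 1 write using the shared map.
finaldf.write.format("snowflake")
  .options(sfOptions)
  .option("dbtable", "dtab")
  .save()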
=============
Day 2
=============
val max_id_batc = spark.read.format("snowflake")
.option("sfURL","https://eogjppo-wl54107.snowflakecomputing.com")
.option("sfAccount","eogjppo")
.option("sfUser","zeyobronanalytics66")
.option("sfPassword","Zeyo@908")
.option("sfDatabase","zeyodb")
.option("sfSchema","PUBLIC")
.option("sfRole","ACCOUNTADMIN")
.option("sfWarehouse","COMPUTE_WH")
.option("query","select max(id) as maxid,max(batchid) as maxbatch from
zeyodb.public.dtab")
.load()
day1.show()
max_id_batc.show()
batchid.show()
crossjoin.show()
indexcolumn.show()
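The batchid, crossjoin and indexcolumn dataframes shown above are not defined in these notes. A minimal sketch of how the Day 2 rows are typically numbered on from the stored maximums, reusing the addColumnIndex helper from Day 5 (every intermediate name and formula below is an assumption, not the original code):

// Assumed Day 2 derivation (not part of the original notes).
// Next batch number = previous max batch + 1; keep maxid alongside it.
val batchid = max_id_batc.select(col("maxid"), (col("maxbatch") + 1).as("batchid"))

// Attach maxid and the new batchid to every Day 2 row.
val crossjoin = day1.crossJoin(batchid)

// Index the rows, then continue the id sequence after the stored maximum.
val indexcolumn = addColumnIndex(spark, crossjoin)
  .withColumn("id", col("id") + col("maxid") + 1)

val finaldf = indexcolumn.select("id","tdate","custnno","amt","state","batchid")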
finaldf.write.format("snowflake")
.option("sfURL","https://eogjppo-wl54107.snowflakecomputing.com")
.option("sfAccount","eogjppo")
.option("sfUser","zeyobronanalytics66")
.option("sfPassword","Zeyo@908")
.option("sfDatabase","zeyodb")
.option("sfSchema","PUBLIC")
.option("sfRole","ACCOUNTADMIN")
.option("sfWarehouse","COMPUTE_WH")
.option("dbtable","dtab")
.mode("append")
.save()
===========
Day 5
===========
package pack
import org.apache.spark._
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.functions._
import org.apache.spark.sql.functions.udf
import scala.io.Source
import pack.urlobj
import org.apache.spark.sql._
object obj {
// Appends a sequential "id" column by zipping each row with its index.
// Reconstructed with the standard zipWithIndex pattern implied by the schema line kept from the notes.
def addColumnIndex(spark: SparkSession, df: DataFrame): DataFrame =
  spark.createDataFrame(
    df.rdd.zipWithIndex.map { case (row, index) => Row.fromSeq(row.toSeq :+ index) },
    StructType(df.schema.fields :+ StructField("id", LongType, false)))
def main(args:Array[String]):Unit={
System.setProperty("hadoop.home.dir", "C:\\hadoop")
println("================Started1============")
val df = spark.read.format("csv")
.option("header","true")
.load("file:///C:/data/scenaridata/data55.csv")
df.show(100)
inbatch.show(100)
val limit = 10
moddf.show(100)
increid.show(100)
snowdf.show()
finaldf1.show(100)
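The inbatch, moddf, increid, snowdf and finaldf1 dataframes are shown above but never defined in the notes. One possible sketch, assuming the intent is to split the incoming file into batches of limit rows and continue both the id and batchid sequences from what is already stored in Snowflake (every name and formula below is a guess, and sfOptions is the shared map sketched after Day 1):

// Assumed Day 5 derivation (not part of the original notes).
// Current maximums already stored in Snowflake, read the same way as on Day 2.
val snowdf = spark.read.format("snowflake")
  .options(sfOptions)
  .option("query", "select max(id) as maxid, max(batchid) as maxbatch from zeyodb.public.dtab")
  .load()

// 0-based row index for the new file, then the batch bucket each row falls into
// when a batch may hold at most `limit` rows.
val inbatch = addColumnIndex(spark, df).withColumn("inbatch", floor(col("id") / limit))

// Carry the stored maximums onto every row.
val moddf = inbatch.crossJoin(snowdf)

// Continue both sequences after the stored maximums.
val increid = moddf
  .withColumn("id", col("id") + col("maxid") + 1)
  .withColumn("batchid", col("inbatch") + col("maxbatch") + 1)

val finaldf1 = increid.select("id","tdate","custnno","amt","state","batchid")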
finaldf1.write.format("snowflake")
.option("sfURL","https://eogjppo-wl54107.snowflakecomputing.com")
.option("sfAccount","eogjppo")
.option("sfUser","zeyobronanalytics66")
.option("sfPassword","Zeyo@908")
.option("sfDatabase","zeyodb")
.option("sfSchema","PUBLIC")
.option("sfRole","ACCOUNTADMIN")
.option("sfWarehouse","COMPUTE_WH")
.option("dbtable","dtab")
.mode("append")
.save()
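To confirm that the append landed next to the earlier batches, the table can be read back with the same connector; a small verification sketch, reusing the assumed shared option map:

// Read the target table back and inspect the batch distribution.
val check = spark.read.format("snowflake")
  .options(sfOptions)
  .option("query", "select batchid, count(*) as cnt, max(id) as maxid from zeyodb.public.dtab group by batchid order by batchid")
  .load()
check.show()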
/*df.show()
batchdf.show()
increid.show()
val finaldf =
increid.select("id","tdate","custnno","amt","state","batchid")
finaldf.show()
snowdf.show()
finaldf1.show(100)
finaldf1.write.format("snowflake")
.option("sfURL","https://eogjppo-wl54107.snowflakecomputing.com")
.option("sfAccount","eogjppo")
.option("sfUser","zeyobronanalytics66")
.option("sfPassword","Zeyo@908")
.option("sfDatabase","zeyodb")
.option("sfSchema","PUBLIC")
.option("sfRole","ACCOUNTADMIN")
.option("sfWarehouse","COMPUTE_WH")
.option("dbtable","dtab")
.mode("append")
.save()
*/
}
}