24/03/20 17:25:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:

24/03/20 17:25:57 ERROR RetryingHMSHandler: NoSuchObjectException(message:There is no database named global_temp)
	at org.apache.hadoop.hive.metastore.ObjectStore.getMDatabase(ObjectStore.java:508)
	at org.apache.hadoop.hive.metastore.ObjectStore.getDatabase(ObjectStore.java:519)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
	at com.sun.proxy.$Proxy118.getDatabase(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_database(HiveMetaStore.java:796)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
	at com.sun.proxy.$Proxy120.get_database(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getDatabase(HiveMetaStoreClient.java:949)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
	at com.sun.proxy.$Proxy121.getDatabase(Unknown Source)
	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1165)
	at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1154)
	at org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:619)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:451)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:348)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$retryLocked$1(HiveClientImpl.scala:247)
	at org.apache.spark.sql.hive.client.HiveClientImpl.synchronizeOnObject(HiveClientImpl.scala:285)
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:239)
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:328)
	at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:451)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.$anonfun$databaseExists$1(PoolingHiveClient.scala:321)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.$anonfun$databaseExists$1$adapted(PoolingHiveClient.scala:320)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.withHiveClient(PoolingHiveClient.scala:149)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.databaseExists(PoolingHiveClient.scala:320)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:326)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$2(HiveExternalCatalog.scala:156)
	at org.apache.spark.sql.hive.HiveExternalCatalog.maybeSynchronized(HiveExternalCatalog.scala:117)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$1(HiveExternalCatalog.scala:155)
	at com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:403)
	at com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:389)
	at com.databricks.spark.util.SparkDatabricksProgressReporter$.withStatusCode(ProgressReporter.scala:34)
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:154)
	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:326)
	at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.$anonfun$databaseExists$1(ExternalCatalogWithListener.scala:84)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.databaseExists(ExternalCatalogWithListener.scala:84)
	at org.apache.spark.sql.internal.SharedState.$anonfun$globalTempViewExternalCatalogNameCheck$1(SharedState.scala:354)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at scala.util.Try$.apply(Try.scala:213)
	at org.apache.spark.sql.internal.SharedState.globalTempViewExternalCatalogNameCheck(SharedState.scala:354)
	at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:382)
	at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:378)
	at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$hiveCatalog$2(HiveSessionStateBuilder.scala:78)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.globalTempViewManager$lzycompute(SessionCatalog.scala:570)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.globalTempViewManager(SessionCatalog.scala:570)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.isGlobalTempViewDB(SessionCatalog.scala:1596)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.getRawLocalOrGlobalTempView(SessionCatalog.scala:1276)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.isTempView(SessionCatalog.scala:1603)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.isTempView(SessionCatalog.scala:1610)
	at com.databricks.sql.managedcatalog.ManagedCatalogSessionCatalog.isTempView(ManagedCatalogSessionCatalog.scala:1194)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.isTempView$1(ResolveSessionCatalog.scala:971)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.toIdentifier(ResolveSessionCatalog.scala:984)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.resolvePathIdentifier(ResolveSessionCatalog.scala:957)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog$$anonfun$apply$1.applyOrElse(ResolveSessionCatalog.scala:314)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog$$anonfun$apply$1.applyOrElse(ResolveSessionCatalog.scala:71)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsUpWithPruning$3(AnalysisHelper.scala:141)
	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:106)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsUpWithPruning$1(AnalysisHelper.scala:141)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:372)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUpWithPruning(AnalysisHelper.scala:137)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUpWithPruning$(AnalysisHelper.scala:133)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUpWithPruning(LogicalPlan.scala:32)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUp(AnalysisHelper.scala:114)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUp$(AnalysisHelper.scala:113)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUp(LogicalPlan.scala:32)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.apply(ResolveSessionCatalog.scala:71)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.apply(ResolveSessionCatalog.scala:65)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$4(RuleExecutor.scala:229)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$3(RuleExecutor.scala:229)
	at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
	at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
	at scala.collection.immutable.List.foldLeft(List.scala:91)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:226)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeBatch$1(RuleExecutor.scala:218)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$8(RuleExecutor.scala:296)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$8$adapted(RuleExecutor.scala:296)
	at scala.collection.immutable.List.foreach(List.scala:431)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:296)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:197)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.executeSameContext(Analyzer.scala:368)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$execute$1(Analyzer.scala:361)
	at org.apache.spark.sql.catalyst.analysis.AnalysisContext$.withNewAnalysisContext(Analyzer.scala:270)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:361)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:289)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:189)
	at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:165)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:189)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$executeAndCheck$1(Analyzer.scala:341)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:379)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:340)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:171)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:352)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$4(QueryExecution.scala:393)
	at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:841)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:393)
	at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:389)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1063)
	at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:389)
	at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:165)
	at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:165)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:198)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:189)
	at org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:305)
	at org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:310)
	at org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:307)
	at org.apache.spark.sql.execution.QueryExecution.assertOptimized(QueryExecution.scala:325)
	at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:344)
	at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:341)
	at org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:457)
	at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:522)
	at org.apache.spark.sql.execution.QueryExecution.explainStringLocal(QueryExecution.scala:484)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$8(SQLExecution.scala:205)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:431)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$1(SQLExecution.scala:188)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1063)
	at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:130)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:381)
	at io.delta.tables.DeltaTableBuilder.execute(DeltaTableBuilder.scala:387)
	at com.telefonica.haac.state.StateManager.readState(StateManager.scala:45)
	at com.telefonica.haac.state.StateManager.<init>(StateManager.scala:22)
	at com.telefonica.haac.state.StateManager$.$anonfun$getOrCreate$1(StateManager.scala:136)
	at scala.Option.getOrElse(Option.scala:189)
	at com.telefonica.haac.state.StateManager$.getOrCreate(StateManager.scala:135)
	at com.telefonica.haac.importers.Importer.buildStateManager(Importer.scala:66)
	at com.telefonica.haac.importers.Importer.buildStateManager$(Importer.scala:65)
	at com.telefonica.haac.importers.TelemetryEventsImporter.buildStateManager(TelemetryEventsImporter.scala:16)
	at com.telefonica.haac.importers.TelemetryEventsImporter.runImporter(TelemetryEventsImporter.scala:48)
	at com.telefonica.haac.importers.Importer.run(Importer.scala:83)
	at com.telefonica.haac.importers.Importer.run$(Importer.scala:71)
	at com.telefonica.haac.importers.TelemetryEventsImporter.run(TelemetryEventsImporter.scala:16)
	at com.telefonica.haac.JobLauncher$.runJob(JobLauncher.scala:51)
	at com.telefonica.haac.JobLauncher$.launch(JobLauncher.scala:19)
	at com.telefonica.haac.Main$.main(Main.scala:7)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(command--1:1)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw$$iw$$iw.<init>(command--1:43)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw$$iw.<init>(command--1:45)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw.<init>(command--1:47)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw.<init>(command--1:49)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw.<init>(command--1:51)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read.<init>(command--1:53)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$.<init>(command--1:57)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$.<clinit>(command--1)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$eval$.$print$lzycompute(<notebook>:7)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$eval$.$print(<notebook>:6)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$eval.$print(<notebook>)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:747)
	at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1020)
	at scala.tools.nsc.interpreter.IMain.$anonfun$interpret$1(IMain.scala:568)
	at scala.reflect.internal.util.ScalaClassLoader.asContext(ScalaClassLoader.scala:36)
	at scala.reflect.internal.util.ScalaClassLoader.asContext$(ScalaClassLoader.scala:116)
	at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:41)
	at scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:567)
	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:594)
	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:564)
	at com.databricks.backend.daemon.driver.DriverILoop.execute(DriverILoop.scala:223)
	at com.databricks.backend.daemon.driver.ScalaDriverLocal.$anonfun$repl$1(ScalaDriverLocal.scala:227)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at com.databricks.backend.daemon.driver.DriverLocal$TrapExitInternal$.trapExit(DriverLocal.scala:1283)
	at com.databricks.backend.daemon.driver.DriverLocal$TrapExit$.apply(DriverLocal.scala:1236)
	at com.databricks.backend.daemon.driver.ScalaDriverLocal.repl(ScalaDriverLocal.scala:227)
	at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$24(DriverLocal.scala:889)
	at com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:124)
	at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$21(DriverLocal.scala:872)
	at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:414)
	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)
	at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:158)
	at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:412)
	at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:409)
	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:69)
	at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:457)
	at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:442)
	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:69)
	at com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:849)
	at com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:660)
	at scala.util.Try$.apply(Try.scala:213)
	at com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:652)
	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:571)
	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:606)
	at com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:448)
	at com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:389)
	at com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:247)
	at java.lang.Thread.run(Thread.java:750)

24/03/20 17:25:57 ERROR RetryingHMSHandler: NoSuchObjectException(message:There is no database named delta)
	at org.apache.hadoop.hive.metastore.ObjectStore.getMDatabase(ObjectStore.java:508)
	at org.apache.hadoop.hive.metastore.ObjectStore.getDatabase(ObjectStore.java:519)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
	at com.sun.proxy.$Proxy118.getDatabase(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_database(HiveMetaStore.java:796)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
	at com.sun.proxy.$Proxy120.get_database(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getDatabase(HiveMetaStoreClient.java:949)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
	at com.sun.proxy.$Proxy121.getDatabase(Unknown Source)
	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1165)
	at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1154)
	at org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:619)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:451)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:348)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$retryLocked$1(HiveClientImpl.scala:247)
	at org.apache.spark.sql.hive.client.HiveClientImpl.synchronizeOnObject(HiveClientImpl.scala:285)
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:239)
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:328)
	at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:451)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.$anonfun$databaseExists$1(PoolingHiveClient.scala:321)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.$anonfun$databaseExists$1$adapted(PoolingHiveClient.scala:320)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.withHiveClient(PoolingHiveClient.scala:149)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.databaseExists(PoolingHiveClient.scala:320)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:326)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$2(HiveExternalCatalog.scala:156)
	at org.apache.spark.sql.hive.HiveExternalCatalog.maybeSynchronized(HiveExternalCatalog.scala:117)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$1(HiveExternalCatalog.scala:155)
	at com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:403)
	at com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:389)
	at com.databricks.spark.util.SparkDatabricksProgressReporter$.withStatusCode(ProgressReporter.scala:34)
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:154)
	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:326)
	at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.$anonfun$databaseExists$1(ExternalCatalogWithListener.scala:84)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.databaseExists(ExternalCatalogWithListener.scala:84)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.databaseExists(SessionCatalog.scala:818)
	at com.databricks.sql.managedcatalog.ManagedCatalogSessionCatalog.databaseExists(ManagedCatalogSessionCatalog.scala:625)
	at com.databricks.sql.managedcatalog.ManagedCatalogSessionCatalog.databaseExists(ManagedCatalogSessionCatalog.scala:618)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.databaseExists$1(ResolveSessionCatalog.scala:974)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.toIdentifier(ResolveSessionCatalog.scala:985)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.resolvePathIdentifier(ResolveSessionCatalog.scala:957)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog$$anonfun$apply$1.applyOrElse(ResolveSessionCatalog.scala:314)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog$$anonfun$apply$1.applyOrElse(ResolveSessionCatalog.scala:71)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsUpWithPruning$3(AnalysisHelper.scala:141)
	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:106)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsUpWithPruning$1(AnalysisHelper.scala:141)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:372)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUpWithPruning(AnalysisHelper.scala:137)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUpWithPruning$(AnalysisHelper.scala:133)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUpWithPruning(LogicalPlan.scala:32)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUp(AnalysisHelper.scala:114)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUp$(AnalysisHelper.scala:113)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUp(LogicalPlan.scala:32)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.apply(ResolveSessionCatalog.scala:71)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.apply(ResolveSessionCatalog.scala:65)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$4(RuleExecutor.scala:229)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$3(RuleExecutor.scala:229)
	at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
	at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
	at scala.collection.immutable.List.foldLeft(List.scala:91)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:226)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeBatch$1(RuleExecutor.scala:218)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$8(RuleExecutor.scala:296)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$8$adapted(RuleExecutor.scala:296)
	at scala.collection.immutable.List.foreach(List.scala:431)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:296)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:197)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.executeSameContext(Analyzer.scala:368)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$execute$1(Analyzer.scala:361)
	at org.apache.spark.sql.catalyst.analysis.AnalysisContext$.withNewAnalysisContext(Analyzer.scala:270)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:361)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:289)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:189)
	at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:165)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:189)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$executeAndCheck$1(Analyzer.scala:341)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:379)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:340)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:171)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:352)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$4(QueryExecution.scala:393)
	at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:841)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:393)
	at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:389)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1063)
	at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:389)
	at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:165)
	at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:165)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:198)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:189)
	at org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:305)
	at org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:310)
	at org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:307)
	at org.apache.spark.sql.execution.QueryExecution.assertOptimized(QueryExecution.scala:325)
	at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:344)
	at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:341)
	at org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:457)
	at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:522)
	at org.apache.spark.sql.execution.QueryExecution.explainStringLocal(QueryExecution.scala:484)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$8(SQLExecution.scala:205)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:431)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$1(SQLExecution.scala:188)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1063)
	at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:130)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:381)
	at io.delta.tables.DeltaTableBuilder.execute(DeltaTableBuilder.scala:387)
	at com.telefonica.haac.state.StateManager.readState(StateManager.scala:45)
	at com.telefonica.haac.state.StateManager.<init>(StateManager.scala:22)
	at com.telefonica.haac.state.StateManager$.$anonfun$getOrCreate$1(StateManager.scala:136)
	at scala.Option.getOrElse(Option.scala:189)
	at com.telefonica.haac.state.StateManager$.getOrCreate(StateManager.scala:135)
	at com.telefonica.haac.importers.Importer.buildStateManager(Importer.scala:66)
	at com.telefonica.haac.importers.Importer.buildStateManager$(Importer.scala:65)
	at com.telefonica.haac.importers.TelemetryEventsImporter.buildStateManager(TelemetryEventsImporter.scala:16)
	at com.telefonica.haac.importers.TelemetryEventsImporter.runImporter(TelemetryEventsImporter.scala:48)
	at com.telefonica.haac.importers.Importer.run(Importer.scala:83)
	at com.telefonica.haac.importers.Importer.run$(Importer.scala:71)
	at com.telefonica.haac.importers.TelemetryEventsImporter.run(TelemetryEventsImporter.scala:16)
	at com.telefonica.haac.JobLauncher$.runJob(JobLauncher.scala:51)
	at com.telefonica.haac.JobLauncher$.launch(JobLauncher.scala:19)
	at com.telefonica.haac.Main$.main(Main.scala:7)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(command--1:1)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw$$iw$$iw.<init>(command--1:43)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw$$iw.<init>(command--1:45)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw.<init>(command--1:47)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw.<init>(command--1:49)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw.<init>(command--1:51)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read.<init>(command--1:53)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$.<init>(command--1:57)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$.<clinit>(command--1)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$eval$.$print$lzycompute(<notebook>:7)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$eval$.$print(<notebook>:6)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$eval.$print(<notebook>)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:747)
	at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1020)
	at scala.tools.nsc.interpreter.IMain.$anonfun$interpret$1(IMain.scala:568)
	at scala.reflect.internal.util.ScalaClassLoader.asContext(ScalaClassLoader.scala:36)
	at scala.reflect.internal.util.ScalaClassLoader.asContext$(ScalaClassLoader.scala:116)
	at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:41)
	at scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:567)
	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:594)
	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:564)
	at com.databricks.backend.daemon.driver.DriverILoop.execute(DriverILoop.scala:223)
	at com.databricks.backend.daemon.driver.ScalaDriverLocal.$anonfun$repl$1(ScalaDriverLocal.scala:227)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at com.databricks.backend.daemon.driver.DriverLocal$TrapExitInternal$.trapExit(DriverLocal.scala:1283)
	at com.databricks.backend.daemon.driver.DriverLocal$TrapExit$.apply(DriverLocal.scala:1236)
	at com.databricks.backend.daemon.driver.ScalaDriverLocal.repl(ScalaDriverLocal.scala:227)
	at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$24(DriverLocal.scala:889)
	at com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:124)
	at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$21(DriverLocal.scala:872)
	at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:414)
	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)
	at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:158)
	at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:412)
	at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:409)
	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:69)
	at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:457)
	at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:442)
	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:69)
	at com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:849)
	at com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:660)
	at scala.util.Try$.apply(Try.scala:213)
	at com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:652)
	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:571)
	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:606)
	at com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:448)
	at com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:389)
	at com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:247)
	at java.lang.Thread.run(Thread.java:750)

24/03/20 17:26:40 ERROR RetryingHMSHandler: NoSuchObjectException(message:There is no database named delta)
	at org.apache.hadoop.hive.metastore.ObjectStore.getMDatabase(ObjectStore.java:508)
	at org.apache.hadoop.hive.metastore.ObjectStore.getDatabase(ObjectStore.java:519)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
	at com.sun.proxy.$Proxy118.getDatabase(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_database(HiveMetaStore.java:796)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
	at com.sun.proxy.$Proxy120.get_database(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getDatabase(HiveMetaStoreClient.java:949)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
	at com.sun.proxy.$Proxy121.getDatabase(Unknown Source)
	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1165)
	at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1154)
	at org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:619)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:451)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:348)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$retryLocked$1(HiveClientImpl.scala:247)
	at org.apache.spark.sql.hive.client.HiveClientImpl.synchronizeOnObject(HiveClientImpl.scala:285)
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:239)
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:328)
	at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:451)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.$anonfun$databaseExists$1(PoolingHiveClient.scala:321)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.$anonfun$databaseExists$1$adapted(PoolingHiveClient.scala:320)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.withHiveClient(PoolingHiveClient.scala:149)
	at org.apache.spark.sql.hive.client.PoolingHiveClient.databaseExists(PoolingHiveClient.scala:320)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:326)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$2(HiveExternalCatalog.scala:156)
	at org.apache.spark.sql.hive.HiveExternalCatalog.maybeSynchronized(HiveExternalCatalog.scala:117)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$1(HiveExternalCatalog.scala:155)
	at com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:403)
	at com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:389)
	at com.databricks.spark.util.SparkDatabricksProgressReporter$.withStatusCode(ProgressReporter.scala:34)
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:154)
	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:326)
	at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.$anonfun$databaseExists$1(ExternalCatalogWithListener.scala:84)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.databaseExists(ExternalCatalogWithListener.scala:84)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.databaseExists(SessionCatalog.scala:818)
	at com.databricks.sql.managedcatalog.ManagedCatalogSessionCatalog.databaseExists(ManagedCatalogSessionCatalog.scala:625)
	at com.databricks.sql.managedcatalog.ManagedCatalogSessionCatalog.databaseExists(ManagedCatalogSessionCatalog.scala:618)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.databaseExists$1(ResolveSessionCatalog.scala:974)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.toIdentifier(ResolveSessionCatalog.scala:985)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.resolvePathIdentifier(ResolveSessionCatalog.scala:957)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog$$anonfun$apply$1.applyOrElse(ResolveSessionCatalog.scala:314)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog$$anonfun$apply$1.applyOrElse(ResolveSessionCatalog.scala:71)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsUpWithPruning$3(AnalysisHelper.scala:141)
	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:106)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsUpWithPruning$1(AnalysisHelper.scala:141)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:372)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUpWithPruning(AnalysisHelper.scala:137)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUpWithPruning$(AnalysisHelper.scala:133)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUpWithPruning(LogicalPlan.scala:32)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUp(AnalysisHelper.scala:114)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUp$(AnalysisHelper.scala:113)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUp(LogicalPlan.scala:32)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.apply(ResolveSessionCatalog.scala:71)
	at org.apache.spark.sql.catalyst.analysis.ResolveSessionCatalog.apply(ResolveSessionCatalog.scala:65)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$4(RuleExecutor.scala:229)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$3(RuleExecutor.scala:229)
	at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
	at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
	at scala.collection.immutable.List.foldLeft(List.scala:91)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:226)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeBatch$1(RuleExecutor.scala:218)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$8(RuleExecutor.scala:296)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$8$adapted(RuleExecutor.scala:296)
	at scala.collection.immutable.List.foreach(List.scala:431)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:296)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:197)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.executeSameContext(Analyzer.scala:368)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$execute$1(Analyzer.scala:361)
	at org.apache.spark.sql.catalyst.analysis.AnalysisContext$.withNewAnalysisContext(Analyzer.scala:270)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:361)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:289)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:189)
	at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:165)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:189)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$executeAndCheck$1(Analyzer.scala:341)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:379)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:340)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:171)
	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
	at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:352)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$4(QueryExecution.scala:393)
	at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:841)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:393)
	at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:389)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1063)
	at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:389)
	at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:165)
	at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:165)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:198)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:189)
	at org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:305)
	at org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:310)
	at org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:307)
	at org.apache.spark.sql.execution.QueryExecution.assertOptimized(QueryExecution.scala:325)
	at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:344)
	at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:341)
	at org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:457)
	at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:522)
	at org.apache.spark.sql.execution.QueryExecution.explainStringLocal(QueryExecution.scala:484)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$8(SQLExecution.scala:205)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:431)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$1(SQLExecution.scala:188)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1063)
	at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:130)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:381)
	at io.delta.tables.DeltaTableBuilder.execute(DeltaTableBuilder.scala:387)
	at com.telefonica.haac.state.StateManager.readState(StateManager.scala:45)
	at com.telefonica.haac.state.StateManager.$anonfun$currentState$1(StateManager.scala:23)
	at scala.Option.getOrElse(Option.scala:189)
	at com.telefonica.haac.state.StateManager.<init>(StateManager.scala:22)
	at com.telefonica.haac.state.StateManager$.$anonfun$getOrCreate$1(StateManager.scala:136)
	at scala.Option.getOrElse(Option.scala:189)
	at com.telefonica.haac.state.StateManager$.getOrCreate(StateManager.scala:135)
	at com.telefonica.haac.importers.Importer.buildStateManager(Importer.scala:66)
	at com.telefonica.haac.importers.Importer.buildStateManager$(Importer.scala:65)
	at com.telefonica.haac.importers.TelemetryEventsImporter.buildStateManager(TelemetryEventsImporter.scala:16)
	at com.telefonica.haac.importers.TelemetryEventsImporter.runImporter(TelemetryEventsImporter.scala:48)
	at com.telefonica.haac.importers.Importer.run(Importer.scala:83)
	at com.telefonica.haac.importers.Importer.run$(Importer.scala:71)
	at com.telefonica.haac.importers.TelemetryEventsImporter.run(TelemetryEventsImporter.scala:16)
	at com.telefonica.haac.JobLauncher$.runJob(JobLauncher.scala:51)
	at com.telefonica.haac.JobLauncher$.launch(JobLauncher.scala:19)
	at com.telefonica.haac.Main$.main(Main.scala:7)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(command--1:1)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw$$iw$$iw.<init>(command--1:43)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw$$iw.<init>(command--1:45)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw$$iw.<init>(command--1:47)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw$$iw.<init>(command--1:49)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$$iw.<init>(command--1:51)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read.<init>(command--1:53)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$.<init>(command--1:57)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$read$.<clinit>(command--1)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$eval$.$print$lzycompute(<notebook>:7)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$eval$.$print(<notebook>:6)
	at $linefc85dfcefbc84d9f88c58ddc40b423d025.$eval.$print(<notebook>)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:747)
	at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1020)
	at scala.tools.nsc.interpreter.IMain.$anonfun$interpret$1(IMain.scala:568)
	at scala.reflect.internal.util.ScalaClassLoader.asContext(ScalaClassLoader.scala:36)
	at scala.reflect.internal.util.ScalaClassLoader.asContext$(ScalaClassLoader.scala:116)
	at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:41)
	at scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:567)
	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:594)
	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:564)
	at com.databricks.backend.daemon.driver.DriverILoop.execute(DriverILoop.scala:223)
	at com.databricks.backend.daemon.driver.ScalaDriverLocal.$anonfun$repl$1(ScalaDriverLocal.scala:227)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at com.databricks.backend.daemon.driver.DriverLocal$TrapExitInternal$.trapExit(DriverLocal.scala:1283)
	at com.databricks.backend.daemon.driver.DriverLocal$TrapExit$.apply(DriverLocal.scala:1236)
	at com.databricks.backend.daemon.driver.ScalaDriverLocal.repl(ScalaDriverLocal.scala:227)
	at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$24(DriverLocal.scala:889)
	at com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:124)
	at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$21(DriverLocal.scala:872)
	at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:414)
	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)
	at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:158)
	at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:412)
	at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:409)
	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:69)
	at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:457)
	at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:442)
	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:69)
	at com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:849)
	at com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:660)
	at scala.util.Try$.apply(Try.scala:213)
	at com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:652)
	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:571)
	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:606)
	at com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:448)
	at com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:389)
	at com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:247)
	at java.lang.Thread.run(Thread.java:750)

24/03/20 17:26:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:27:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:27:53 ERROR AbfsClient: HttpRequest: 409,err=PathAlreadyExists,appendpos=,cid=0320-172020-1uzxgmto------:c75cc53f-bd7f-48a7-a8df-cf95ce8b9729:35ce507f-3e3c-4e30-aab5-7a626d61913c:3ee81d07-df31-44b0-802d-8d36a233f185::CR:0,rid=f63154e8-501f-005f-05eb-7a5ab2000000,connMs=0,sendMs=0,recvMs=13,sent=0,recv=168,method=PUT,https://dbstorage7m4lqz4uvbouw.dfs.core.windows.net/root/5281021567391676/state/telemetry/_delta_log/_last_checkpoint?resource=file&timeout=90&sig=XXXXX&st=2024-03-20T16:54:42Z&se=2024-03-21T12:54:42Z&sv=2019-02-02&spr=https&sp=racwdl&sr=c
24/03/20 17:27:53 INFO TelemetryEventsImporter: Telemetry data path is: wasbs://[email protected]/year=2024/month=03/day=20/hour=16/
24/03/20 17:28:17 INFO TelemetryEventsImporter: Writing dataset Stations_Periodic and version 9
24/03/20 17:28:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:29:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:30:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:31:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:32:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:32:51 INFO TelemetryEventsImporter: Writing dataset WiFi_Interface and version 9
24/03/20 17:33:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:34:07 INFO TelemetryEventsImporter: Writing dataset WAN_Connection and version 9
24/03/20 17:34:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:34:47 INFO TelemetryEventsImporter: Writing dataset Stations_OnDemand and version 9
24/03/20 17:35:27 INFO TelemetryEventsImporter: Writing dataset Neighbors and version 9
24/03/20 17:35:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:36:44 INFO TelemetryEventsImporter: Writing dataset Watchdog_GPON and version 9
24/03/20 17:36:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:37:09 INFO TelemetryEventsImporter: Writing dataset Ethernet_Interface and version 9
24/03/20 17:37:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:38:32 INFO TelemetryEventsImporter: There are not block of dataset Multicast, skipping write.
24/03/20 17:38:32 INFO TelemetryEventsImporter: Writing dataset Watchdog_Periodic_Info_Connections and version 9
24/03/20 17:38:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:39:06 INFO TelemetryEventsImporter: Writing dataset Device_Info and version 9
24/03/20 17:39:12 ERROR AsyncEventQueue: Dropping event from queue shared. This likely means one of the listeners is too slow and cannot keep up with the rate at which tasks are being started by the scheduler.
24/03/20 17:39:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:39:52 INFO TelemetryEventsImporter: There are not block of dataset IPTABLES, skipping write.
24/03/20 17:40:09 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_Periodic_Info_General, skipping write.
24/03/20 17:40:31 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_Periodic_Info_Processes, skipping write.
24/03/20 17:40:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:40:55 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_ARP_Table, skipping write.
24/03/20 17:40:55 INFO TelemetryEventsImporter: Writing dataset Watchdog_Restart_Process and version 9
24/03/20 17:41:31 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_IP_Route, skipping write.
24/03/20 17:41:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:41:45 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_VoIP_DHCP_Alarm, skipping write.
24/03/20 17:42:01 INFO TelemetryEventsImporter: There are not block of dataset DHCP_Lease_Table, skipping write.
24/03/20 17:42:20 INFO TelemetryEventsImporter: There are not block of dataset Radar_Detection, skipping write.
24/03/20 17:42:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:42:47 ERROR AbfsClient: HttpRequest: 409,err=PathAlreadyExists,appendpos=,cid=0320-172020-1uzxgmto------:77beb5db-8e98-48d4-9c97-0f678022ba0b:35ce507f-3e3c-4e30-aab5-7a626d61913c:ec6472c9-c24a-46c6-bb26-43dee928783f::CR:0,rid=dcb729be-f01f-0069-33ee-7ad7c2000000,connMs=0,sendMs=0,recvMs=14,sent=0,recv=168,method=PUT,https://dbstorage7m4lqz4uvbouw.dfs.core.windows.net/root/5281021567391676/state/telemetry/_delta_log/_last_checkpoint?resource=file&timeout=90&sig=XXXXX&st=2024-03-20T16:54:42Z&se=2024-03-21T12:54:42Z&sv=2019-02-02&spr=https&sp=racwdl&sr=c
24/03/20 17:42:47 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_VoIP_Registration_Alarm, skipping write.
24/03/20 17:42:56 INFO TelemetryEventsImporter: There are not block of dataset Roaming_Event, skipping write.
24/03/20 17:43:24 INFO TelemetryEventsImporter: There are not block of dataset DR_Report, skipping write.
24/03/20 17:43:29 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_Robustness, skipping write.
24/03/20 17:43:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:43:44 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_Restart_Service, skipping write.
24/03/20 17:43:52 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_CLI, skipping write.
24/03/20 17:44:08 INFO TelemetryEventsImporter: There are not block of dataset Core_File, skipping write.
24/03/20 17:44:16 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_Alarm_PPP, skipping write.
24/03/20 17:44:22 INFO TelemetryEventsImporter: There are not block of dataset Steering_Event, skipping write.
24/03/20 17:44:32 INFO TelemetryEventsImporter: There are not block of dataset Log, skipping write.
24/03/20 17:44:41 ERROR AbfsClient: HttpRequest: 409,err=PathAlreadyExists,appendpos=,cid=0320-172020-1uzxgmto------:5cc10e7b-54ff-4aab-bfa4-788bf99d42ba:35ce507f-3e3c-4e30-aab5-7a626d61913c:99e542e0-36f1-40d3-9582-1008ad4fee74::CR:0,rid=2fba67bf-f01f-0024-1cee-7a182e000000,connMs=0,sendMs=0,recvMs=12,sent=0,recv=168,method=PUT,https://dbstorage7m4lqz4uvbouw.dfs.core.windows.net/root/5281021567391676/state/telemetry/_delta_log/_last_checkpoint?resource=file&timeout=90&sig=XXXXX&st=2024-03-20T16:54:42Z&se=2024-03-21T12:54:42Z&sv=2019-02-02&spr=https&sp=racwdl&sr=c
24/03/20 17:44:41 INFO TelemetryEventsImporter: There are not block of dataset HPNA_Diagnostics, skipping write.
24/03/20 17:44:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:44:54 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_Alarm_TR069, skipping write.
24/03/20 17:45:03 INFO TelemetryEventsImporter: There are not block of dataset HPNA, skipping write.
24/03/20 17:45:03 INFO TelemetryEventsImporter: Writing dataset VoIP_Info and version 9
24/03/20 17:45:37 INFO TelemetryEventsImporter: There are not block of dataset Watchdog_Max_Time_Exceeded, skipping write.
24/03/20 17:45:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:46:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:47:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:48:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:49:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:50:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:51:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:52:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:53:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:54:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:55:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
24/03/20 17:56:09 ERROR AbfsClient: HttpRequest: 409,err=PathAlreadyExists,appendpos=,cid=0320-172020-1uzxgmto------:0b1ef9ba-6358-41ef-81a1-680229ae540e:35ce507f-3e3c-4e30-aab5-7a626d61913c:930986ad-468d-4a67-ac7b-94d02a57d0fc::CR:0,rid=e36ad392-c01f-005d-16ef-7ae40a000000,connMs=0,sendMs=0,recvMs=12,sent=0,recv=168,method=PUT,https://dbstorage7m4lqz4uvbouw.dfs.core.windows.net/root/5281021567391676/state/telemetry/_delta_log/_last_checkpoint?resource=file&timeout=90&sig=XXXXX&st=2024-03-20T16:54:42Z&se=2024-03-21T12:54:42Z&sv=2019-02-02&spr=https&sp=racwdl&sr=c
24/03/20 17:56:44 ERROR CommandLineHelper$: Command [REDACTED] failed with exit code 1 out: err:
