When saving a dataframe from an external Databricks cluster into Hopsworks, I am getting a "database not found" error.
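For reference, the feature store handle comes from the standard hsfs external connection (a minimal sketch; the host, project, and API key values below are placeholders for my actual configuration):

import hsfs

# Connect from the external Databricks cluster to the Hopsworks instance.
# host / project / api_key_value are placeholders, not the real values.
connection = hsfs.connection(
    host="myinstance.cloud.hopsworks.ai",  # placeholder Hopsworks host
    project="my_project",                  # placeholder project name
    api_key_value="<api-key>",             # placeholder API key
    engine="spark",
)

# Get the project's default feature store; this is the `fs` used below.
fs = connection.get_feature_store()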
Code:
from pyspark.sql import functions as F

# Parse the date column using the dd/MM/yyyy format.
exogenous_features_df = exogenous_csv.withColumn("date", F.to_date("date", "dd/MM/yyyy"))

exogenous_fg_meta = fs.create_feature_group(
    name="exogenous_fg_new",
    version=1,
    primary_key=["store", "date"],
    description="External features that influence sales, but are not under the control of the distribution chain",
    time_travel_format=None,
    # statistics_config={"enabled": True, "histograms": True, "correlations": True}
    statistics_config=False,
)

# fs.get_feature_group("exogenous_fg_new", version=1).delete()
exogenous_fg_meta.save(exogenous_features_df)
Error:
/databricks/spark/python/pyspark/sql/readwriter.py in saveAsTable(self, name, format, mode, partitionBy, **options)
   1183         if format is not None:
   1184             self.format(format)
-> 1185         self._jwrite.saveAsTable(name)
   1186
   1187     def json(self, path, mode=None, compression=None, dateFormat=None, timestampFormat=None,

/databricks/spark/python/lib/py4j-0.10.9-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1302
   1303         answer = self.gateway_client.send_command(command)
-> 1304         return_value = get_return_value(
   1305             answer, self.gateway_client, self.target_id, self.name)
   1306

/databricks/spark/python/pyspark/sql/utils.py in deco(*a, **kw)
    114             # Hide where the exception came from that shows a non-Pythonic
    115             # JVM exception message.
--> 116             raise converted from None
    117         else:
    118             raise

AnalysisException: Database 'dev_featurestore' not found
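To narrow this down, I can check which databases the Databricks cluster actually resolves, since saveAsTable() fails while looking up the feature store database (a quick diagnostic sketch; spark is the active SparkSession and connection is the hsfs connection from above):

# List the databases visible to this cluster's metastore; if the Hopsworks
# feature store database is not listed here, saveAsTable() cannot find it.
spark.sql("SHOW DATABASES").show(truncate=False)

# Fetch the feature store explicitly by name instead of relying on the
# default (assumption: "dev_featurestore" is the name from the error).
fs = connection.get_feature_store(name="dev_featurestore")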