@@ -131,6 +131,20 @@ def session_catalog() -> Catalog:
     )


+@pytest.fixture(scope="session")
+def session_catalog_hive() -> Catalog:
+    return load_catalog(
+        "local",
+        **{
+            "type": "hive",
+            "uri": "http://localhost:9083",
+            "s3.endpoint": "http://localhost:9000",
+            "s3.access-key-id": "admin",
+            "s3.secret-access-key": "password",
+        },
+    )
+
+
 @pytest.fixture(scope="session")
 def pa_schema() -> pa.Schema:
     return pa.schema([
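A rough usage sketch (not part of this diff): a test could request the new `session_catalog_hive` fixture the same way tests already use the REST-backed `session_catalog`. The marker and the assertion below are illustrative placeholders, not code from this change.

```python
# Sketch only: exercises the Hive-backed catalog fixture added above.
import pytest
from pyiceberg.catalog import Catalog


@pytest.mark.integration
def test_hive_catalog_lists_namespaces(session_catalog_hive: Catalog) -> None:
    # The Hive metastore from the local docker setup should answer basic
    # metadata calls; an empty list is still a valid response.
    namespaces = session_catalog_hive.list_namespaces()
    assert isinstance(namespaces, list)
```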
@@ -289,6 +303,13 @@ def spark() -> SparkSession:
         .config("spark.sql.catalog.integration.s3.endpoint", "http://localhost:9000")
         .config("spark.sql.catalog.integration.s3.path-style-access", "true")
         .config("spark.sql.defaultCatalog", "integration")
+        .config("spark.sql.catalog.hive", "org.apache.iceberg.spark.SparkCatalog")
+        .config("spark.sql.catalog.hive.type", "hive")
+        .config("spark.sql.catalog.hive.uri", "http://localhost:9083")
+        .config("spark.sql.catalog.hive.io-impl", "org.apache.iceberg.aws.s3.S3FileIO")
+        .config("spark.sql.catalog.hive.warehouse", "s3://warehouse/hive/")
+        .config("spark.sql.catalog.hive.s3.endpoint", "http://localhost:9000")
+        .config("spark.sql.catalog.hive.s3.path-style-access", "true")
         .getOrCreate()
     )

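For reference, a minimal sketch (again not part of the diff) of how a test might cross-check a table written through the Hive-backed PyIceberg catalog against Spark via the newly registered `hive` catalog. The table name is a placeholder.

```python
# Sketch only: compares row counts between PyIceberg and Spark for the same
# table, resolved through the "hive" Spark catalog configured above.
def test_hive_table_visible_in_spark(session_catalog_hive, spark) -> None:
    # Placeholder table name; a real test would create/populate it first.
    table = session_catalog_hive.load_table("default.test_table")
    spark_count = spark.table("hive.default.test_table").count()
    assert len(table.scan().to_arrow()) == spark_count
```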