replace office-data-import by db-restore (#154)
Co-authored-by: Michael Hoennig <michael@hoennig.de>
Reviewed-on: https://dev.hostsharing.net/hostsharing/hs.hsadmin.ng/pulls/154
Reviewed-by: Marc Sandlus <marc.sandlus@hostsharing.net>
		| @@ -6,6 +6,7 @@ | |||||||
|           <entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" /> |           <entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" /> | ||||||
|           <entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" /> |           <entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" /> | ||||||
|           <entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" /> |           <entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" /> | ||||||
|  |           <entry key="HSADMINNG_SUPERUSER" value="import-superuser@hostsharing.net" /> | ||||||
|         </map> |         </map> | ||||||
|       </option> |       </option> | ||||||
|       <option name="executionName" /> |       <option name="executionName" /> | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| --liquibase formatted sql | --liquibase formatted sql | ||||||
|  |  | ||||||
| -- FIXME: check if we really need the restricted user | -- TODO.impl: check if we really need the restricted user | ||||||
|  |  | ||||||
| -- ============================================================================ | -- ============================================================================ | ||||||
| -- NUMERIC-HASH-FUNCTIONS | -- NUMERIC-HASH-FUNCTIONS | ||||||
|   | |||||||
| @@ -25,7 +25,7 @@ create table if not exists hs_booking.item | |||||||
|     caption             varchar(80) not null, |     caption             varchar(80) not null, | ||||||
|     resources           jsonb not null, |     resources           jsonb not null, | ||||||
|  |  | ||||||
|     constraint booking_item_has_project_or_parent_asset |     constraint booking_item_has_project_or_parent_item | ||||||
|         check (projectUuid is not null or parentItemUuid is not null) |         check (projectUuid is not null or parentItemUuid is not null) | ||||||
| ); | ); | ||||||
| --// | --// | ||||||
|   | |||||||
							
								
								
									
src/main/resources/db/changelog/9-hs-global/9800-cleanup.sql (new file, 38 lines)
							| @@ -0,0 +1,38 @@ | |||||||
|  | --liquibase formatted sql | ||||||
|  |  | ||||||
|  | -- ============================================================================ | ||||||
|  | --changeset michael.hoennig:hs-global-office-test-ddl-cleanup context:hosting-asset-import endDelimiter:--// | ||||||
|  | -- ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.bankaccount_create_test_data(IN givenholder character varying, IN giveniban character varying, IN givenbic character varying); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.contact_create_test_data(IN contcaption character varying); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.contact_create_test_data(IN startcount integer, IN endcount integer); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.coopassettx_create_test_data(IN givenpartnernumber numeric, IN givenmembernumbersuffix character); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.coopsharetx_create_test_data(IN givenpartnernumber numeric, IN givenmembernumbersuffix character); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.debitor_create_test_data(IN withdebitornumbersuffix numeric, IN forpartnerpersonname character varying, IN forbillingcontactcaption character varying, IN withdefaultprefix character varying); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.membership_create_test_data(IN forpartnernumber numeric, IN newmembernumbersuffix character); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.partner_create_test_data(IN mandanttradename character varying, IN newpartnernumber numeric, IN partnerpersonname character varying, IN contactcaption character varying); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.person_create_test_data(IN newpersontype hs_office.persontype, IN newtradename character varying, IN newfamilyname character varying, IN newgivenname character varying); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.relation_create_test_data(IN startcount integer, IN endcount integer); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.relation_create_test_data(IN holderpersonname character varying, IN relationtype hs_office.relationtype, IN anchorpersonname character varying, IN contactcaption character varying, IN mark character varying); | ||||||
|  | DROP PROCEDURE IF EXISTS hs_office.sepamandate_create_test_data(IN forpartnernumber numeric, IN fordebitorsuffix character, IN foriban character varying, IN withreference character varying); | ||||||
|  | --// | ||||||
|  |  | ||||||
|  |  | ||||||
|  | -- ============================================================================ | ||||||
|  | --changeset michael.hoennig:hs-global-rbac-test-ddl-cleanup context:hosting-asset-import endDelimiter:--// | ||||||
|  | -- ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
|  | DROP SCHEMA IF EXISTS rbactest CASCADE; | ||||||
|  | --// | ||||||
|  |  | ||||||
|  |  | ||||||
|  | -- ============================================================================ | ||||||
|  | --changeset michael.hoennig:hs-global-rbac-test-dml-cleanup context:hosting-asset-import endDelimiter:--// | ||||||
|  | -- ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
|  | call base.defineContext('9800-cleanup', null, '${HSADMINNG_SUPERUSER}', null); | ||||||
|  |  | ||||||
|  | DELETE FROM rbac.subject WHERE name='superuser-alex@hostsharing.net'; | ||||||
|  | DELETE FROM rbac.subject WHERE name='superuser-fran@hostsharing.net'; | ||||||
|  | --// | ||||||
| @@ -212,6 +212,10 @@ databaseChangeLog: | |||||||
|         file: db/changelog/9-hs-global/9000-statistics.sql |         file: db/changelog/9-hs-global/9000-statistics.sql | ||||||
|         context: "!only-office" |         context: "!only-office" | ||||||
|  |  | ||||||
|  |     - include: | ||||||
|  |           file: db/changelog/9-hs-global/9800-cleanup.sql | ||||||
|  |           context: "without-test-data" | ||||||
|  |  | ||||||
|     - include: |     - include: | ||||||
|         file: db/changelog/9-hs-global/9100-hs-integration-schema.sql |         file: db/changelog/9-hs-global/9100-hs-integration-schema.sql | ||||||
|     - include: |     - include: | ||||||
|   | |||||||
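The cleanup script above is only pulled in when Liquibase runs with the without-test-data context (and, for its individual changesets, the hosting-asset-import context), and its DML changeset depends on the ${HSADMINNG_SUPERUSER} changelog parameter. Below is a minimal sketch of how such a run could be wired with Spring's SpringLiquibase; the configuration class, bean wiring, and master-changelog path are assumptions for illustration, only the context names and the parameter key come from the changelog itself.

import java.util.Map;
import javax.sql.DataSource;
import liquibase.integration.spring.SpringLiquibase;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Illustrative sketch only, not the project's actual Liquibase configuration.
@Configuration
public class CleanupLiquibaseSketch {

    @Bean
    public SpringLiquibase liquibase(final DataSource dataSource) {
        final var liquibase = new SpringLiquibase();
        liquibase.setDataSource(dataSource);
        liquibase.setChangeLog("classpath:db/changelog/db.changelog-master.yaml"); // assumed path
        // selects the include above plus the changesets guarded by these contexts
        liquibase.setContexts("without-test-data,hosting-asset-import");
        // substitutes ${HSADMINNG_SUPERUSER} in 9800-cleanup.sql
        liquibase.setChangeLogParameters(Map.of(
                "HSADMINNG_SUPERUSER",
                System.getenv().getOrDefault("HSADMINNG_SUPERUSER", "import-superuser@hostsharing.net")));
        return liquibase;
    }
}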
| @@ -115,11 +115,18 @@ public abstract class BaseOfficeDataImport extends CsvDataImport { | |||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
|     @Order(1) |     @Order(1) | ||||||
|     void verifyInitialDatabase() { |     void verifyInitialDatabaseHasNoTestData() { | ||||||
|         // SQL DELETE for thousands of records takes too long, so we make sure, we only start with initial or test data |         assertThat((Integer) em.createNativeQuery( | ||||||
|         final var contactCount = (Integer) em.createNativeQuery("select count(*) from hs_office.contact", Integer.class) |                         "select count(*) from hs_office.contact", | ||||||
|                 .getSingleResult(); |                         Integer.class) | ||||||
|         assertThat(contactCount).isLessThan(20); |                 .getSingleResult()).isEqualTo(0); | ||||||
|  |         assertThat((Integer) em.createNativeQuery( | ||||||
|  |                         """ | ||||||
|  |                         SELECT count(*) FROM information_schema.tables | ||||||
|  |                                  WHERE table_schema = 'rbactest' AND table_name = 'customer' | ||||||
|  |                         """, | ||||||
|  |                         Integer.class) | ||||||
|  |                 .getSingleResult()).isEqualTo(0); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
| @@ -624,15 +631,13 @@ public abstract class BaseOfficeDataImport extends CsvDataImport { | |||||||
|     void persistOfficeEntities() { |     void persistOfficeEntities() { | ||||||
|  |  | ||||||
|         System.out.println("PERSISTING office data to database '" + jdbcUrl + "' as user '" + postgresAdminUser + "'"); |         System.out.println("PERSISTING office data to database '" + jdbcUrl + "' as user '" + postgresAdminUser + "'"); | ||||||
|         deleteTestDataFromHsOfficeTables(); |         makeSureThatTheImportAdminUserExists(); | ||||||
|         resetHsOfficeSequences(); |  | ||||||
|         deleteFromTestTables(); |  | ||||||
|         deleteFromCommonTables(); |  | ||||||
|  |  | ||||||
|  |         assertEmptyTable("hs_office.contact"); | ||||||
|         jpaAttempt.transacted(() -> { |         jpaAttempt.transacted(() -> { | ||||||
|             context(rbacSuperuser); |             context(rbacSuperuser); | ||||||
|             contacts.forEach(this::persist); |             contacts.forEach(this::persist); | ||||||
|            updateLegacyIds(contacts, "hs_office.contact_legacy_id", "contact_id"); |             updateLegacyIds(contacts, "hs_office.contact_legacy_id", "contact_id"); | ||||||
|         }).assertSuccessful(); |         }).assertSuccessful(); | ||||||
|  |  | ||||||
|         jpaAttempt.transacted(() -> { |         jpaAttempt.transacted(() -> { | ||||||
| @@ -646,6 +651,7 @@ public abstract class BaseOfficeDataImport extends CsvDataImport { | |||||||
|         }).assertSuccessful(); |         }).assertSuccessful(); | ||||||
|  |  | ||||||
|         System.out.println("persisting " + partners.size() + " partners"); |         System.out.println("persisting " + partners.size() + " partners"); | ||||||
|  |         assertEmptyTable("hs_office.partner"); | ||||||
|         jpaAttempt.transacted(() -> { |         jpaAttempt.transacted(() -> { | ||||||
|             context(rbacSuperuser); |             context(rbacSuperuser); | ||||||
|             partners.forEach((id, partner) -> { |             partners.forEach((id, partner) -> { | ||||||
| @@ -697,6 +703,12 @@ public abstract class BaseOfficeDataImport extends CsvDataImport { | |||||||
|         }).assertSuccessful(); |         }).assertSuccessful(); | ||||||
|  |  | ||||||
|     } |     } | ||||||
|  |     private void assertEmptyTable(final String qualifiedTableName) { | ||||||
|  |         assertThat((Integer) em.createNativeQuery( | ||||||
|  |                         "select count(*) from " + qualifiedTableName, | ||||||
|  |                         Integer.class) | ||||||
|  |                 .getSingleResult()).describedAs("expected empty " + qualifiedTableName).isEqualTo(0); | ||||||
|  |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
|     @Order(9190) |     @Order(9190) | ||||||
| @@ -883,7 +895,6 @@ public abstract class BaseOfficeDataImport extends CsvDataImport { | |||||||
|                     coopAssets.put(rec.getInteger("member_asset_id"), assetTransaction); |                     coopAssets.put(rec.getInteger("member_asset_id"), assetTransaction); | ||||||
|                 }); |                 }); | ||||||
|  |  | ||||||
|  |  | ||||||
|         coopAssets.entrySet().forEach(entry -> { |         coopAssets.entrySet().forEach(entry -> { | ||||||
|             final var legacyId = entry.getKey(); |             final var legacyId = entry.getKey(); | ||||||
|             final var assetTransaction = entry.getValue(); |             final var assetTransaction = entry.getValue(); | ||||||
| @@ -896,7 +907,9 @@ public abstract class BaseOfficeDataImport extends CsvDataImport { | |||||||
|         }); |         }); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     private static void connectToRelatedRevertedAssetTx(final int legacyId, final HsOfficeCoopAssetsTransactionEntity assetTransaction) { |     private static void connectToRelatedRevertedAssetTx( | ||||||
|  |             final int legacyId, | ||||||
|  |             final HsOfficeCoopAssetsTransactionEntity assetTransaction) { | ||||||
|         final var negativeValue = assetTransaction.getAssetValue().negate(); |         final var negativeValue = assetTransaction.getAssetValue().negate(); | ||||||
|         final var revertedAssetTx = coopAssets.values().stream().filter(a -> |         final var revertedAssetTx = coopAssets.values().stream().filter(a -> | ||||||
|                         a.getTransactionType() != HsOfficeCoopAssetsTransactionType.REVERSAL && |                         a.getTransactionType() != HsOfficeCoopAssetsTransactionType.REVERSAL && | ||||||
| @@ -909,11 +922,14 @@ public abstract class BaseOfficeDataImport extends CsvDataImport { | |||||||
|         //revertedAssetTx.setAssetReversalTx(assetTransaction); |         //revertedAssetTx.setAssetReversalTx(assetTransaction); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     private static void connectToRelatedAdoptionAssetTx(final int legacyId, final HsOfficeCoopAssetsTransactionEntity assetTransaction) { |     private static void connectToRelatedAdoptionAssetTx( | ||||||
|  |             final int legacyId, | ||||||
|  |             final HsOfficeCoopAssetsTransactionEntity assetTransaction) { | ||||||
|         final var negativeValue = assetTransaction.getAssetValue().negate(); |         final var negativeValue = assetTransaction.getAssetValue().negate(); | ||||||
|         final var adoptionAssetTx = coopAssets.values().stream().filter(a -> |         final var adoptionAssetTx = coopAssets.values().stream().filter(a -> | ||||||
|                         a.getTransactionType() == HsOfficeCoopAssetsTransactionType.ADOPTION && |                         a.getTransactionType() == HsOfficeCoopAssetsTransactionType.ADOPTION && | ||||||
|                                 (!a.getValueDate().equals(LocalDate.of( 2014 , 12 , 31)) || a.getComment().contains(Integer.toString(assetTransaction.getMembership().getMemberNumber()/100))) && |                                 (!a.getValueDate().equals(LocalDate.of(2014, 12, 31)) || a.getComment() | ||||||
|  |                                         .contains(Integer.toString(assetTransaction.getMembership().getMemberNumber() / 100))) && | ||||||
|                                 a.getMembership() != assetTransaction.getMembership() && |                                 a.getMembership() != assetTransaction.getMembership() && | ||||||
|                                 a.getValueDate().equals(assetTransaction.getValueDate()) && |                                 a.getValueDate().equals(assetTransaction.getValueDate()) && | ||||||
|                                 a.getAssetValue().equals(negativeValue)) |                                 a.getAssetValue().equals(negativeValue)) | ||||||
| @@ -1131,14 +1147,14 @@ public abstract class BaseOfficeDataImport extends CsvDataImport { | |||||||
|  |  | ||||||
|         final var personKey = ( |         final var personKey = ( | ||||||
|                 person.getPersonType() + "|" + |                 person.getPersonType() + "|" + | ||||||
|                 person.getSalutation() + "|" + |                         person.getSalutation() + "|" + | ||||||
|                 person.getTradeName() + "|" + |                         person.getTradeName() + "|" + | ||||||
|                 person.getTitle() + "|" + |                         person.getTitle() + "|" + | ||||||
|                 person.getGivenName() + "|" + |                         person.getGivenName() + "|" + | ||||||
|                 person.getFamilyName() |                         person.getFamilyName() | ||||||
|         ).toLowerCase(); |         ).toLowerCase(); | ||||||
|  |  | ||||||
|         if ( !distinctPersons.containsKey(personKey) ) { |         if (!distinctPersons.containsKey(personKey)) { | ||||||
|             distinctPersons.put(personKey, person); |             distinctPersons.put(personKey, person); | ||||||
|         } |         } | ||||||
|         return distinctPersons.get(personKey); |         return distinctPersons.get(personKey); | ||||||
| @@ -1164,24 +1180,24 @@ public abstract class BaseOfficeDataImport extends CsvDataImport { | |||||||
|             if (endsWithWord(tradeName, "OHG", "GbR", "KG", "UG", "PartGmbB", "mbB")) { |             if (endsWithWord(tradeName, "OHG", "GbR", "KG", "UG", "PartGmbB", "mbB")) { | ||||||
|                 return HsOfficePersonType.INCORPORATED_FIRM; // Personengesellschaft. Gesellschafter haften persönlich. |                 return HsOfficePersonType.INCORPORATED_FIRM; // Personengesellschaft. Gesellschafter haften persönlich. | ||||||
|             } else if (containsWord(tradeName, "e.K.", "e.G.", "eG", "gGmbH", "GmbH", "mbH", "AG", "e.V.", "eV", "e.V") |             } else if (containsWord(tradeName, "e.K.", "e.G.", "eG", "gGmbH", "GmbH", "mbH", "AG", "e.V.", "eV", "e.V") | ||||||
|                 || tradeName.toLowerCase().contains("haftungsbeschränkt") |                     || tradeName.toLowerCase().contains("haftungsbeschränkt") | ||||||
|                 || tradeName.toLowerCase().contains("stiftung") |                     || tradeName.toLowerCase().contains("stiftung") | ||||||
|                 || tradeName.toLowerCase().contains("stichting") |                     || tradeName.toLowerCase().contains("stichting") | ||||||
|                 || tradeName.toLowerCase().contains("foundation") |                     || tradeName.toLowerCase().contains("foundation") | ||||||
|                 || tradeName.toLowerCase().contains("schule") |                     || tradeName.toLowerCase().contains("schule") | ||||||
|                 || tradeName.toLowerCase().contains("verein") |                     || tradeName.toLowerCase().contains("verein") | ||||||
|                 || tradeName.toLowerCase().contains("gewerkschaft") |                     || tradeName.toLowerCase().contains("gewerkschaft") | ||||||
|                 || tradeName.toLowerCase().contains("gesellschaft") |                     || tradeName.toLowerCase().contains("gesellschaft") | ||||||
|                 || tradeName.toLowerCase().contains("kirche") |                     || tradeName.toLowerCase().contains("kirche") | ||||||
|                 || tradeName.toLowerCase().contains("fraktion") |                     || tradeName.toLowerCase().contains("fraktion") | ||||||
|                 || tradeName.toLowerCase().contains("landkreis") |                     || tradeName.toLowerCase().contains("landkreis") | ||||||
|                 || tradeName.toLowerCase().contains("behörde") |                     || tradeName.toLowerCase().contains("behörde") | ||||||
|                 || tradeName.toLowerCase().contains("bundesamt") |                     || tradeName.toLowerCase().contains("bundesamt") | ||||||
|                 || tradeName.toLowerCase().contains("bezirksamt") |                     || tradeName.toLowerCase().contains("bezirksamt") | ||||||
|                 ) { |             ) { | ||||||
|                 return HsOfficePersonType.LEGAL_PERSON; // Haftungsbeschränkt |                 return HsOfficePersonType.LEGAL_PERSON; // Haftungsbeschränkt | ||||||
|             } else if (roles.contains("contractual") && !roles.contains("partner") && |             } else if (roles.contains("contractual") && !roles.contains("partner") && | ||||||
|                    !familyName.isBlank() && !givenName.isBlank()) { |                     !familyName.isBlank() && !givenName.isBlank()) { | ||||||
|                 // REPRESENTATIVES are always natural persons |                 // REPRESENTATIVES are always natural persons | ||||||
|                 return HsOfficePersonType.NATURAL_PERSON; |                 return HsOfficePersonType.NATURAL_PERSON; | ||||||
|             } else { |             } else { | ||||||
| @@ -1203,9 +1219,9 @@ public abstract class BaseOfficeDataImport extends CsvDataImport { | |||||||
|         final var lowerCaseValue = value.toLowerCase(); |         final var lowerCaseValue = value.toLowerCase(); | ||||||
|         for (String ending : endings) { |         for (String ending : endings) { | ||||||
|             if (lowerCaseValue.equals(ending.toLowerCase()) || |             if (lowerCaseValue.equals(ending.toLowerCase()) || | ||||||
|                 lowerCaseValue.startsWith(ending.toLowerCase() + " ") || |                     lowerCaseValue.startsWith(ending.toLowerCase() + " ") || | ||||||
|                 lowerCaseValue.contains(" " + ending.toLowerCase() + " ") || |                     lowerCaseValue.contains(" " + ending.toLowerCase() + " ") || | ||||||
|                 lowerCaseValue.endsWith(" " + ending.toLowerCase())) { |                     lowerCaseValue.endsWith(" " + ending.toLowerCase())) { | ||||||
|                 return true; |                 return true; | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|   | |||||||
| @@ -248,63 +248,22 @@ public class CsvDataImport extends ContextBasedTest { | |||||||
|         return json; |         return json; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     protected void deleteTestDataFromHsOfficeTables() { |     protected void makeSureThatTheImportAdminUserExists() { | ||||||
|         jpaAttempt.transacted(() -> { |         jpaAttempt.transacted(() -> { | ||||||
|             context(rbacSuperuser); |             context(null); | ||||||
|             // TODO.perf: could we instead skip creating test-data based on an env var? |             em.createNativeQuery(""" | ||||||
|             em.createNativeQuery("delete from hs_hosting.asset where true").executeUpdate(); |                 do language plpgsql $$ | ||||||
|             em.createNativeQuery("delete from hs_hosting.asset_ex where true").executeUpdate(); |                     declare | ||||||
|             em.createNativeQuery("delete from hs_booking.item where true").executeUpdate(); |                         admins uuid; | ||||||
|             em.createNativeQuery("delete from hs_booking.item_ex where true").executeUpdate(); |                     begin | ||||||
|             em.createNativeQuery("delete from hs_booking.project where true").executeUpdate(); |                         if not exists (select 1 from rbac.subject where name = '${rbacSuperuser}') then | ||||||
|             em.createNativeQuery("delete from hs_booking.project_ex where true").executeUpdate(); |                             admins = rbac.findRoleId(rbac.global_ADMIN()); | ||||||
|             em.createNativeQuery("delete from hs_office.coopassettx where true").executeUpdate(); |                             call rbac.grantRoleToSubjectUnchecked(admins, admins, rbac.create_subject('${rbacSuperuser}')); | ||||||
|             em.createNativeQuery("delete from hs_office.coopassettx_legacy_id where true").executeUpdate(); |                         end if; | ||||||
|             em.createNativeQuery("delete from hs_office.coopsharetx where true").executeUpdate(); |                     end; | ||||||
|             em.createNativeQuery("delete from hs_office.coopsharetx_legacy_id where true").executeUpdate(); |                 $$; | ||||||
|             em.createNativeQuery("delete from hs_office.membership where true").executeUpdate(); |                 """.replace("${rbacSuperuser}", rbacSuperuser)) | ||||||
|             em.createNativeQuery("delete from hs_office.sepamandate where true").executeUpdate(); |                 .executeUpdate(); | ||||||
|             em.createNativeQuery("delete from hs_office.sepamandate_legacy_id where true").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from hs_office.debitor where true").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from hs_office.bankaccount where true").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from hs_office.partner where true").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from hs_office.partner_details where true").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from hs_office.relation where true").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from hs_office.contact where true").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from hs_office.person where true").executeUpdate(); |  | ||||||
|         }).assertSuccessful(); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     protected void resetHsOfficeSequences() { |  | ||||||
|         jpaAttempt.transacted(() -> { |  | ||||||
|             context(rbacSuperuser); |  | ||||||
|             em.createNativeQuery("alter sequence hs_office.contact_legacy_id_seq restart with 1000000000;").executeUpdate(); |  | ||||||
|             em.createNativeQuery("alter sequence hs_office.coopassettx_legacy_id_seq restart with 1000000000;") |  | ||||||
|                     .executeUpdate(); |  | ||||||
|             em.createNativeQuery("alter sequence public.hs_office.coopsharetx_legacy_id_seq restart with 1000000000;") |  | ||||||
|                     .executeUpdate(); |  | ||||||
|             em.createNativeQuery("alter sequence public.hs_office.partner_legacy_id_seq restart with 1000000000;") |  | ||||||
|                     .executeUpdate(); |  | ||||||
|             em.createNativeQuery("alter sequence public.hs_office.sepamandate_legacy_id_seq restart with 1000000000;") |  | ||||||
|                     .executeUpdate(); |  | ||||||
|         }); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     protected void deleteFromTestTables() { |  | ||||||
|         jpaAttempt.transacted(() -> { |  | ||||||
|             context(rbacSuperuser); |  | ||||||
|             em.createNativeQuery("delete from rbactest.domain where true").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from rbactest.package where true").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from rbactest.customer where true").executeUpdate(); |  | ||||||
|         }).assertSuccessful(); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     protected void deleteFromCommonTables() { |  | ||||||
|         jpaAttempt.transacted(() -> { |  | ||||||
|             context(rbacSuperuser); |  | ||||||
|             em.createNativeQuery("delete from rbac.subject_rv where name not like 'superuser-%'").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from base.tx_journal where true").executeUpdate(); |  | ||||||
|             em.createNativeQuery("delete from base.tx_context where true").executeUpdate(); |  | ||||||
|         }).assertSuccessful(); |         }).assertSuccessful(); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   | |||||||
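The DO block above splices the superuser name in via String.replace, presumably because an anonymous PL/pgSQL block cannot take JDBC bind parameters. A small hedged sketch of how a test could double-check the helper's effect follows; em, rbacSuperuser, and AssertJ are assumed to be available as in the surrounding test classes, and the helper name itself is hypothetical.

    // Hypothetical verification helper, not part of this commit.
    protected void assertImportAdminUserExists() {
        final var subjects = (Number) em.createNativeQuery(
                        "select count(*) from rbac.subject where name = :name")
                .setParameter("name", rbacSuperuser)
                .getSingleResult();
        org.assertj.core.api.Assertions.assertThat(subjects.intValue())
                .describedAs("expected an rbac.subject named " + rbacSuperuser)
                .isEqualTo(1);
    }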
| @@ -7,6 +7,7 @@ import net.hostsharing.hsadminng.context.Context; | |||||||
| import net.hostsharing.hsadminng.hash.HashGenerator; | import net.hostsharing.hsadminng.hash.HashGenerator; | ||||||
| import net.hostsharing.hsadminng.hash.HashGenerator.Algorithm; | import net.hostsharing.hsadminng.hash.HashGenerator.Algorithm; | ||||||
| import net.hostsharing.hsadminng.hs.booking.debitor.HsBookingDebitorEntity; | import net.hostsharing.hsadminng.hs.booking.debitor.HsBookingDebitorEntity; | ||||||
|  | import net.hostsharing.hsadminng.hs.booking.debitor.HsBookingDebitorRepository; | ||||||
| import net.hostsharing.hsadminng.hs.booking.item.HsBookingItem; | import net.hostsharing.hsadminng.hs.booking.item.HsBookingItem; | ||||||
| import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemRealEntity; | import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemRealEntity; | ||||||
| import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemType; | import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemType; | ||||||
| @@ -27,12 +28,14 @@ import org.junit.jupiter.api.Tag; | |||||||
| import org.junit.jupiter.api.Test; | import org.junit.jupiter.api.Test; | ||||||
| import org.junit.jupiter.api.TestMethodOrder; | import org.junit.jupiter.api.TestMethodOrder; | ||||||
| import org.junit.jupiter.api.extension.ExtendWith; | import org.junit.jupiter.api.extension.ExtendWith; | ||||||
|  | import org.springframework.beans.factory.annotation.Autowired; | ||||||
| import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest; | import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest; | ||||||
| import org.springframework.context.annotation.Import; | import org.springframework.context.annotation.Import; | ||||||
| import org.springframework.core.io.support.PathMatchingResourcePatternResolver; | import org.springframework.core.io.support.PathMatchingResourcePatternResolver; | ||||||
| import org.springframework.test.annotation.Commit; | import org.springframework.test.annotation.Commit; | ||||||
| import org.springframework.test.annotation.DirtiesContext; | import org.springframework.test.annotation.DirtiesContext; | ||||||
| import org.springframework.test.context.ActiveProfiles; | import org.springframework.test.context.ActiveProfiles; | ||||||
|  | import org.springframework.test.context.jdbc.Sql; | ||||||
|  |  | ||||||
| import java.io.Reader; | import java.io.Reader; | ||||||
| import java.net.IDN; | import java.net.IDN; | ||||||
| @@ -44,6 +47,7 @@ import java.util.Map; | |||||||
| import java.util.Objects; | import java.util.Objects; | ||||||
| import java.util.Set; | import java.util.Set; | ||||||
| import java.util.TreeMap; | import java.util.TreeMap; | ||||||
|  | import java.util.UUID; | ||||||
| import java.util.concurrent.atomic.AtomicInteger; | import java.util.concurrent.atomic.AtomicInteger; | ||||||
| import java.util.concurrent.atomic.AtomicReference; | import java.util.concurrent.atomic.AtomicReference; | ||||||
| import java.util.function.Function; | import java.util.function.Function; | ||||||
| @@ -76,56 +80,23 @@ import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.UNIX | |||||||
| import static net.hostsharing.hsadminng.mapper.PostgresDateRange.toPostgresDateRange; | import static net.hostsharing.hsadminng.mapper.PostgresDateRange.toPostgresDateRange; | ||||||
| import static org.assertj.core.api.Assertions.assertThat; | import static org.assertj.core.api.Assertions.assertThat; | ||||||
| import static org.assertj.core.api.Assumptions.assumeThat; | import static org.assertj.core.api.Assumptions.assumeThat; | ||||||
|  | import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; | ||||||
|  |  | ||||||
| /* |  | ||||||
|  * This 'test' includes the complete legacy 'office' data import. |  | ||||||
|  * |  | ||||||
|  * There is no code in 'main' because the import is not needed a normal runtime. |  | ||||||
|  * There is some test data in Java resources to verify the data conversion. |  | ||||||
|  * For a real import a main method will be added later |  | ||||||
|  * which reads CSV files from the file system. |  | ||||||
|  * |  | ||||||
|  * When run on a Hostsharing database, it needs the following settings (hsh99_... just examples). |  | ||||||
|  * |  | ||||||
|  * In a real Hostsharing environment, these are created via (the old) hsadmin: |  | ||||||
|  |  | ||||||
|     CREATE USER hsh99_admin WITH PASSWORD 'password'; |  | ||||||
|     CREATE DATABASE hsh99_hsadminng  ENCODING 'UTF8' TEMPLATE template0; |  | ||||||
|     REVOKE ALL ON DATABASE hsh99_hsadminng FROM public; -- why does hsadmin do that? |  | ||||||
|     ALTER DATABASE hsh99_hsadminng OWNER TO hsh99_admin; |  | ||||||
|  |  | ||||||
|     CREATE USER hsh99_restricted WITH PASSWORD 'password'; |  | ||||||
|  |  | ||||||
|     \c hsh99_hsadminng |  | ||||||
|  |  | ||||||
|     GRANT ALL PRIVILEGES ON SCHEMA public to hsh99_admin; |  | ||||||
|  |  | ||||||
|  * Additionally, we need these settings (because the Hostsharing DB-Admin has no CREATE right): |  | ||||||
|  |  | ||||||
|     CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; |  | ||||||
|  |  | ||||||
|     -- maybe something like that is needed for the 2nd user |  | ||||||
|     -- GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public to hsh99_restricted; |  | ||||||
|  |  | ||||||
|  * Then copy the file .tc-environment to a file named .environment (excluded from git) and fill in your specific values. |  | ||||||
|  |  | ||||||
|  * To finally import the office data, run: |  | ||||||
|  * |  | ||||||
|  *   gw-importHostingAssets # comes from .aliases file and uses .environment |  | ||||||
|  */ |  | ||||||
| @Tag("importHostingAssets") | @Tag("importHostingAssets") | ||||||
| @DataJpaTest(properties = { | @DataJpaTest(properties = { | ||||||
|         "spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importHostingAssetsTC}", |         "spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importHostingAssetsTC}", | ||||||
|         "spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}", |         "spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}", | ||||||
|         "spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}", |         "spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}", | ||||||
|         "hsadminng.superuser=${HSADMINNG_SUPERUSER:superuser-alex@hostsharing.net}" |         "hsadminng.superuser=${HSADMINNG_SUPERUSER:import-superuser@hostsharing.net}", | ||||||
|  |         "spring.liquibase.enabled=false" // @Sql should go first, Liquibase will be initialized programmatically | ||||||
| }) | }) | ||||||
| @DirtiesContext | @DirtiesContext | ||||||
| @Import({ Context.class, JpaAttempt.class }) | @Import({ Context.class, JpaAttempt.class, LiquibaseConfig.class }) | ||||||
| @ActiveProfiles("without-test-data") | @ActiveProfiles({ "without-test-data", "liquibase-migration", "hosting-asset-import" }) | ||||||
| @TestMethodOrder(MethodOrderer.OrderAnnotation.class) | @TestMethodOrder(MethodOrderer.OrderAnnotation.class) | ||||||
| @ExtendWith(OrderedDependedTestsExtension.class) | @ExtendWith(OrderedDependedTestsExtension.class) | ||||||
| public class ImportHostingAssets extends BaseOfficeDataImport { | @Sql(value = "/db/released-only-office-schema-with-import-test-data.sql", executionPhase = BEFORE_TEST_CLASS) // release-schema | ||||||
|  | public class ImportHostingAssets extends CsvDataImport { | ||||||
|  |  | ||||||
|     private static final Set<String> NOBODY_SUBSTITUTES = Set.of("nomail", "bounce"); |     private static final Set<String> NOBODY_SUBSTITUTES = Set.of("nomail", "bounce"); | ||||||
|  |  | ||||||
| @@ -156,15 +127,50 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|  |  | ||||||
|     final ObjectMapper jsonMapper = new ObjectMapper(); |     final ObjectMapper jsonMapper = new ObjectMapper(); | ||||||
|  |  | ||||||
|  |     @Autowired | ||||||
|  |     HsBookingDebitorRepository debitorRepo; | ||||||
|  |  | ||||||
|  |     @Autowired | ||||||
|  |     LiquibaseMigration liquibase; | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(11000) | ||||||
|  |     void liquibaseMigrationForBookingAndHosting() { | ||||||
|  |         liquibase.assertReferenceStatusAfterRestore(286, "hs-booking-SCHEMA"); | ||||||
|  |         makeSureThatTheImportAdminUserExists(); | ||||||
|  |         liquibase.runWithContexts("migration", "without-test-data"); | ||||||
|  |         liquibase.assertThatCurrentMigrationsGotApplied(331, "hs-booking-SCHEMA"); | ||||||
|  |     } | ||||||
|  |  | ||||||
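LiquibaseMigration and its methods assertReferenceStatusAfterRestore, runWithContexts, and assertThatCurrentMigrationsGotApplied are project-specific helpers not shown in this diff. As a rough, non-authoritative sketch, runWithContexts might boil down to an update call on the plain Liquibase API with explicit contexts; every name below except the context strings is an assumption.

import java.sql.Connection;
import javax.sql.DataSource;
import liquibase.Contexts;
import liquibase.LabelExpression;
import liquibase.Liquibase;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.resource.ClassLoaderResourceAccessor;

// Rough sketch, not the project's actual LiquibaseMigration implementation.
public class LiquibaseMigrationSketch {

    private final DataSource dataSource;

    public LiquibaseMigrationSketch(final DataSource dataSource) {
        this.dataSource = dataSource;
    }

    /** Applies all pending changesets that match the given Liquibase contexts. */
    public void runWithContexts(final String... contexts) {
        try (Connection connection = dataSource.getConnection()) {
            final var database = DatabaseFactory.getInstance()
                    .findCorrectDatabaseImplementation(new JdbcConnection(connection));
            final var liquibase = new Liquibase(
                    "db/changelog/db.changelog-master.yaml", // assumed master changelog path
                    new ClassLoaderResourceAccessor(),
                    database);
            liquibase.update(new Contexts(contexts), new LabelExpression());
        } catch (final Exception e) {
            throw new RuntimeException("liquibase migration failed", e);
        }
    }
}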
|  |     record PartnerLegacyIdMapping(UUID uuid, Integer bp_id){} | ||||||
|  |     record DebitorRecord(UUID uuid, Integer version, String defaultPrefix){} | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
|     @Order(11010) |     @Order(11010) | ||||||
|     void createBookingProjects() { |     void createBookingProjects() { | ||||||
|         debitors.forEach((id, debitor) -> { |  | ||||||
|             bookingProjects.put(id, HsBookingProjectRealEntity.builder() |         final var partnerLegacyIdMappings = em.createNativeQuery( | ||||||
|                     .caption(debitor.getDefaultPrefix() + " default project") |                 """ | ||||||
|                     .debitor(em.find(HsBookingDebitorEntity.class, debitor.getUuid())) |                         select debitor.uuid, pid.bp_id | ||||||
|                     .build()); |                             from hs_office.debitor debitor | ||||||
|         }); |                             join hs_office.relation debitorRel on debitor.debitorReluUid=debitorRel.uuid | ||||||
|  |                             join hs_office.relation partnerRel on partnerRel.holderUuid=debitorRel.anchorUuid | ||||||
|  |                             join hs_office.partner partner on partner.partnerReluUid=partnerRel.uuid | ||||||
|  |                             join hs_office.partner_legacy_id pid on partner.uuid=pid.uuid | ||||||
|  |                         """, PartnerLegacyIdMapping.class).getResultList(); | ||||||
|  |         //noinspection unchecked | ||||||
|  |         final var debitorUuidToLegacyBpIdMap = ((List<PartnerLegacyIdMapping>) partnerLegacyIdMappings).stream() | ||||||
|  |                 .collect(toMap(row -> row.uuid, row -> row.bp_id)); | ||||||
|  |         final var debitors = em.createNativeQuery("SELECT debitor.uuid, debitor.version, debitor.defaultPrefix FROM hs_office.debitor debitor", DebitorRecord.class).getResultList(); | ||||||
|  |         //noinspection unchecked | ||||||
|  |         ((List<DebitorRecord>)debitors).forEach(debitor -> { | ||||||
|  |                     bookingProjects.put( | ||||||
|  |                             debitorUuidToLegacyBpIdMap.get(debitor.uuid), HsBookingProjectRealEntity.builder() | ||||||
|  |                                     .version(debitor.version) | ||||||
|  |                                     .caption(debitor.defaultPrefix + " default project") | ||||||
|  |                                     .debitor(em.find(HsBookingDebitorEntity.class, debitor.uuid)) | ||||||
|  |                                     .build()); | ||||||
|  |                 }); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
| @@ -501,11 +507,11 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|     @SneakyThrows |     @SneakyThrows | ||||||
|     void importZonenfiles() { |     void importZonenfiles() { | ||||||
|         final var resolver = new PathMatchingResourcePatternResolver(); |         final var resolver = new PathMatchingResourcePatternResolver(); | ||||||
|             final var resources = resolver.getResources("/" + MIGRATION_DATA_PATH + "/hosting/zonefiles/*.json"); |         final var resources = resolver.getResources("/" + MIGRATION_DATA_PATH + "/hosting/zonefiles/*.json"); | ||||||
|             for (var resource : resources) { |         for (var resource : resources) { | ||||||
|                 System.out.println("Processing zonenfile: " + resource); |             System.out.println("Processing zonenfile: " + resource); | ||||||
|                 importZonefiles(vmName(resource.getFilename()), resourceAsString(resource)); |             importZonefiles(vmName(resource.getFilename()), resourceAsString(resource)); | ||||||
|             } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
| @@ -713,10 +719,10 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|     void validateHostingAssets(final Map<Integer, HsHostingAssetRealEntity> assets) { |     void validateHostingAssets(final Map<Integer, HsHostingAssetRealEntity> assets) { | ||||||
|         assets.forEach((id, ha) -> { |         assets.forEach((id, ha) -> { | ||||||
|             logError(() -> |             logError(() -> | ||||||
|                 new HostingAssetEntitySaveProcessor(em, ha) |                     new HostingAssetEntitySaveProcessor(em, ha) | ||||||
|                         .preprocessEntity() |                             .preprocessEntity() | ||||||
|                         .validateEntity() |                             .validateEntity() | ||||||
|                         .prepareForSave() |                             .prepareForSave() | ||||||
|             ); |             ); | ||||||
|         }); |         }); | ||||||
|     } |     } | ||||||
| @@ -728,9 +734,12 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|         if (isImportingControlledTestData()) { |         if (isImportingControlledTestData()) { | ||||||
|             expectError("zonedata dom_owner of mellis.de is old00 but expected to be mim00"); |             expectError("zonedata dom_owner of mellis.de is old00 but expected to be mim00"); | ||||||
|             expectError("\nexpected: \"vm1068\"\n but was: \"vm1093\""); |             expectError("\nexpected: \"vm1068\"\n but was: \"vm1093\""); | ||||||
|             expectError("['EMAIL_ADDRESS:webmaster@hamburg-west.l-u-g.org.config.target' is expected to match any of [^[a-z][a-z0-9]{2}[0-9]{2}(-[a-z0-9][a-z0-9\\.+_-]*)?$, ^([a-zA-Z0-9_!#$%&'*+/=?`{|}~^.-]+)?@[a-zA-Z0-9.-]+$, ^nobody$, ^/dev/null$] but 'raoul.lottmann@example.com peter.lottmann@example.com' does not match any]"); |             expectError( | ||||||
|             expectError("['EMAIL_ADDRESS:abuse@mellis.de.config.target' length is expected to be at min 1 but length of [[]] is 0]"); |                     "['EMAIL_ADDRESS:webmaster@hamburg-west.l-u-g.org.config.target' is expected to match any of [^[a-z][a-z0-9]{2}[0-9]{2}(-[a-z0-9][a-z0-9\\.+_-]*)?$, ^([a-zA-Z0-9_!#$%&'*+/=?`{|}~^.-]+)?@[a-zA-Z0-9.-]+$, ^nobody$, ^/dev/null$] but 'raoul.lottmann@example.com peter.lottmann@example.com' does not match any]"); | ||||||
|             expectError("['EMAIL_ADDRESS:abuse@ist-im-netz.de.config.target' length is expected to be at min 1 but length of [[]] is 0]"); |             expectError( | ||||||
|  |                     "['EMAIL_ADDRESS:abuse@mellis.de.config.target' length is expected to be at min 1 but length of [[]] is 0]"); | ||||||
|  |             expectError( | ||||||
|  |                     "['EMAIL_ADDRESS:abuse@ist-im-netz.de.config.target' length is expected to be at min 1 but length of [[]] is 0]"); | ||||||
|         } |         } | ||||||
|         this.assertNoErrors(); |         this.assertNoErrors(); | ||||||
|     } |     } | ||||||
| @@ -738,7 +747,7 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|     // -------------------------------------------------------------------------------------------- |     // -------------------------------------------------------------------------------------------- | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
|     @Order(19000) |     @Order(19100) | ||||||
|     @Commit |     @Commit | ||||||
|     void persistBookingProjects() { |     void persistBookingProjects() { | ||||||
|  |  | ||||||
| @@ -751,7 +760,7 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
|     @Order(19010) |     @Order(19110) | ||||||
|     @Commit |     @Commit | ||||||
|     void persistBookingItems() { |     void persistBookingItems() { | ||||||
|  |  | ||||||
| @@ -1037,15 +1046,15 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|     void verifyMariaDbLegacyIds() { |     void verifyMariaDbLegacyIds() { | ||||||
|         assumeThatWeAreImportingControlledTestData(); |         assumeThatWeAreImportingControlledTestData(); | ||||||
|         assertThat(fetchHosingAssetLegacyIds(MARIADB_DATABASE)).isEqualTo(""" |         assertThat(fetchHosingAssetLegacyIds(MARIADB_DATABASE)).isEqualTo(""" | ||||||
|                 1786 |                  1786 | ||||||
|                1805 |                 1805 | ||||||
|                4908 |                 4908 | ||||||
|                4941 |                 4941 | ||||||
|                4942 |                 4942 | ||||||
|                7520 |                 7520 | ||||||
|                7521 |                 7521 | ||||||
|                7604 |                 7604 | ||||||
|                """.trim()); |                 """.trim()); | ||||||
|         assertThat(missingHostingAsstLegacyIds(MARIADB_DATABASE)).isEmpty(); |         assertThat(missingHostingAsstLegacyIds(MARIADB_DATABASE)).isEmpty(); | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -1070,14 +1079,15 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|         assumeThatWeAreImportingControlledTestData(); |         assumeThatWeAreImportingControlledTestData(); | ||||||
|  |  | ||||||
|         final var haCount = jpaAttempt.transacted(() -> { |         final var haCount = jpaAttempt.transacted(() -> { | ||||||
|                     context(rbacSuperuser, "hs_booking.project#D-1000300-mimdefaultproject:AGENT"); |             context(rbacSuperuser, "hs_booking.project#D-1000300-mimdefaultproject:AGENT"); | ||||||
|                     return (Integer) em.createNativeQuery("select count(*) from hs_hosting.asset_rv where type='EMAIL_ADDRESS'", Integer.class) |             return (Integer) em.createNativeQuery( | ||||||
|                             .getSingleResult(); |                             "select count(*) from hs_hosting.asset_rv where type='EMAIL_ADDRESS'", | ||||||
|                 }).assertSuccessful().returnedValue(); |                             Integer.class) | ||||||
|  |                     .getSingleResult(); | ||||||
|  |         }).assertSuccessful().returnedValue(); | ||||||
|         assertThat(haCount).isEqualTo(68); |         assertThat(haCount).isEqualTo(68); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|     // ============================================================================================ |     // ============================================================================================ | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
| @@ -1262,14 +1272,14 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|                         managedWebspace.setParentAsset(parentAsset); |                         managedWebspace.setParentAsset(parentAsset); | ||||||
|  |  | ||||||
|                         if (parentAsset.getRelatedProject() != managedWebspace.getRelatedProject() |                         if (parentAsset.getRelatedProject() != managedWebspace.getRelatedProject() | ||||||
|                                 && managedWebspace.getRelatedProject().getDebitor().getDebitorNumber() == 10000_00 ) { |                                 && managedWebspace.getRelatedProject().getDebitor().getDebitorNumber() == 10000_00) { | ||||||
|                             assertThat(managedWebspace.getIdentifier()).startsWith("xyz"); |                             assertThat(managedWebspace.getIdentifier()).startsWith("xyz"); | ||||||
|                             final var hshDebitor = managedWebspace.getBookingItem().getProject().getDebitor(); |                             final var hshDebitor = managedWebspace.getBookingItem().getProject().getDebitor(); | ||||||
|                             final var newProject = HsBookingProjectRealEntity.builder() |                             final var newProject = HsBookingProjectRealEntity.builder() | ||||||
|                                     .debitor(hshDebitor) |                                     .debitor(hshDebitor) | ||||||
|                                     .caption(parentAsset.getIdentifier() + " Monitor") |                                     .caption(parentAsset.getIdentifier() + " Monitor") | ||||||
|                                     .build(); |                                     .build(); | ||||||
|                             bookingProjects.put(Collections.max(bookingProjects.keySet())+1, newProject); |                             bookingProjects.put(Collections.max(bookingProjects.keySet()) + 1, newProject); | ||||||
|                             managedWebspace.getBookingItem().setProject(newProject); |                             managedWebspace.getBookingItem().setProject(newProject); | ||||||
|                         } else { |                         } else { | ||||||
|                             managedWebspace.getBookingItem().setParentItem(parentAsset.getBookingItem()); |                             managedWebspace.getBookingItem().setParentItem(parentAsset.getBookingItem()); | ||||||
| @@ -1624,20 +1634,25 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|                                     entry("includes", options.contains("includes")), |                                     entry("includes", options.contains("includes")), | ||||||
|                                     entry("letsencrypt", options.contains("letsencrypt")), |                                     entry("letsencrypt", options.contains("letsencrypt")), | ||||||
|                                     entry("multiviews", options.contains("multiviews")), |                                     entry("multiviews", options.contains("multiviews")), | ||||||
|                                     entry("subdomains", withDefault(rec.getString("valid_subdomain_names"), "*") |                                     entry( | ||||||
|                                             .split(",")), |                                             "subdomains", withDefault(rec.getString("valid_subdomain_names"), "*") | ||||||
|                                     entry("fcgi-php-bin", withDefault( |                                                     .split(",")), | ||||||
|                                             rec.getString("fcgi_php_bin"), |                                     entry( | ||||||
|                                             httpDomainSetupValidator.getProperty("fcgi-php-bin").defaultValue())), |                                             "fcgi-php-bin", withDefault( | ||||||
|                                     entry("passenger-nodejs", withDefault( |                                                     rec.getString("fcgi_php_bin"), | ||||||
|                                             rec.getString("passenger_nodejs"), |                                                     httpDomainSetupValidator.getProperty("fcgi-php-bin").defaultValue())), | ||||||
|                                             httpDomainSetupValidator.getProperty("passenger-nodejs").defaultValue())), |                                     entry( | ||||||
|                                     entry("passenger-python", withDefault( |                                             "passenger-nodejs", withDefault( | ||||||
|                                             rec.getString("passenger_python"), |                                                     rec.getString("passenger_nodejs"), | ||||||
|                                             httpDomainSetupValidator.getProperty("passenger-python").defaultValue())), |                                                     httpDomainSetupValidator.getProperty("passenger-nodejs").defaultValue())), | ||||||
|                                     entry("passenger-ruby", withDefault( |                                     entry( | ||||||
|                                             rec.getString("passenger_ruby"), |                                             "passenger-python", withDefault( | ||||||
|                                             httpDomainSetupValidator.getProperty("passenger-ruby").defaultValue())) |                                                     rec.getString("passenger_python"), | ||||||
|  |                                                     httpDomainSetupValidator.getProperty("passenger-python").defaultValue())), | ||||||
|  |                                     entry( | ||||||
|  |                                             "passenger-ruby", withDefault( | ||||||
|  |                                                     rec.getString("passenger_ruby"), | ||||||
|  |                                                     httpDomainSetupValidator.getProperty("passenger-ruby").defaultValue())) | ||||||
|                             )) |                             )) | ||||||
|                             .build(); |                             .build(); | ||||||
|                     domainHttpSetupAssets.put(domain_id, domainHttpSetupAsset); |                     domainHttpSetupAssets.put(domain_id, domainHttpSetupAsset); | ||||||
| @@ -1744,9 +1759,10 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|                     logError(() -> assertThat(vmName).isEqualTo(domUser.getParentAsset().getParentAsset().getIdentifier())); |                     logError(() -> assertThat(vmName).isEqualTo(domUser.getParentAsset().getParentAsset().getIdentifier())); | ||||||
|  |  | ||||||
|                     //noinspection unchecked |                     //noinspection unchecked | ||||||
|                     zoneData.put("user-RR", ((ArrayList<ArrayList<Object>>) zoneData.get("user-RR")).stream() |                     zoneData.put( | ||||||
|                             .map(userRR -> userRR.stream().map(Object::toString).collect(joining(" "))) |                             "user-RR", ((ArrayList<ArrayList<Object>>) zoneData.get("user-RR")).stream() | ||||||
|                             .toArray(String[]::new) |                                     .map(userRR -> userRR.stream().map(Object::toString).collect(joining(" "))) | ||||||
|  |                                     .toArray(String[]::new) | ||||||
|                     ); |                     ); | ||||||
|                     domainDnsSetupAsset.getConfig().putAll(zoneData); |                     domainDnsSetupAsset.getConfig().putAll(zoneData); | ||||||
|                 } else { |                 } else { | ||||||
| @@ -1897,13 +1913,13 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|     private String fetchHosingAssetLegacyIds(final HsHostingAssetType type) { |     private String fetchHosingAssetLegacyIds(final HsHostingAssetType type) { | ||||||
|         //noinspection unchecked |         //noinspection unchecked | ||||||
|         return ((List<List<?>>) em.createNativeQuery( |         return ((List<List<?>>) em.createNativeQuery( | ||||||
|                     """ |                         """ | ||||||
|                      SELECT li.* FROM hs_hosting.asset_legacy_id li |                                  select li.* from hs_hosting.asset_legacy_id li | ||||||
|                      JOIN hs_hosting.asset ha ON ha.uuid=li.uuid |                                  join hs_hosting.asset ha on ha.uuid=li.uuid | ||||||
|                      WHERE CAST(ha.type AS text)=:type |                                  where cast(ha.type as text)=:type | ||||||
|                      ORDER BY legacy_id |                                  order by legacy_id | ||||||
|                     """, |                                 """, | ||||||
|                     List.class) |                         List.class) | ||||||
|                 .setParameter("type", type.name()) |                 .setParameter("type", type.name()) | ||||||
|                 .getResultList() |                 .getResultList() | ||||||
|         ).stream().map(row -> row.get(1).toString()).collect(joining("\n")); |         ).stream().map(row -> row.get(1).toString()).collect(joining("\n")); | ||||||
| @@ -1912,13 +1928,13 @@ public class ImportHostingAssets extends BaseOfficeDataImport { | |||||||
|     private String missingHostingAsstLegacyIds(final HsHostingAssetType type) { |     private String missingHostingAsstLegacyIds(final HsHostingAssetType type) { | ||||||
|         //noinspection unchecked |         //noinspection unchecked | ||||||
|         return ((List<List<?>>) em.createNativeQuery( |         return ((List<List<?>>) em.createNativeQuery( | ||||||
|                     """ |                         """ | ||||||
|                     SELECT ha.uuid, ha.type, ha.identifier FROM hs_hosting.asset ha |                                 select ha.uuid, ha.type, ha.identifier from hs_hosting.asset ha | ||||||
|                              JOIN hs_hosting.asset_legacy_id li ON li.uuid=ha.uuid |                                          join hs_hosting.asset_legacy_id li on li.uuid=ha.uuid | ||||||
|                              WHERE li.legacy_id is null AND CAST(ha.type AS text)=:type |                                          where li.legacy_id is null and cast(ha.type as text)=:type | ||||||
|                              ORDER BY li.legacy_id |                                          order by li.legacy_id | ||||||
|                     """, |                                 """, | ||||||
|                     List.class) |                         List.class) | ||||||
|                 .setParameter("type", type.name()) |                 .setParameter("type", type.name()) | ||||||
|                 .getResultList()).stream() |                 .getResultList()).stream() | ||||||
|                 .map(row -> row.stream().map(Object::toString).collect(joining(", "))) |                 .map(row -> row.stream().map(Object::toString).collect(joining(", "))) | ||||||
|   | |||||||
| @@ -4,11 +4,14 @@ import net.hostsharing.hsadminng.context.Context; | |||||||
| import net.hostsharing.hsadminng.rbac.test.JpaAttempt; | import net.hostsharing.hsadminng.rbac.test.JpaAttempt; | ||||||
| import org.junit.jupiter.api.*; | import org.junit.jupiter.api.*; | ||||||
| import org.junit.jupiter.api.extension.ExtendWith; | import org.junit.jupiter.api.extension.ExtendWith; | ||||||
|  | import org.springframework.beans.factory.annotation.Value; | ||||||
| import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest; | import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest; | ||||||
| import org.springframework.context.annotation.Import; | import org.springframework.context.annotation.Import; | ||||||
| import org.springframework.test.annotation.DirtiesContext; | import org.springframework.test.annotation.DirtiesContext; | ||||||
| import org.springframework.test.context.ActiveProfiles; | import org.springframework.test.context.ActiveProfiles; | ||||||
|  |  | ||||||
|  | import java.io.File; | ||||||
|  |  | ||||||
| /* | /* | ||||||
|  * This 'test' includes the complete legacy 'office' data import. |  * This 'test' includes the complete legacy 'office' data import. | ||||||
|  * |  * | ||||||
| @@ -50,7 +53,8 @@ import org.springframework.test.context.ActiveProfiles; | |||||||
|         "spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importOfficeDataTC}", |         "spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importOfficeDataTC}", | ||||||
|         "spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}", |         "spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}", | ||||||
|         "spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}", |         "spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}", | ||||||
|         "hsadminng.superuser=${HSADMINNG_SUPERUSER:superuser-alex@hostsharing.net}" |         "hsadminng.superuser=${HSADMINNG_SUPERUSER:import-superuser@hostsharing.net}", | ||||||
|  |         "spring.liquibase.contexts=only-office,without-test-data" | ||||||
| }) | }) | ||||||
| @ActiveProfiles("without-test-data") | @ActiveProfiles("without-test-data") | ||||||
| @DirtiesContext | @DirtiesContext | ||||||
| @@ -58,4 +62,13 @@ import org.springframework.test.context.ActiveProfiles; | |||||||
| @TestMethodOrder(MethodOrderer.OrderAnnotation.class) | @TestMethodOrder(MethodOrderer.OrderAnnotation.class) | ||||||
| @ExtendWith(OrderedDependedTestsExtension.class) | @ExtendWith(OrderedDependedTestsExtension.class) | ||||||
| public class ImportOfficeData extends BaseOfficeDataImport { | public class ImportOfficeData extends BaseOfficeDataImport { | ||||||
|  |  | ||||||
|  |     @Value("${spring.datasource.url}") | ||||||
|  |     private String jdbcUrl; | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(9999) | ||||||
|  |     public void dumpOfficeData() { | ||||||
|  |         PostgresTestcontainer.dump(jdbcUrl, new File("build/db/released-only-office-schema-with-import-test-data.sql")); | ||||||
|  |     } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,33 +1,17 @@ | |||||||
| package net.hostsharing.hsadminng.hs.migration; | package net.hostsharing.hsadminng.hs.migration; | ||||||
|  |  | ||||||
| import liquibase.Liquibase; |  | ||||||
| import lombok.SneakyThrows; |  | ||||||
| import org.junit.jupiter.api.Tag; | import org.junit.jupiter.api.Tag; | ||||||
| import org.junit.jupiter.api.Test; | import org.junit.jupiter.api.Test; | ||||||
| import org.springframework.beans.factory.annotation.Autowired; | import org.springframework.beans.factory.annotation.Autowired; | ||||||
|  | import org.springframework.beans.factory.annotation.Value; | ||||||
| import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest; | import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest; | ||||||
| import org.springframework.context.annotation.Import; | import org.springframework.context.annotation.Import; | ||||||
| import org.springframework.test.annotation.DirtiesContext; | import org.springframework.test.annotation.DirtiesContext; | ||||||
| import org.springframework.test.context.ActiveProfiles; | import org.springframework.test.context.ActiveProfiles; | ||||||
| import org.springframework.test.context.jdbc.Sql; | import org.springframework.test.context.jdbc.Sql; | ||||||
| import org.testcontainers.containers.JdbcDatabaseContainer; |  | ||||||
| import org.testcontainers.jdbc.ContainerDatabaseDriver; |  | ||||||
|  |  | ||||||
| import jakarta.persistence.EntityManager; |  | ||||||
| import jakarta.persistence.PersistenceContext; |  | ||||||
| import javax.sql.DataSource; |  | ||||||
| import java.io.BufferedReader; |  | ||||||
| import java.io.File; | import java.io.File; | ||||||
| import java.io.InputStreamReader; |  | ||||||
| import java.util.List; |  | ||||||
| import java.util.Objects; |  | ||||||
| import java.util.stream.Collectors; |  | ||||||
|  |  | ||||||
| import static java.nio.charset.StandardCharsets.UTF_8; |  | ||||||
| import static org.apache.commons.io.FileUtils.readFileToString; |  | ||||||
| import static org.apache.commons.io.FileUtils.write; |  | ||||||
| import static org.apache.commons.io.FileUtils.writeStringToFile; |  | ||||||
| import static org.assertj.core.api.Assertions.assertThat; |  | ||||||
| import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; | import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; | ||||||
|  |  | ||||||
| // BLOG: Liquibase-migration-test (not before the reference-SQL-dump-generation is simplified) | // BLOG: Liquibase-migration-test (not before the reference-SQL-dump-generation is simplified) | ||||||
| @@ -40,9 +24,9 @@ import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TE | |||||||
|  * <p>The test works as follows:</p> |  * <p>The test works as follows:</p> | ||||||
|  * |  * | ||||||
|  * <ol> |  * <ol> | ||||||
|  *     <li>the database is initialized by `db/prod-only-office-schema-with-test-data.sql` from the test-resources</li> |  *     <li>the database is initialized by `db/released-only-office-schema-with-test-data.sql` from the test-resources</li> | ||||||
|  *     <li>the current Liquibase-migrations (only-office but with-test-data) are performed</li> |  *     <li>the current Liquibase-migrations (only-office but with-test-data) are performed</li> | ||||||
|  *     <li>a new dump is written to `db/prod-only-office-schema-with-test-data.sql` in the build-directory</li> |  *     <li>a new dump is written to `db/released-only-office-schema-with-test-data.sql` in the build-directory</li> | ||||||
|  *     <li>an extra Liquibase-changeset (liquibase-migration-test) is applied</li> |  *     <li>an extra Liquibase-changeset (liquibase-migration-test) is applied</li> | ||||||
|  *     <li>it's asserted that the extra changeset got applied</li> |  *     <li>it's asserted that the extra changeset got applied</li> | ||||||
|  * </ol> |  * </ol> | ||||||
| @@ -58,123 +42,31 @@ import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TE | |||||||
| @DirtiesContext | @DirtiesContext | ||||||
| @ActiveProfiles("liquibase-migration-test") | @ActiveProfiles("liquibase-migration-test") | ||||||
| @Import(LiquibaseConfig.class) | @Import(LiquibaseConfig.class) | ||||||
| @Sql(value = "/db/prod-only-office-schema-with-test-data.sql", executionPhase = BEFORE_TEST_CLASS) | @Sql(value = "/db/released-only-office-schema-with-test-data.sql", executionPhase = BEFORE_TEST_CLASS) // release-schema | ||||||
| public class LiquibaseCompatibilityIntegrationTest { | public class LiquibaseCompatibilityIntegrationTest { | ||||||
|  |  | ||||||
|     private static final String EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION = "hs-global-liquibase-migration-test"; |     private static final String EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION = "hs-global-liquibase-migration-test"; | ||||||
|  |     private static final int EXPECTED_LIQUIBASE_CHANGELOGS_IN_PROD_SCHEMA_DUMP = 287; | ||||||
|  |  | ||||||
|  |     @Value("${spring.datasource.url}") | ||||||
|  |     private String jdbcUrl; | ||||||
|  |  | ||||||
|     @Autowired |     @Autowired | ||||||
|     private DataSource dataSource; |     private LiquibaseMigration liquibase; | ||||||
|  |  | ||||||
|     @Autowired |  | ||||||
|     private Liquibase liquibase; |  | ||||||
|  |  | ||||||
|     @PersistenceContext |  | ||||||
|     private EntityManager em; |  | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
|     void migrationWorksBasedOnAPreviouslyPopulatedSchema() { |     void migrationWorksBasedOnAPreviouslyPopulatedSchema() { | ||||||
|         // check the initial status from the @Sql-annotation |         // check the initial status from the @Sql-annotation | ||||||
|         final var initialChangeSetCount = assertProdReferenceStatusAfterRestore(); |         final var initialChangeSetCount = liquibase.assertReferenceStatusAfterRestore( | ||||||
|  |                 EXPECTED_LIQUIBASE_CHANGELOGS_IN_PROD_SCHEMA_DUMP, EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION); | ||||||
|  |  | ||||||
|         // run the current migrations and dump the result to the build-directory |         // run the current migrations and dump the result to the build-directory | ||||||
|         runLiquibaseMigrationsWithContexts("only-office", "with-test-data"); |         liquibase.runWithContexts("only-office", "with-test-data"); | ||||||
|         dumpTo(new File("build/db/prod-only-office-schema-with-test-data.sql")); |         PostgresTestcontainer.dump(jdbcUrl, new File("build/db/released-only-office-schema-with-test-data.sql")); | ||||||
|  |  | ||||||
|         // then add another migration and assert if it was applied |         // then add another migration and assert if it was applied | ||||||
|         runLiquibaseMigrationsWithContexts("liquibase-migration-test"); |         liquibase.runWithContexts("liquibase-migration-test"); | ||||||
|         assertThatCurrentMigrationsGotApplied(initialChangeSetCount); |         liquibase.assertThatCurrentMigrationsGotApplied( | ||||||
|     } |                 initialChangeSetCount, EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION); | ||||||
|  |  | ||||||
|     private int assertProdReferenceStatusAfterRestore() { |  | ||||||
|         final var schemas = singleColumnSqlQuery("SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname='public'"); |  | ||||||
|         assertThat(schemas).containsExactly("databasechangelog", "databasechangeloglock"); |  | ||||||
|  |  | ||||||
|         final var liquibaseScripts1 = singleColumnSqlQuery("SELECT * FROM public.databasechangelog"); |  | ||||||
|         assertThat(liquibaseScripts1).hasSizeGreaterThan(285); |  | ||||||
|         assertThat(liquibaseScripts1).doesNotContain(EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION); |  | ||||||
|         final var initialChangeSetCount = liquibaseScripts1.size(); |  | ||||||
|         return initialChangeSetCount; |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     private void assertThatCurrentMigrationsGotApplied(final int initialChangeSetCount) { |  | ||||||
|         final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog"); |  | ||||||
|         assertThat(liquibaseScripts).hasSizeGreaterThan(initialChangeSetCount); |  | ||||||
|         assertThat(liquibaseScripts).contains(EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @SneakyThrows |  | ||||||
|     private void dumpTo(final File targetFileName) { |  | ||||||
|         makeDir(targetFileName.getParentFile()); |  | ||||||
|  |  | ||||||
|         final var jdbcDatabaseContainer = getJdbcDatabaseContainer(); |  | ||||||
|  |  | ||||||
|         final var sqlDumpFile = new File(targetFileName.getParent(), "." + targetFileName.getName()); |  | ||||||
|         final var pb = new ProcessBuilder( |  | ||||||
|                 "pg_dump", "--column-inserts", "--disable-dollar-quoting", |  | ||||||
|                 "--host=" + jdbcDatabaseContainer.getHost(), |  | ||||||
|                 "--port=" + jdbcDatabaseContainer.getFirstMappedPort(), |  | ||||||
|                 "--username=" + jdbcDatabaseContainer.getUsername() , |  | ||||||
|                 "--dbname=" + jdbcDatabaseContainer.getDatabaseName(), |  | ||||||
|                 "--file=" + sqlDumpFile.getCanonicalPath() |  | ||||||
|         ); |  | ||||||
|         pb.environment().put("PGPASSWORD", jdbcDatabaseContainer.getPassword()); |  | ||||||
|  |  | ||||||
|         final var process = pb.start(); |  | ||||||
|         int exitCode = process.waitFor(); |  | ||||||
|         final var stderr = new BufferedReader(new InputStreamReader(process.getErrorStream())) |  | ||||||
|                 .lines().collect(Collectors.joining("\n")); |  | ||||||
|         assertThat(exitCode).describedAs(stderr).isEqualTo(0); |  | ||||||
|  |  | ||||||
|         final var header = """ |  | ||||||
|               -- ================================================================================= |  | ||||||
|               -- Generated reference-SQL-dump (hopefully of latest prod-release). |  | ||||||
|               -- See: net.hostsharing.hsadminng.hs.migration.LiquibaseCompatibilityIntegrationTest |  | ||||||
|               -- --------------------------------------------------------------------------------- |  | ||||||
|                |  | ||||||
|               -- |  | ||||||
|               -- Explicit pre-initialization because we cannot use `pg_dump --create ...` |  | ||||||
|               -- because the database is already created by Testcontainers. |  | ||||||
|               -- |  | ||||||
|                |  | ||||||
|               CREATE ROLE postgres; |  | ||||||
|               CREATE ROLE admin; |  | ||||||
|               CREATE ROLE restricted; |  | ||||||
|  |  | ||||||
|               """; |  | ||||||
|         writeStringToFile(targetFileName, header, UTF_8, false); // false = overwrite |  | ||||||
|  |  | ||||||
|         write(targetFileName, readFileToString(sqlDumpFile, UTF_8), UTF_8, true); |  | ||||||
|  |  | ||||||
|         assertThat(sqlDumpFile.delete()).describedAs(sqlDumpFile + " cannot be deleted"); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     private void makeDir(final File dir) { |  | ||||||
|         assertThat(!dir.exists() || dir.isDirectory()).describedAs(dir + " does exist, but is not a directory").isTrue(); |  | ||||||
|         assertThat(dir.isDirectory() || dir.mkdirs()).describedAs(dir + " cannot be created").isTrue(); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @SneakyThrows |  | ||||||
|     private void runLiquibaseMigrationsWithContexts(final String... contexts) { |  | ||||||
|         liquibase.update( |  | ||||||
|                 new liquibase.Contexts(contexts), |  | ||||||
|                 new liquibase.LabelExpression()); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     private List<String> singleColumnSqlQuery(final String sql) { |  | ||||||
|         //noinspection unchecked |  | ||||||
|         final var rows = (List<Object>) em.createNativeQuery(sql).getResultList(); |  | ||||||
|         return rows.stream().map(Objects::toString).toList(); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @SneakyThrows |  | ||||||
|     private static JdbcDatabaseContainer<?> getJdbcDatabaseContainer() { |  | ||||||
|         final var getContainerMethod = ContainerDatabaseDriver.class.getDeclaredMethod("getContainer", String.class); |  | ||||||
|         getContainerMethod.setAccessible(true); |  | ||||||
|  |  | ||||||
|         @SuppressWarnings("rawtypes") |  | ||||||
|         final var container = (JdbcDatabaseContainer) getContainerMethod.invoke(null, |  | ||||||
|                 "jdbc:tc:postgresql:15.5-bookworm:///liquibaseMigrationTestTC"); |  | ||||||
|         return container; |  | ||||||
|     } |     } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,28 +1,27 @@ | |||||||
| package net.hostsharing.hsadminng.hs.migration; | package net.hostsharing.hsadminng.hs.migration; | ||||||
|  |  | ||||||
| import liquibase.Liquibase; |  | ||||||
| import liquibase.database.DatabaseFactory; | import liquibase.database.DatabaseFactory; | ||||||
| import liquibase.database.jvm.JdbcConnection; | import liquibase.database.jvm.JdbcConnection; | ||||||
| import liquibase.resource.ClassLoaderResourceAccessor; |  | ||||||
| import org.springframework.context.annotation.Bean; | import org.springframework.context.annotation.Bean; | ||||||
| import org.springframework.context.annotation.Configuration; | import org.springframework.context.annotation.Configuration; | ||||||
| import org.springframework.context.annotation.Profile; | import org.springframework.context.annotation.Profile; | ||||||
|  |  | ||||||
|  | import jakarta.persistence.EntityManager; | ||||||
|  | import jakarta.persistence.PersistenceContext; | ||||||
| import javax.sql.DataSource; | import javax.sql.DataSource; | ||||||
|  |  | ||||||
| @Configuration | @Configuration | ||||||
| @Profile("liquibase-migration-test") | @Profile({"liquibase-migration", "liquibase-migration-test"}) | ||||||
| public class LiquibaseConfig { | public class LiquibaseConfig { | ||||||
|  |  | ||||||
|  |     @PersistenceContext | ||||||
|  |     private EntityManager em; | ||||||
|  |  | ||||||
|     @Bean |     @Bean | ||||||
|     public Liquibase liquibase(DataSource dataSource) throws Exception { |     public LiquibaseMigration liquibase(DataSource dataSource) throws Exception { | ||||||
|         final var connection = dataSource.getConnection(); |         final var connection = dataSource.getConnection(); | ||||||
|         final var database = DatabaseFactory.getInstance() |         final var database = DatabaseFactory.getInstance() | ||||||
|                 .findCorrectDatabaseImplementation(new JdbcConnection(connection)); |                 .findCorrectDatabaseImplementation(new JdbcConnection(connection)); | ||||||
|         return new Liquibase( |         return new LiquibaseMigration(em, "db/changelog/db.changelog-master.yaml", database); | ||||||
|                 "db/changelog/db.changelog-master.yaml", // Path to your Liquibase changelog |  | ||||||
|                 new ClassLoaderResourceAccessor(), |  | ||||||
|                 database |  | ||||||
|         ); |  | ||||||
|     } |     } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -0,0 +1,55 @@ | |||||||
|  | package net.hostsharing.hsadminng.hs.migration; | ||||||
|  |  | ||||||
|  | import liquibase.Liquibase; | ||||||
|  | import liquibase.database.Database; | ||||||
|  | import liquibase.resource.ClassLoaderResourceAccessor; | ||||||
|  | import lombok.SneakyThrows; | ||||||
|  |  | ||||||
|  | import jakarta.persistence.EntityManager; | ||||||
|  | import java.util.List; | ||||||
|  | import java.util.Objects; | ||||||
|  |  | ||||||
|  | import static org.assertj.core.api.Assertions.assertThat; | ||||||
|  |  | ||||||
|  | public class LiquibaseMigration extends Liquibase { | ||||||
|  |  | ||||||
|  |     private final EntityManager em; | ||||||
|  |  | ||||||
|  |     public LiquibaseMigration(final EntityManager em, final String changeLogFile, final Database db) { | ||||||
|  |         super(changeLogFile, new ClassLoaderResourceAccessor(), db); | ||||||
|  |         this.em = em; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @SneakyThrows | ||||||
|  |     public void runWithContexts(final String... contexts) { | ||||||
|  |         update( | ||||||
|  |                 new liquibase.Contexts(contexts), | ||||||
|  |                 new liquibase.LabelExpression()); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public int assertReferenceStatusAfterRestore( | ||||||
|  |             final int minExpectedLiquibaseChangelogs, | ||||||
|  |             final String expectedChangesetOnlyAfterNewMigration) { | ||||||
|  |         final var schemas = singleColumnSqlQuery("SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname='public'"); | ||||||
|  |         assertThat(schemas).containsExactly("databasechangelog", "databasechangeloglock"); | ||||||
|  |  | ||||||
|  |         final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog"); | ||||||
|  |         assertThat(liquibaseScripts).hasSize(minExpectedLiquibaseChangelogs); | ||||||
|  |         assertThat(liquibaseScripts).doesNotContain(expectedChangesetOnlyAfterNewMigration); | ||||||
|  |         return liquibaseScripts.size(); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public void assertThatCurrentMigrationsGotApplied( | ||||||
|  |             final int initialChangeSetCount, | ||||||
|  |             final String expectedChangesetOnlyAfterNewMigration) { | ||||||
|  |         final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog"); | ||||||
|  |         assertThat(liquibaseScripts).hasSizeGreaterThan(initialChangeSetCount); | ||||||
|  |         assertThat(liquibaseScripts).contains(expectedChangesetOnlyAfterNewMigration); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private List<String> singleColumnSqlQuery(final String sql) { | ||||||
|  |         //noinspection unchecked | ||||||
|  |         final var rows = (List<Object>) em.createNativeQuery(sql).getResultList(); | ||||||
|  |         return rows.stream().map(Objects::toString).toList(); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,81 @@ | |||||||
|  | package net.hostsharing.hsadminng.hs.migration; | ||||||
|  |  | ||||||
|  | import lombok.SneakyThrows; | ||||||
|  | import org.testcontainers.containers.JdbcDatabaseContainer; | ||||||
|  | import org.testcontainers.jdbc.ContainerDatabaseDriver; | ||||||
|  |  | ||||||
|  | import java.io.BufferedReader; | ||||||
|  | import java.io.File; | ||||||
|  | import java.io.InputStreamReader; | ||||||
|  | import java.util.stream.Collectors; | ||||||
|  |  | ||||||
|  | import static java.nio.charset.StandardCharsets.UTF_8; | ||||||
|  | import static org.apache.commons.io.FileUtils.readFileToString; | ||||||
|  | import static org.apache.commons.io.FileUtils.write; | ||||||
|  | import static org.apache.commons.io.FileUtils.writeStringToFile; | ||||||
|  | import static org.assertj.core.api.Assertions.assertThat; | ||||||
|  |  | ||||||
|  | public class PostgresTestcontainer { | ||||||
|  |  | ||||||
|  |     @SneakyThrows | ||||||
|  |     public static void dump(final String jdbcUrl, final File targetFileName) { | ||||||
|  |         makeDir(targetFileName.getParentFile()); | ||||||
|  |  | ||||||
|  |         final var jdbcDatabaseContainer = getJdbcDatabaseContainer(jdbcUrl); | ||||||
|  |  | ||||||
|  |         final var sqlDumpFile = new File(targetFileName.getParent(), "." + targetFileName.getName()); | ||||||
|  |         final var pb = new ProcessBuilder( | ||||||
|  |                 "pg_dump", "--column-inserts", "--disable-dollar-quoting", | ||||||
|  |                 "--host=" + jdbcDatabaseContainer.getHost(), | ||||||
|  |                 "--port=" + jdbcDatabaseContainer.getFirstMappedPort(), | ||||||
|  |                 "--username=" + jdbcDatabaseContainer.getUsername(), | ||||||
|  |                 "--dbname=" + jdbcDatabaseContainer.getDatabaseName(), | ||||||
|  |                 "--file=" + sqlDumpFile.getCanonicalPath() | ||||||
|  |         ); | ||||||
|  |         pb.environment().put("PGPASSWORD", jdbcDatabaseContainer.getPassword()); | ||||||
|  |  | ||||||
|  |         final var process = pb.start(); | ||||||
|  |         int exitCode = process.waitFor(); | ||||||
|  |         final var stderr = new BufferedReader(new InputStreamReader(process.getErrorStream())) | ||||||
|  |                 .lines().collect(Collectors.joining("\n")); | ||||||
|  |         assertThat(exitCode).describedAs(stderr).isEqualTo(0); | ||||||
|  |  | ||||||
|  |         final var header = """ | ||||||
|  |               -- ================================================================================= | ||||||
|  |               -- Generated reference-SQL-dump (hopefully of latest prod-release). | ||||||
|  |               -- See: net.hostsharing.hsadminng.hs.migration.LiquibaseCompatibilityIntegrationTest | ||||||
|  |               -- --------------------------------------------------------------------------------- | ||||||
|  |                | ||||||
|  |               -- | ||||||
|  |               -- Explicit pre-initialization because we cannot use `pg_dump --create ...` | ||||||
|  |               -- because the database is already created by Testcontainers. | ||||||
|  |               -- | ||||||
|  |                | ||||||
|  |               CREATE ROLE postgres; | ||||||
|  |               CREATE ROLE admin; | ||||||
|  |               CREATE ROLE restricted; | ||||||
|  |  | ||||||
|  |               """; | ||||||
|  |         writeStringToFile(targetFileName, header, UTF_8, false); // false = overwrite | ||||||
|  |  | ||||||
|  |         write(targetFileName, readFileToString(sqlDumpFile, UTF_8), UTF_8, true); | ||||||
|  |  | ||||||
|  |         assertThat(sqlDumpFile.delete()).describedAs(sqlDumpFile + " cannot be deleted").isTrue(); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private static void makeDir(final File dir) { | ||||||
|  |         assertThat(!dir.exists() || dir.isDirectory()).describedAs(dir + " does exist, but is not a directory").isTrue(); | ||||||
|  |         assertThat(dir.isDirectory() || dir.mkdirs()).describedAs(dir + " cannot be created").isTrue(); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @SneakyThrows | ||||||
|  |     private static JdbcDatabaseContainer<?> getJdbcDatabaseContainer(final String jdbcUrl) { | ||||||
|  |         // TODO.test: check if, in the future, there is a better way to access auto-created Testcontainers | ||||||
|  |         final var getContainerMethod = ContainerDatabaseDriver.class.getDeclaredMethod("getContainer", String.class); | ||||||
|  |         getContainerMethod.setAccessible(true); | ||||||
|  |  | ||||||
|  |         @SuppressWarnings("rawtypes") | ||||||
|  |         final var container = (JdbcDatabaseContainer) getContainerMethod.invoke(null, jdbcUrl); | ||||||
|  |         return container; | ||||||
|  |     } | ||||||
|  | } | ||||||
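For orientation, here is a minimal usage sketch of how the two new helpers introduced above (LiquibaseMigration and PostgresTestcontainer) are meant to work together in a Testcontainers-based migration test. The class name, the dump file name and the assumption of an injected Spring test context are illustrative only and not part of this commit; the Liquibase contexts are the ones configured for the office import (only-office, without-test-data).

    package net.hostsharing.hsadminng.hs.migration;

    import java.io.File;

    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.beans.factory.annotation.Value;

    // Hypothetical usage sketch, not part of this commit: assumes a Spring test context
    // with a Testcontainers-backed datasource and the LiquibaseConfig bean shown above.
    class MigrationDumpUsageSketch {

        @Value("${spring.datasource.url}")
        private String jdbcUrl; // expected to be a jdbc:tc:postgresql:... Testcontainers URL

        @Autowired
        private LiquibaseMigration liquibase; // provided by the LiquibaseConfig bean

        void migrateAndDumpReferenceSchema() {
            // run the wanted Liquibase contexts against the container database ...
            liquibase.runWithContexts("only-office", "without-test-data");

            // ... then write a pg_dump of the resulting schema to the build directory
            // (the file name is an illustrative placeholder)
            PostgresTestcontainer.dump(jdbcUrl, new File("build/db/example-reference-dump.sql"));
        }
    }

This mirrors what ImportOfficeData#dumpOfficeData and LiquibaseCompatibilityIntegrationTest do in the diff above.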
										
											
File diff suppressed because it is too large.