Small amount of cleanup for JPA (Persistence) container

Signed-off-by: Arjan Tijms <arjan.tijms@gmail.com>
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/EmbeddedProviderContainerContractInfo.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/EmbeddedProviderContainerContractInfo.java
index deb3272..d4417f6 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/EmbeddedProviderContainerContractInfo.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/EmbeddedProviderContainerContractInfo.java
@@ -16,16 +16,13 @@
 
 package org.glassfish.persistence.jpa;
 
-import com.sun.appserv.connectors.internal.api.ConnectorRuntime;
-import org.glassfish.api.ActionReport;
-import org.glassfish.api.admin.CommandRunner;
-import org.glassfish.api.admin.ParameterMap;
 import org.glassfish.api.deployment.DeploymentContext;
 
-import javax.naming.NamingException;
+import com.sun.appserv.connectors.internal.api.ConnectorRuntime;
 
 /**
- * Implementation of ProviderContainerContractInfo while running inside embedded server
+ * Implementation of ProviderContainerContractInfo while running inside embedded
+ * server
  *
  * @author Mitesh Meswani
  */
@@ -37,7 +34,7 @@
 
     @Override
     public boolean isWeavingEnabled() {
-        return false; //Weaving is not enabled while running in embedded environment
+        return false; // Weaving is not enabled while running in embedded environment
     }
 
 }
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPAContainer.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPAContainer.java
index 2c6d478..23f2629 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPAContainer.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPAContainer.java
@@ -16,16 +16,19 @@
 
 package org.glassfish.persistence.jpa;
 
+import org.glassfish.api.container.Container;
 import org.glassfish.api.deployment.Deployer;
 import org.jvnet.hk2.annotations.Service;
 
 @Service(name = "org.glassfish.persistence.jpa.JPAContainer")
-public class JPAContainer implements org.glassfish.api.container.Container {
+public class JPAContainer implements Container {
 
+    @Override
     public Class<? extends Deployer> getDeployer() {
         return JPADeployer.class;
     }
 
+    @Override
     public String getName() {
         return "JPA";
     }
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPADeployer.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPADeployer.java
index 3c6cf6a..1265783 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPADeployer.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPADeployer.java
@@ -16,46 +16,64 @@
 
 package org.glassfish.persistence.jpa;
 
-import com.sun.appserv.connectors.internal.api.ConnectorRuntime;
-import com.sun.enterprise.deployment.*;
-import com.sun.enterprise.deployment.util.DOLUtils;
-import com.sun.enterprise.module.bootstrap.StartupContext;
-import com.sun.logging.LogDomains;
+import static java.util.logging.Level.FINE;
+import static java.util.logging.Level.FINER;
+import static org.glassfish.internal.deployment.Deployment.APPLICATION_PREPARED;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
 import org.glassfish.api.deployment.DeployCommandParameters;
+import org.glassfish.api.deployment.DeploymentContext;
+import org.glassfish.api.deployment.MetaData;
+import org.glassfish.api.deployment.OpsParams;
 import org.glassfish.api.event.EventListener;
 import org.glassfish.api.event.Events;
+import org.glassfish.deployment.common.DeploymentException;
 import org.glassfish.deployment.common.RootDeploymentDescriptor;
+import org.glassfish.deployment.common.SimpleDeployer;
+import org.glassfish.hk2.api.PostConstruct;
 import org.glassfish.internal.data.ApplicationInfo;
 import org.glassfish.internal.data.ApplicationRegistry;
 import org.glassfish.internal.deployment.Deployment;
 import org.glassfish.internal.deployment.ExtendedDeploymentContext;
-import org.glassfish.server.ServerEnvironmentImpl;
-import org.glassfish.api.deployment.DeploymentContext;
-import org.glassfish.api.deployment.MetaData;
-import org.glassfish.api.deployment.OpsParams;
-import org.glassfish.deployment.common.SimpleDeployer;
-import org.glassfish.deployment.common.DeploymentException;
 import org.glassfish.persistence.common.Java2DBProcessorHelper;
-import jakarta.inject.Inject;
+import org.glassfish.server.ServerEnvironmentImpl;
 import org.jvnet.hk2.annotations.Service;
-import org.glassfish.hk2.api.PostConstruct;
 
+import com.sun.appserv.connectors.internal.api.ConnectorRuntime;
+import com.sun.enterprise.deployment.Application;
+import com.sun.enterprise.deployment.BundleDescriptor;
+import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
+import com.sun.enterprise.deployment.PersistenceUnitsDescriptor;
+import com.sun.enterprise.deployment.util.DOLUtils;
+import com.sun.enterprise.module.bootstrap.StartupContext;
+import com.sun.logging.LogDomains;
+
+import jakarta.inject.Inject;
 import jakarta.persistence.EntityManager;
 import jakarta.persistence.EntityManagerFactory;
 import jakarta.persistence.PersistenceException;
-import java.io.IOException;
-import java.util.*;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 
 /**
  * Deployer for JPA applications
+ *
  * @author Mitesh Meswani
  */
 @Service
 public class JPADeployer extends SimpleDeployer<JPAContainer, JPApplicationContainer> implements PostConstruct, EventListener {
 
+    private static Logger logger = LogDomains.getLogger(PersistenceUnitLoader.class, LogDomains.PERSISTENCE_LOGGER + ".jpadeployer");
+
+    /** Key used to get/put the list of EMFs created for an app in its transient app metadata */
+    private static final String EMF_KEY = EntityManagerFactory.class.toString();
+
     @Inject
     private ConnectorRuntime connectorRuntime;
 
@@ -63,7 +81,7 @@
     private ServerEnvironmentImpl serverEnvironment;
 
     @Inject
-    private volatile StartupContext sc = null;
+    private volatile StartupContext startupContext;
 
     @Inject
     private Events events;
@@ -71,25 +89,28 @@
     @Inject
     private ApplicationRegistry applicationRegistry;
 
-    private static Logger logger = LogDomains.getLogger(PersistenceUnitLoader.class, LogDomains.PERSISTENCE_LOGGER + ".jpadeployer");
-
-    /** Key used to get/put emflists in transientAppMetadata */
-    private static final String EMF_KEY = EntityManagerFactory.class.toString();
-
-    @Override public MetaData getMetaData() {
-
-        return new MetaData(true /*invalidateCL */ ,
-                null /* provides */,
-                new Class[] {Application.class} /* requires Application from dol */);
+    @Override
+    public void postConstruct() {
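+        // Register for deployment events; event() handles APPLICATION_PREPARED and APPLICATION_DISABLED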
+        events.register(this);
     }
 
+    @Override
+    public MetaData getMetaData() {
+        return new MetaData(
+            true /* invalidateCL */,
+            null /* provides */,
+            new Class[] { Application.class } /* requires Application from dol */);
+    }
+
+    @Override
     protected void generateArtifacts(DeploymentContext dc) throws DeploymentException {
         // Noting to generate yet!!
     }
 
-    protected void cleanArtifacts(DeploymentContext dc) throws DeploymentException {
+    @Override
+    protected void cleanArtifacts(DeploymentContext deploymentContext) throws DeploymentException {
         // Drop tables if needed on undeploy.
-        OpsParams params = dc.getCommandParameters(OpsParams.class);
+        OpsParams params = deploymentContext.getCommandParameters(OpsParams.class);
         if (params.origin.isUndeploy() && isDas()) {
 
             boolean hasScopedResource = false;
@@ -98,108 +119,205 @@
             Application application = appInfo.getMetaData(Application.class);
             Set<BundleDescriptor> bundles = application.getBundleDescriptors();
 
-             // Iterate through all the bundles of the app and collect pu references in referencedPus
-             for (BundleDescriptor bundle : bundles) {
-                 Collection<? extends PersistenceUnitDescriptor> pusReferencedFromBundle = bundle.findReferencedPUs();
-                 for(PersistenceUnitDescriptor pud : pusReferencedFromBundle) {
-                     hasScopedResource = hasScopedResource(pud);
-                     if(hasScopedResource) {
-                         break;
-                     }
-                 }
-             }
+            // Iterate through all the bundles of the app and collect pu references in
+            // referencedPus
+            for (BundleDescriptor bundle : bundles) {
+                Collection<? extends PersistenceUnitDescriptor> pusReferencedFromBundle = bundle.findReferencedPUs();
+                for (PersistenceUnitDescriptor pud : pusReferencedFromBundle) {
+                    hasScopedResource = hasScopedResource(pud);
+                    if (hasScopedResource) {
+                        break;
+                    }
+                }
+            }
 
-            // if there are scoped resources, deploy them so that they are accessible for Java2DB to
-            // delete tables.
-            if(hasScopedResource){
+            // If there are scoped resources, deploy them so that they are accessible for
+            // Java2DB to delete tables.
+            if (hasScopedResource) {
                 connectorRuntime.registerDataSourceDefinitions(application);
             }
 
-             Java2DBProcessorHelper helper = new Java2DBProcessorHelper(dc);
-             helper.init();
-             helper.createOrDropTablesInDB(false, "JPA"); // NOI18N
+            Java2DBProcessorHelper helper = new Java2DBProcessorHelper(deploymentContext);
+            helper.init();
+            helper.createOrDropTablesInDB(false, "JPA");
 
-            //if there are scoped resources, undeploy them.
-            if(hasScopedResource){
+            // If there are scoped resources, undeploy them.
+            if (hasScopedResource) {
                 connectorRuntime.unRegisterDataSourceDefinitions(application);
             }
         }
     }
 
-    /**
-     * @inheritDoc
-     */
+    @Override
     public <V> V loadMetaData(Class<V> type, DeploymentContext context) {
         return null;
     }
 
     /**
-     * EMFs for refered pus are created and stored in JPAApplication instance.
-     * The JPAApplication instance is stored in given DeploymentContext to be retrieved by load
+     * EMFs for referred PUs are created and stored in the JPAApplication instance.
+     * The JPAApplication instance is stored in the given DeploymentContext to be
+     * retrieved by load()
      */
-    @Override public boolean prepare(DeploymentContext context) {
+    @Override
+    public boolean prepare(DeploymentContext context) {
         boolean prepared = super.prepare(context);
-        if(prepared) {
-            if(isEMFCreationRequired(context)) {
+        if (prepared) {
+            if (isEMFCreationRequired(context)) {
                 createEMFs(context);
             }
         }
         return prepared;
     }
 
+    @Override
+    public JPApplicationContainer load(JPAContainer container, DeploymentContext context) {
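+        // EMFs were already created in prepare(); there is nothing JPA-specific left to do at load time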
+        return new JPApplicationContainer();
+    }
+
+    @Override
+    public void event(@SuppressWarnings("rawtypes") Event event) {
+        if (logger.isLoggable(Level.FINEST)) {
+            logger.finest("JpaDeployer.event():" + event.name());
+        }
+
+        if (event.is(APPLICATION_PREPARED)) {
+            ExtendedDeploymentContext context = (ExtendedDeploymentContext) event.hook();
+            DeployCommandParameters deployCommandParameters = context.getCommandParameters(DeployCommandParameters.class);
+            if (logger.isLoggable(FINE)) {
+                logger.fine("JpaDeployer.event(): Handling APPLICATION_PREPARED origin is:" + deployCommandParameters.origin);
+            }
+
+            // When create-application-ref is called for an already deployed app,
+            // APPLICATION_PREPARED will be sent on DAS. Obviously there is no new EMF
+            // created for this event and we need not do java2db either; ignore the event.
+            // However, if the target for create-application-ref is DAS, the app was
+            // deployed on another instance but an application-ref is now being created
+            // on DAS. Process the app.
+            if (!deployCommandParameters.origin.isCreateAppRef() || isTargetDas(deployCommandParameters)) {
+                Map<String, ExtendedDeploymentContext> deploymentContexts = context.getModuleDeploymentContexts();
+
+                for (DeploymentContext deploymentContext : deploymentContexts.values()) {
+                    // bundle level pus
+                    iterateInitializedPUsAtApplicationPrepare(deploymentContext);
+                }
+                // app level pus
+                iterateInitializedPUsAtApplicationPrepare(context);
+            }
+        } else if (event.is(Deployment.APPLICATION_DISABLED)) {
+            logger.fine("JpaDeployer.event(): APPLICATION_DISABLED");
+            // APPLICATION_DISABLED will be generated when an app is
+            // disabled/undeployed/appserver goes down.
+            // close all the emfs created for this app
+            closeEntityManagerFactories((ApplicationInfo) event.hook());
+        }
+    }
+
+    /**
+     * Returns unique identifier for this pu within application
+     *
+     * @param pud The given pu
+     * @return Absolute pu root + pu name
+     */
+    private static String getUniquePuIdentifier(PersistenceUnitDescriptor pud) {
+        return pud.getAbsolutePuRoot() + pud.getName();
+    }
+
+    private static boolean isTargetDas(DeployCommandParameters deployCommandParameters) {
+        // TODO discuss with Hong. This comparison should be encapsulated somewhere
+        return "server".equals(deployCommandParameters.target);
+    }
+
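+    /** The embedded server is treated like DAS for java2db and EMF lifecycle decisions */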
+    private boolean isDas() {
+        return serverEnvironment.isDas() || serverEnvironment.isEmbedded();
+    }
+
+    private void closeEntityManagerFactories(ApplicationInfo appInfo) {
+        // Suppress warning required as there is no way to pass equivalent of
+        // List<EMF>.class to the method
+        @SuppressWarnings("unchecked")
+        List<EntityManagerFactory> emfsCreatedForThisApp = appInfo.getTransientAppMetaData(EMF_KEY, List.class);
+        if (emfsCreatedForThisApp != null) {
+
+            // Events are always dispatched to all registered listeners.
+            // emfsCreatedForThisApp will be null for an app that does not have PUs.
+
+            for (EntityManagerFactory entityManagerFactory : emfsCreatedForThisApp) {
+                entityManagerFactory.close();
+            }
+
+            // We no longer have the EMFs in an open state, so clear the list.
+            // On app enable(after a disable), for a cluster, the deployment framework calls
+            // prepare() for instances but not for DAS.
+            // So on DAS, at a disable, the emfs will be closed and we will not attempt to
+            // close emfs when appserver goes down even if the app is re-enabled.
+            emfsCreatedForThisApp.clear();
+        }
+    }
+
     /**
      * CreateEMFs and save them in persistence
+     *
      * @param context
      */
     private void createEMFs(DeploymentContext context) {
         Application application = context.getModuleMetaData(Application.class);
         Set<BundleDescriptor> bundles = application.getBundleDescriptors();
 
-        // Iterate through all the bundles for the app and collect pu references in referencedPus
+        // Iterate through all the bundles for the app and collect pu references in
+        // referencedPus
         boolean hasScopedResource = false;
         final List<PersistenceUnitDescriptor> referencedPus = new ArrayList<PersistenceUnitDescriptor>();
         for (BundleDescriptor bundle : bundles) {
             Collection<? extends PersistenceUnitDescriptor> pusReferencedFromBundle = bundle.findReferencedPUs();
-            for(PersistenceUnitDescriptor pud : pusReferencedFromBundle) {
+            for (PersistenceUnitDescriptor pud : pusReferencedFromBundle) {
                 referencedPus.add(pud);
-                if( hasScopedResource(pud) ) {
+                if (hasScopedResource(pud)) {
                     hasScopedResource = true;
                 }
             }
         }
         if (hasScopedResource) {
-            // Scoped resources are registered by connector runtime after prepare(). That is too late for JPA
+            // Scoped resources are registered by the connector runtime after prepare().
+            // That is too late for JPA.
             // This is a hack to initialize connectorRuntime for scoped resources
             connectorRuntime.registerDataSourceDefinitions(application);
         }
 
-        //Iterate through all the PUDs for this bundle and if it is referenced, load the corresponding pu
+        // Iterate through all the PUDs for this bundle and if it is referenced, load
+        // the corresponding pu
         PersistenceUnitDescriptorIterator pudIterator = new PersistenceUnitDescriptorIterator() {
-            @Override void visitPUD(PersistenceUnitDescriptor pud, DeploymentContext context) {
-                if(referencedPus.contains(pud)) {
+            @Override
+            void visitPUD(PersistenceUnitDescriptor pud, DeploymentContext context) {
+                if (referencedPus.contains(pud)) {
                     boolean isDas = isDas();
 
-                    // While running in embedded mode, it is not possible to guarantee that entity classes are not loaded by the app classloader before transformers are installed
-                    // If that happens, weaving will not take place and EclipseLink will throw up. Provide users an option to disable weaving by passing the flag.
+                    // While running in embedded mode, it is not possible to guarantee that entity
+                    // classes are not loaded by the app classloader before transformers are
+                    // installed.
+                    //
+                    // If that happens, weaving will not take place and EclipseLink will throw up.
+                    // Provide users an option to disable weaving by passing the flag.
                     // Note that we enable weaving if not explicitly disabled by user
-                    boolean weavingEnabled = Boolean.valueOf(sc.getArguments().getProperty("org.glassfish.persistence.embedded.weaving.enabled", "true"));
+                    boolean weavingEnabled = Boolean
+                            .valueOf(startupContext.getArguments().getProperty("org.glassfish.persistence.embedded.weaving.enabled", "true"));
 
-                    ProviderContainerContractInfo providerContainerContractInfo = weavingEnabled ?
-                            new ServerProviderContainerContractInfo(context, connectorRuntime, isDas) :
-                            new EmbeddedProviderContainerContractInfo(context, connectorRuntime, isDas);
+                    ProviderContainerContractInfo providerContainerContractInfo = weavingEnabled
+                            ? new ServerProviderContainerContractInfo(context, connectorRuntime, isDas)
+                            : new EmbeddedProviderContainerContractInfo(context, connectorRuntime, isDas);
 
                     try {
                         ((ExtendedDeploymentContext) context).prepareScratchDirs();
                     } catch (IOException e) {
-                        // There is no way to recover if we are not able to create the scratch dirs. Just rethrow the exception.
+                        // There is no way to recover if we are not able to create the scratch dirs.
+                        // Just rethrow the exception.
                         throw new RuntimeException(e);
                     }
 
-
-                    PersistenceUnitLoader puLoader = new PersistenceUnitLoader(pud, providerContainerContractInfo);
+                    PersistenceUnitLoader persistenceUnitLoader = new PersistenceUnitLoader(pud, providerContainerContractInfo);
                     // Store the puLoader in context. It is retrieved to execute java2db and to
                     // store the loaded emfs in a JPAApplicationContainer object for cleanup
-                    context.addTransientAppMetaData(getUniquePuIdentifier(pud), puLoader );
+                    context.addTransientAppMetaData(getUniquePuIdentifier(pud), persistenceUnitLoader);
                 }
             }
         };
@@ -209,12 +327,13 @@
     /**
      * @return true if given <code>pud</code> is using scoped resource
      */
-    private boolean hasScopedResource(PersistenceUnitDescriptor pud) {
+    private boolean hasScopedResource(PersistenceUnitDescriptor persistenceUnitDescriptor) {
         boolean hasScopedResource = false;
-        String jtaDataSource = pud.getJtaDataSource();
-        if(jtaDataSource != null && jtaDataSource.startsWith("java:")){
+        String jtaDataSource = persistenceUnitDescriptor.getJtaDataSource();
+        if (jtaDataSource != null && jtaDataSource.startsWith("java:")) {
             hasScopedResource = true;
         }
+
         return hasScopedResource;
     }
 
@@ -223,182 +342,98 @@
      * @return true if emf creation is required false otherwise
      */
     private boolean isEMFCreationRequired(DeploymentContext context) {
-/*
-  Here are various use cases that needs to be handled.
-  This method handles EMF creation part, APPLICATION_PREPARED event handle handles java2db and closing of emf
+        /*
+         * Here are the various use cases that need to be handled. This method handles the EMF
+         * creation part; the APPLICATION_PREPARED event handler handles java2db and closing of EMFs.
+         *
+         * To summarize:
+         * - Unconditionally create EMFs on DAS for java2db if it is a deploy. We will close this EMF
+         *   in APPLICATION_PREPARED after java2db if (target != DAS || enabled == false)
+         * - We will not create EMFs on an instance if the application is not enabled
+         *
+         * ------------------------------------------------------------------------------------
+         *     Scenario                                   Expected Behavior
+         * ------------------------------------------------------------------------------------
+         * deploy --target=server   --enabled=true.   DAS(EMF created, java2db, EMF remains open)
+         *    -restart                                DAS(EMF created, EMF remains open)
+         *    -undeploy                               DAS(EMF closed. Drop tables)
+         *    -create-application-ref instance1       DAS(No action)
+         *                                            INSTANCE1(EMF created)
+         *
+         * deploy --target=server   --enabled=false.  DAS(EMF created, java2db, EMF closed in APPLICATION_PREPARED)
+         *    -restart                                DAS(No EMF created)
+         *    -undeploy                               DAS(No EMF to close, Drop tables)
+         *
+         *    -enable                                 DAS(EMF created)
+         *    -undeploy                               DAS(EMF closed, Drop tables)
+         *
+         *    -create-application-ref instance1       DAS(No action)
+         *                                            INSTANCE1(EMF created)
+         *
+         * deploy --target=instance1 --enabled=true   DAS(EMF created, java2db, EMF closed in APPLICATION_PREPARED)
+         *                                            INSTANCE1(EMF created)
+         *    -create-application-ref instance2       INSTANCE2(EMF created)
+         *    -restart                                DAS(No EMF created)
+         *                                            INSTANCE1(EMF created)
+         *                                            INSTANCE2(EMF created)
+         *    -undeploy                               DAS(No EMF to close, Drop tables)
+         *                                            INSTANCE1(EMF closed)
+         *
+         *    -create-application-ref server          DAS(EMF created)
+         *    -delete-application-ref server          DAS(EMF closed)
+         *    undeploy                                INSTANCE1(EMF closed)
+         *
+         * deploy --target=instance --enabled=false.  DAS(EMF created, java2db, EMF closed in APPLICATION_PREPARED)
+         *                                            INSTANCE1(No EMF created)
+         *    -create-application-ref instance2       DAS(No action)
+         *                                            INSTANCE2(No Action)
+         *    -restart                                DAS(No EMF created)
+         *                                            INSTANCE1(No EMF created)
+         *                                            INSTANCE2(No EMF created)
+         *    -undeploy                               DAS(No EMF to close, Drop tables)
+         *                                            INSTANCE1(No EMF to close)
+         *                                            INSTANCE2(No EMF to close)
+         *
+         *    -enable --target=instance1              DAS(No EMF created)
+         *                                            INSTANCE1(EMF created)
+         */
 
-  To summarize,
-  -Unconditionally create EMFs on DAS for java2db if it is deploy. We will close this EMF in APPLICATION_PREPARED after java2db if (target!= DAS || enable=false)
-  -We will not create EMFs on instance if application is not enabled
-
-        ------------------------------------------------------------------------------------
-            Scenario                                       Expected Behavior
-        ------------------------------------------------------------------------------------
-        deploy --target=server   --enabled=true.   DAS(EMF created, java2db, EMF remains open)
-           -restart                                DAS(EMF created, EMF remains open)
-           -undeploy                               DAS(EMF closed. Drop tables)
-           -create-application-ref instance1       DAS(No action)
-                                                   INSTANCE1(EMF created)
-
-        deploy --target=server   --enabled=false.  DAS(EMF created,java2db, EMF closed in APPLICATION_PREPARED)
-           -restart                                DAS(No EMF created)
-           -undeploy                               DAS(No EMF to close, Drop tables)
-
-           -enable                                 DAS(EMF created)
-           -undelpoy                               DAS(EMF closed, Drop tables)
-
-           -create-application-ref instance1       DAS(No action)
-                                                   INSTANCE1(EMF created)
-
-        deploy --target=instance1 --enabled=true   DAS(EMF created, java2db, EMF closed in APPLICATION_PREPARED)
-                                                   INSTANCE1(EMF created)
-            -create-application-ref instance2      INSTANCE2(EMF created)
-            -restart                               DAS(No EMF created)
-                                                   INSTANCE1(EMF created)
-                                                   INSTANCE2(EMF created)
-            -undeploy                              DAS(No EMF to close, Drop tables)
-                                                   INSTANCE1(EMF closed)
-
-            -create-application-ref server         DAS(EMF created)
-            -delete-application-ref server         DAS(EMF closed)
-            undeploy                               INSTANCE1(EMF closed)
-
-
-        deploy --target=instance --enabled=false.  DAS(EMF created, java2db, EMF closed in APPLICATION_PREPARED)
-                                                   INSTANCE1(No EMF created)
-            -create-application-ref instance2      DAS(No action)
-                                                   INSTANCE2(No Action)
-            -restart                               DAS(No EMF created)
-                                                   INSTANCE1(No EMF created)
-                                                   INSTANCE2(No EMF created)
-            -undeploy                              DAS(No EMF to close, Drop tables)
-                                                   INSTANCE1(No EMF to close)
-                                                   INSTANCE2(No EMF to close)
-
-            -enable --target=instance1             DAS(No EMF created)
-                                                   INSTANCE1(EMF created)
-
-*/
-
-        boolean createEMFs = false;
+        boolean createEntityManagerFactories = false;
         DeployCommandParameters deployCommandParameters = context.getCommandParameters(DeployCommandParameters.class);
-        boolean deploy  = deployCommandParameters.origin.isDeploy();
+        boolean deploy = deployCommandParameters.origin.isDeploy();
         boolean enabled = deployCommandParameters.enabled;
         boolean isDas = isDas();
 
-        if(logger.isLoggable(Level.FINER)) {
+        if (logger.isLoggable(FINER)) {
             logger.finer("isEMFCreationRequired(): deploy: " + deploy + " enabled: " + enabled + " isDas: " + isDas);
         }
 
-        if(isDas) {
-            if(deploy) {
-                createEMFs = true; // Always create emfs on DAS while deploying to take care of java2db and PU validation on deploy
+        if (isDas) {
+            if (deploy) {
+                // Always create emfs on DAS while deploying to take care of java2db and PU
+                // validation on deploy
+                createEntityManagerFactories = true;
             } else {
-                //We reach here for (!deploy && das) => server restart or enabling a disabled app on DAS
+                // We reach here for (!deploy && das) => server restart or enabling a disabled
+                // app on DAS
                 boolean isTargetDas = isTargetDas(deployCommandParameters);
-                if(logger.isLoggable(Level.FINER)) {
+                if (logger.isLoggable(FINER)) {
                     logger.finer("isEMFCreationRequired(): isTargetDas: " + isTargetDas);
                 }
 
-                if(enabled && isTargetDas) {
-                    createEMFs = true;
+                if (enabled && isTargetDas) {
+                    createEntityManagerFactories = true;
                 }
             }
-        } else { //!das => on an instance
-            if(enabled) {
-                createEMFs = true;
+        } else { // !das => on an instance
+            if (enabled) {
+                createEntityManagerFactories = true;
             }
         }
 
-        if(logger.isLoggable(Level.FINER)) {
-            logger.finer("isEMFCreationRequired(): returning createEMFs:" + createEMFs);
+        if (logger.isLoggable(FINER)) {
+            logger.finer("isEMFCreationRequired(): returning createEMFs:" + createEntityManagerFactories);
         }
 
-        return createEMFs;
-    }
-
-    private static boolean isTargetDas(DeployCommandParameters deployCommandParameters) {
-        return "server".equals(deployCommandParameters.target); // TODO discuss with Hong. This comparison should be encapsulated somewhere
+        return createEntityManagerFactories;
     }
 
     /**
-     * @inheritDoc
-     */
-    //@Override
-    public JPApplicationContainer load(JPAContainer container, DeploymentContext context) {
-        return new JPApplicationContainer();
-    }
-
-    /**
-     * Returns unique identifier for this pu within application
-     * @param pud The given pu
-     * @return Absolute pu root + pu name
-     */
-    private static String getUniquePuIdentifier(PersistenceUnitDescriptor pud) {
-        return pud.getAbsolutePuRoot() + pud.getName();
-     }
-
-    private boolean isDas() {
-        return serverEnvironment.isDas() || serverEnvironment.isEmbedded();
-    }
-
-    @Override
-    public void postConstruct() {
-        events.register(this);
-    }
-
-    @Override
-    public void event(Event event) {
-        if(logger.isLoggable(Level.FINEST)) {
-            logger.finest("JpaDeployer.event():" + event.name());
-        }
-        if (event.is(Deployment.APPLICATION_PREPARED) ) {
-            ExtendedDeploymentContext context = (ExtendedDeploymentContext)event.hook();
-            DeployCommandParameters deployCommandParameters = context.getCommandParameters(DeployCommandParameters.class);
-            if(logger.isLoggable(Level.FINE)) {
-                logger.fine("JpaDeployer.event(): Handling APPLICATION_PREPARED origin is:" + deployCommandParameters.origin);
-            }
-
-            // When create-application-ref is called for an already deployed app, APPLICATION_PREPARED will be sent on DAS
-            // Obviously there is no new emf created for this event and we need not do java2db also. Ignore the event
-            // However, if target for create-application-ref is DAS => the app was deployed on other instance but now
-            // an application-ref is being created on DAS. Process the app
-            if(!deployCommandParameters.origin.isCreateAppRef() || isTargetDas(deployCommandParameters)) {
-                Map<String, ExtendedDeploymentContext> deploymentContexts = context.getModuleDeploymentContexts();
-
-                for (DeploymentContext deploymentContext : deploymentContexts.values()) {
-                    //bundle level pus
-                    iterateInitializedPUsAtApplicationPrepare(deploymentContext);
-                }
-                //app level pus
-                iterateInitializedPUsAtApplicationPrepare(context);
-            }
-        } else if(event.is(Deployment.APPLICATION_DISABLED)) {
-            logger.fine("JpaDeployer.event(): APPLICATION_DISABLED");
-            // APPLICATION_DISABLED will be generated when an app is disabled/undeployed/appserver goes down.
-            //close all the emfs created for this app
-            ApplicationInfo appInfo = (ApplicationInfo) event.hook();
-            closeEMFs(appInfo);
-        }
-    }
-
-    private void closeEMFs(ApplicationInfo appInfo) {
-        //Suppress warning required as there is no way to pass equivalent of List<EMF>.class to the method
-        @SuppressWarnings("unchecked") List<EntityManagerFactory> emfsCreatedForThisApp = appInfo.getTransientAppMetaData(EMF_KEY, List.class);
-        if(emfsCreatedForThisApp != null) { // Events are always dispatched to all registered listeners. emfsCreatedForThisApp will be null for an app that does not have PUs.
-            for (EntityManagerFactory entityManagerFactory : emfsCreatedForThisApp) {
-                entityManagerFactory.close();
-            }
-            // We no longer have the emfs in open state clear the list.
-            // On app enable(after a disable), for a cluster, the deployment framework calls prepare() for instances but not for DAS.
-            // So on DAS, at a disable, the emfs will be closed and we will not attempt to close emfs when appserver goes down even if the app is re-enabled.
-            emfsCreatedForThisApp.clear();
-        }
-    }
-
-    /**
-     * Does java2db on DAS and saves emfs created during prepare to ApplicationInfo maintained by DOL.
-     * ApplicationInfo is not available during prepare() so we can not directly use it there.
+     * Does java2db on DAS and saves the EMFs created during prepare() to the
+     * ApplicationInfo maintained by DOL. ApplicationInfo is not available during
+     * prepare(), so we cannot directly use it there.
+     *
      * @param context
      */
     private void iterateInitializedPUsAtApplicationPrepare(final DeploymentContext context) {
@@ -407,84 +442,107 @@
         String appName = deployCommandParameters.name;
         final ApplicationInfo appInfo = applicationRegistry.get(appName);
 
-        //iterate through all the PersistenceUnitDescriptor for this bundle.
+        // iterate through all the PersistenceUnitDescriptor for this bundle.
         PersistenceUnitDescriptorIterator pudIterator = new PersistenceUnitDescriptorIterator() {
-            @Override void visitPUD(PersistenceUnitDescriptor pud, DeploymentContext context) {
-                //PersistenceUnitsDescriptor corresponds to  persistence.xml. A bundle can only have one persitence.xml except
-                // when the bundle is an application which can have multiple persitence.xml under jars in root of ear and lib.
-                PersistenceUnitLoader puLoader = context.getTransientAppMetaData(getUniquePuIdentifier(pud), PersistenceUnitLoader.class);
-                if (puLoader != null) { // We have initialized PU
-                    boolean saveEMF = true;
-                    if(isDas()) { //We do validation and execute Java2DB only on DAS
-                        if(deployCommandParameters.origin.isDeploy()) { //APPLICATION_PREPARED will be called for create-application-ref also. We should perform java2db only on first deploy
+            @Override
+            void visitPUD(PersistenceUnitDescriptor pud, DeploymentContext context) {
+                // PersistenceUnitsDescriptor corresponds to persistence.xml. A bundle can only
+                // have one persistence.xml, except when the bundle is an application, which can
+                // have multiple persistence.xml files under jars in the root of the ear and in lib.
+                PersistenceUnitLoader persistenceUnitLoader = context.getTransientAppMetaData(getUniquePuIdentifier(pud), PersistenceUnitLoader.class);
+                if (persistenceUnitLoader != null) { // We have initialized PU
+                    boolean saveEntityManagerFactory = true;
+                    if (isDas()) { // We do validation and execute Java2DB only on DAS
 
-                            //Create EM to trigger validation on PU
-                            EntityManagerFactory emf = puLoader.getEMF();
-                            EntityManager em = null;
+                        // APPLICATION_PREPARED will also be called for create-application-ref.
+                        // We should perform java2db only on the first deploy.
+                        if (deployCommandParameters.origin.isDeploy()) {
+
+                            // Create EntityManager to trigger validation on PU
+                            EntityManagerFactory entityManagerFactory = persistenceUnitLoader.getEMF();
+                            EntityManager entityManager = null;
                             try {
-                                // Create EM to trigger any validations that are lazily performed by the provider
+                                // Create EM to trigger any validations that are lazily performed by the
+                                // provider
                                 // EM creation also triggers DDL generation by provider.
-                                em = emf.createEntityManager();
+                                entityManager = entityManagerFactory.createEntityManager();
                             } catch (PersistenceException e) {
-                                // Exception indicates something went wrong while performing validation. Clean up and rethrow to fail deployment
-                                emf.close();
-                                throw new DeploymentException(e);  // Need to wrap exception in DeploymentException else deployment will not fail !!
+                                // Exception indicates something went wrong while performing validation.
+                                // Clean up and rethrow to fail deployment.
+                                entityManagerFactory.close();
+
+                                // Need to wrap the exception in a DeploymentException,
+                                // or else deployment will not fail.
+                                throw new DeploymentException(e);
                             } finally {
-                                if (em != null) {
-                                    em.close();
+                                if (entityManager != null) {
+                                    entityManager.close();
                                 }
                             }
 
-                            puLoader.doJava2DB();
+                            persistenceUnitLoader.doJava2DB();
 
                             boolean enabled = deployCommandParameters.enabled;
                             boolean isTargetDas = isTargetDas(deployCommandParameters);
-                            if(logger.isLoggable(Level.FINER)) {
-                                logger.finer("iterateInitializedPUsAtApplicationPrepare(): enabled: " + enabled + " isTargetDas: " + isTargetDas);
+                            if (logger.isLoggable(FINER)) {
+                                logger.finer("iterateInitializedPUsAtApplicationPrepare(): enabled: " + enabled + " isTargetDas: "
+                                        + isTargetDas);
                             }
-                            if(!isTargetDas || !enabled) {
-                                // we are on DAS but target != das or app is not enabled on das => The EMF was just created for Java2Db. Close it.
-                                puLoader.getEMF().close();
-                                saveEMF = false; // Do not save EMF. We have already closed it
+
+                            if (!isTargetDas || !enabled) {
+                                // We are on DAS but target != das or app is not enabled on das => The EMF was
+                                // just created for Java2Db. Close it.
+                                persistenceUnitLoader.getEMF().close();
+                                saveEntityManagerFactory = false; // Do not save EMF. We have already closed it
                             }
                         }
                     }
 
-                    if(saveEMF) {
-                        // Save emf in ApplicationInfo so that it can be retrieved and closed for cleanup
-                        @SuppressWarnings("unchecked") //Suppress warning required as there is no way to pass equivalent of List<EMF>.class to the method
-                        List<EntityManagerFactory> emfsCreatedForThisApp = appInfo.getTransientAppMetaData(EMF_KEY, List.class );
-                        if(emfsCreatedForThisApp == null) {
-                            //First EMF for this app, initialize
+                    if (saveEntityManagerFactory) {
+                        // Save EntityManagerFactory in ApplicationInfo so that it can be retrieved and closed for cleanup
+
+                        // Suppress warning required as there is no way to pass equivalent of
+                        // List<EMF>.class to the method
+                        @SuppressWarnings("unchecked")
+                        List<EntityManagerFactory> emfsCreatedForThisApp = appInfo.getTransientAppMetaData(EMF_KEY, List.class);
+                        if (emfsCreatedForThisApp == null) {
+                            // First EMF for this app, initialize
                             emfsCreatedForThisApp = new ArrayList<EntityManagerFactory>();
                             appInfo.addTransientAppMetaData(EMF_KEY, emfsCreatedForThisApp);
                         }
-                        emfsCreatedForThisApp.add(puLoader.getEMF());
+                        emfsCreatedForThisApp.add(persistenceUnitLoader.getEMF());
                     } // if (saveEMF)
                 } // if(puLoader != null)
             }
         };
 
         pudIterator.iteratePUDs(context);
-
     }
 
     /**
-     * Helper class to centralize the code for loop that iterates through all the PersistenceUnitDescriptor for a given DeploymentContext (and hence the corresponding bundle)
+     * Helper class to centralize the loop that iterates through all the
+     * PersistenceUnitDescriptors for a given DeploymentContext (and hence the
+     * corresponding bundle)
      */
     private static abstract class PersistenceUnitDescriptorIterator {
         /**
-         * Iterate through all the PersistenceUnitDescriptors for the given context (and hence corresponding bundle) and call visitPUD for each of them
+         * Iterate through all the PersistenceUnitDescriptors for the given context (and
+         * hence corresponding bundle) and call visitPUD for each of them
+         *
          * @param context
          */
         void iteratePUDs(DeploymentContext context) {
             RootDeploymentDescriptor currentBundle = DOLUtils.getCurrentBundleForContext(context);
-            if (currentBundle != null) { // it can be null for non-JavaEE type of application deployment. e.g., issue 15869
-                Collection<PersistenceUnitsDescriptor> pusDescriptorForThisBundle = currentBundle.getExtensionsDescriptors(PersistenceUnitsDescriptor.class);
+            // currentBundle can be null for a non-JavaEE type of application deployment, e.g. issue 15869
+            if (currentBundle != null) {
+                Collection<PersistenceUnitsDescriptor> pusDescriptorForThisBundle =
+                    currentBundle.getExtensionsDescriptors(PersistenceUnitsDescriptor.class);
+
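+                // Visit every PU declared in every persistence.xml of this bundle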
                 for (PersistenceUnitsDescriptor persistenceUnitsDescriptor : pusDescriptorForThisBundle) {
-                        for (PersistenceUnitDescriptor pud : persistenceUnitsDescriptor.getPersistenceUnitDescriptors()) {
-                            visitPUD(pud, context);
-                        }
+                    for (PersistenceUnitDescriptor pud : persistenceUnitsDescriptor.getPersistenceUnitDescriptors()) {
+                        visitPUD(pud, context);
+                    }
                 }
             }
 
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPApplicationContainer.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPApplicationContainer.java
index 328024d..2895e6f 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPApplicationContainer.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/JPApplicationContainer.java
@@ -19,26 +19,28 @@
 import org.glassfish.api.deployment.ApplicationContainer;
 import org.glassfish.api.deployment.ApplicationContext;
 
-
 /**
- * Represents Application Container for JPA
- * One instance of this object is created per deployed bundle.
+ * Represents the Application Container for JPA. One instance of this object
+ * is created per deployed bundle.
+ *
  * @author Mitesh Meswani
  */
-public class JPApplicationContainer implements ApplicationContainer {
+public class JPApplicationContainer implements ApplicationContainer<Object> {
 
     public JPApplicationContainer() {
     }
 
-    //-------------- Begin Methods implementing ApplicationContainer interface -------------- //
+    @Override
     public Object getDescriptor() {
         return null;
     }
 
+    @Override
     public boolean start(ApplicationContext startupContxt) {
         return true;
     }
 
+    @Override
     public boolean stop(ApplicationContext stopContext) {
         return true;
     }
@@ -48,6 +50,7 @@
      *
      * @return true if suspending was successful, false otherwise.
      */
+    @Override
     public boolean suspend() {
         // Not (yet) supported
         return false;
@@ -58,16 +61,16 @@
      *
      * @return true if resumption was successful, false otherwise.
      */
+    @Override
     public boolean resume() {
         // Not (yet) supported
         return false;
     }
 
+    @Override
     public ClassLoader getClassLoader() {
-        //TODO: Check with Jerome. Should this return anything but null? currently it does not seem so.
+        // TODO: Check with Jerome. Should this return anything but null?
+        // Currently it does not seem so.
         return null;
     }
-
-    //-------------- End Methods implementing ApplicationContainer interface -------------- //
-
 }
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/PersistenceUnitInfoImpl.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/PersistenceUnitInfoImpl.java
index b14df70..6519923 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/PersistenceUnitInfoImpl.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/PersistenceUnitInfoImpl.java
@@ -16,73 +16,72 @@
 
 package org.glassfish.persistence.jpa;
 
+import static java.util.logging.Level.WARNING;
+import static org.glassfish.deployment.common.DeploymentUtils.getRelativeEmbeddedModulePath;
+import static org.glassfish.persistence.jpa.PersistenceUnitLoader.isNullOrEmpty;
 
-import com.sun.enterprise.deployment.BundleDescriptor;
-import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
-import org.glassfish.deployment.common.RootDeploymentDescriptor;
-import org.glassfish.deployment.common.ModuleDescriptor;
-import com.sun.enterprise.util.i18n.StringManager;
-import com.sun.logging.LogDomains;
-
-import javax.naming.NamingException;
-import jakarta.persistence.spi.ClassTransformer;
-import jakarta.persistence.spi.PersistenceUnitInfo;
-import jakarta.persistence.spi.PersistenceUnitTransactionType;
-import jakarta.persistence.SharedCacheMode;
-import jakarta.persistence.ValidationMode;
-import javax.sql.DataSource;
 import java.io.File;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
-import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import javax.naming.NamingException;
+import javax.sql.DataSource;
+
 import org.glassfish.deployment.common.DeploymentUtils;
+import org.glassfish.deployment.common.ModuleDescriptor;
+import org.glassfish.deployment.common.RootDeploymentDescriptor;
+
+import com.sun.enterprise.deployment.BundleDescriptor;
+import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
+import com.sun.logging.LogDomains;
+
+import jakarta.persistence.SharedCacheMode;
+import jakarta.persistence.ValidationMode;
+import jakarta.persistence.spi.ClassTransformer;
+import jakarta.persistence.spi.PersistenceUnitInfo;
+import jakarta.persistence.spi.PersistenceUnitTransactionType;
 
 /**
  * This class implements {@link PersistenceUnitInfo} interface.
  *
+ * This class is public because it is used in the verifier.
+ *
  * @author Sanjeeb.Sahoo@Sun.COM
  */
 public class PersistenceUnitInfoImpl implements PersistenceUnitInfo {
-    /* This class is public because it is used in verifier */
 
-    private static final String DEFAULT_PROVIDER_NAME = "org.eclipse.persistence.jpa.PersistenceProvider"; // NOI18N
+    private static Logger logger = LogDomains.getLogger(PersistenceUnitInfoImpl.class, LogDomains.PERSISTENCE_LOGGER);
+
+    private static final String DEFAULT_PROVIDER_NAME = "org.eclipse.persistence.jpa.PersistenceProvider";
 
     // We allow the default provider to be specified using -D option.
     private static String defaultProvider;
 
-    private static Logger logger = LogDomains.getLogger(PersistenceUnitInfoImpl.class, LogDomains.PERSISTENCE_LOGGER);
-
-    private static final StringManager localStrings = StringManager.getManager(PersistenceUnitInfoImpl.class);
-
     private PersistenceUnitDescriptor persistenceUnitDescriptor;
-
     private ProviderContainerContractInfo providerContainerContractInfo;
-
     private File absolutePuRootFile;
-
     private DataSource jtaDataSource;
-
     private DataSource nonJtaDataSource;
-
     private List<URL> jarFiles;
 
-
-    public PersistenceUnitInfoImpl(
-            PersistenceUnitDescriptor persistenceUnitDescriptor,
-            ProviderContainerContractInfo providerContainerContractInfo) {
+    public PersistenceUnitInfoImpl(PersistenceUnitDescriptor persistenceUnitDescriptor, ProviderContainerContractInfo providerContainerContractInfo) {
         this.persistenceUnitDescriptor = persistenceUnitDescriptor;
         this.providerContainerContractInfo = providerContainerContractInfo;
         jarFiles = _getJarFiles();
         String jtaDataSourceName = persistenceUnitDescriptor.getJtaDataSource();
         String nonJtaDataSourceName = persistenceUnitDescriptor.getNonJtaDataSource();
+
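+        // Look up the configured data sources eagerly; a failed JNDI lookup aborts PU setup here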
         try {
-            jtaDataSource = jtaDataSourceName == null ? null : providerContainerContractInfo.lookupDataSource(jtaDataSourceName);
-            nonJtaDataSource = nonJtaDataSourceName == null ? null : providerContainerContractInfo.lookupNonTxDataSource(nonJtaDataSourceName);
+            jtaDataSource = jtaDataSourceName == null ?
+                null :
+                providerContainerContractInfo.lookupDataSource(jtaDataSourceName);
+            nonJtaDataSource = nonJtaDataSourceName == null ?
+                null :
+                providerContainerContractInfo.lookupNonTxDataSource(nonJtaDataSourceName);
         } catch (NamingException e) {
             throw new RuntimeException(e);
         }
@@ -90,42 +89,32 @@
 
     // Implementation of PersistenceUnitInfo interface
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public String getPersistenceUnitName() {
         return persistenceUnitDescriptor.getName();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public String getPersistenceProviderClassName() {
         return getPersistenceProviderClassNameForPuDesc(persistenceUnitDescriptor);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public PersistenceUnitTransactionType getTransactionType() {
-        return PersistenceUnitTransactionType.valueOf(
-                persistenceUnitDescriptor.getTransactionType());
+        return PersistenceUnitTransactionType.valueOf(persistenceUnitDescriptor.getTransactionType());
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public DataSource getJtaDataSource() {
         return jtaDataSource;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public DataSource getNonJtaDataSource() {
         return nonJtaDataSource;
     }
 
+    @Override
     public URL getPersistenceUnitRootUrl() {
         try {
             return getAbsolutePuRootFile().toURI().toURL();
@@ -134,108 +123,98 @@
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public List<String> getMappingFileNames() {
         return persistenceUnitDescriptor.getMappingFiles(); // its already unmodifiable
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public List<URL> getJarFileUrls() {
         return jarFiles;
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public List<String> getManagedClassNames() {
         return persistenceUnitDescriptor.getClasses(); // its already unmodifiable
     }
 
+    @Override
     public boolean excludeUnlistedClasses() {
         return persistenceUnitDescriptor.isExcludeUnlistedClasses();
     }
 
+    @Override
     public SharedCacheMode getSharedCacheMode() {
         return persistenceUnitDescriptor.getSharedCacheMode();
     }
 
+    @Override
     public ValidationMode getValidationMode() {
         return persistenceUnitDescriptor.getValidationMode();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public Properties getProperties() {
         return persistenceUnitDescriptor.getProperties(); // its already a clone
     }
 
+    @Override
     public String getPersistenceXMLSchemaVersion() {
         return persistenceUnitDescriptor.getParent().getSpecVersion();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public ClassLoader getClassLoader() {
         return providerContainerContractInfo.getClassLoader();
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public void addTransformer(ClassTransformer transformer) {
         providerContainerContractInfo.addTransformer(transformer);
     }
 
-    /**
-     * {@inheritDoc}
-     */
+    @Override
     public ClassLoader getNewTempClassLoader() {
         return providerContainerContractInfo.getTempClassloader();
     }
 
-    @Override public String toString() {
+    @Override
+    public String toString() {
         /*
          * This method is used for debugging only.
          */
-        StringBuilder result = new StringBuilder("<persistence-unit>"); // NOI18N
-        result.append("\n\t<PURoot>").append(getPersistenceUnitRootUrl()).append("</PURoot>"); // NOI18N
-        result.append("\n\t<name>").append(getPersistenceUnitName()).append("</name>"); // NOI18N
-        result.append("\n\t<provider>").append(getPersistenceProviderClassName()).append("</provider>"); // NOI18N
-        result.append("\n\t<transaction-type>").append(getTransactionType()).append("</transaction-type>"); // NOI18N
-        result.append("\n\t<jta-data-source>").append(getJtaDataSource()).append("</jta-data-source>"); // NOI18N
-        result.append("\n\t<non-jta-data-source>").append(getNonJtaDataSource()).append("</non-jta-data-source>"); // NOI18N
+        StringBuilder result = new StringBuilder("<persistence-unit>");
+        result.append("\n\t<PURoot>").append(getPersistenceUnitRootUrl()).append("</PURoot>");
+        result.append("\n\t<name>").append(getPersistenceUnitName()).append("</name>");
+        result.append("\n\t<provider>").append(getPersistenceProviderClassName()).append("</provider>");
+        result.append("\n\t<transaction-type>").append(getTransactionType()).append("</transaction-type>");
+        result.append("\n\t<jta-data-source>").append(getJtaDataSource()).append("</jta-data-source>");
+        result.append("\n\t<non-jta-data-source>").append(getNonJtaDataSource()).append("</non-jta-data-source>");
         for (URL jar : getJarFileUrls()) {
-            result.append("\n\t<jar-file>").append(jar).append("</jar-file>"); // NOI18N
+            result.append("\n\t<jar-file>").append(jar).append("</jar-file>");
         }
         for (String mappingFile : getMappingFileNames()) {
-            result.append("\n\t<mapping-file>").append(mappingFile).append("</mapping-file>"); // NOI18N
+            result.append("\n\t<mapping-file>").append(mappingFile).append("</mapping-file>");
         }
         for (String clsName : getManagedClassNames()) {
-            result.append("\n\t<class-name>").append(clsName).append("</class-name>"); // NOI18N
+            result.append("\n\t<class-name>").append(clsName).append("</class-name>");
         }
-        result.append("\n\t<exclude-unlisted-classes>").append(excludeUnlistedClasses()).append("</exclude-unlisted-classes>"); // NOI18N
-        result.append("\n\t<properties>").append(getProperties()).append("</properties>"); // NOI18N
-        result.append("\n\t<class-loader>").append(getClassLoader()).append("</class-loader>"); // NOI18N
-        result.append("\n</persistence-unit>\n"); // NOI18N
+        result.append("\n\t<exclude-unlisted-classes>").append(excludeUnlistedClasses()).append("</exclude-unlisted-classes>");
+        result.append("\n\t<properties>").append(getProperties()).append("</properties>");
+        result.append("\n\t<class-loader>").append(getClassLoader()).append("</class-loader>");
+        result.append("\n</persistence-unit>\n");
         return result.toString();
     }
 
     private List<URL> _getJarFiles() {
-        List<String> jarFileNames = new ArrayList<String>(
-                persistenceUnitDescriptor.getJarFiles());
+        List<String> jarFileNames = new ArrayList<String>(persistenceUnitDescriptor.getJarFiles());
         List<URL> jarFiles = new ArrayList<URL>(jarFileNames.size() + 1);
         String absolutePuRoot = getAbsolutePuRootFile().getAbsolutePath();
         for (String jarFileName : jarFileNames) {
-            String nativeJarFileName = jarFileName.replace('/',
-                    File.separatorChar);
+            String nativeJarFileName = jarFileName.replace('/', File.separatorChar);
             final File parentFile = new File(absolutePuRoot).getParentFile();
-            // only components are exploded, hence first look for original archives.
+
+            // Only components are exploded, hence first look for original archives.
             File jarFile = new File(parentFile, nativeJarFileName);
             if (!jarFile.exists()) {
                 // if the referenced jar is itself a component, then
@@ -249,20 +228,17 @@
                 // These are my own notions used here.
                 String pathComponent = "";
                 String nameComponent = jarFileName;
-                if(jarFileName.lastIndexOf("../") != -1) {
-                    final int separatorIndex = jarFileName.lastIndexOf("../")+3;
-                    pathComponent = jarFileName.substring(0,separatorIndex);
+                if (jarFileName.lastIndexOf("../") != -1) {
+                    final int separatorIndex = jarFileName.lastIndexOf("../") + 3;
+                    pathComponent = jarFileName.substring(0, separatorIndex);
                     nameComponent = jarFileName.substring(separatorIndex);
                 }
-                logger.fine("For jar-file="+ jarFileName+ ", " + // NOI18N
-                        "pathComponent=" +pathComponent + // NOI18N
-                        ", nameComponent=" + nameComponent); // NOI18N
+                logger.fine("For jar-file=" + jarFileName + ", " + "pathComponent=" + pathComponent + ", nameComponent=" + nameComponent);
                 File parentPath = new File(parentFile, pathComponent);
 
-                jarFile = new File(parentPath, DeploymentUtils.
-                        getRelativeEmbeddedModulePath(parentPath.
-                        getAbsolutePath(), nameComponent));
+                jarFile = new File(parentPath, DeploymentUtils.getRelativeEmbeddedModulePath(parentPath.getAbsolutePath(), nameComponent));
             }
+
             if (jarFile.exists()) {
                 try {
                     jarFiles.add(jarFile.toURI().toURL());
@@ -271,83 +247,81 @@
                 }
             } else {
                 // Should be a caught by verifier. So, just log a message
-                if (logger.isLoggable(Level.WARNING)) {
-                    logger.log(Level.WARNING, "puinfo.referenced_jar_not_found", new Object[]{absolutePuRoot, jarFileName, jarFile});
+                if (logger.isLoggable(WARNING)) {
+                    logger.log(WARNING, "puinfo.referenced_jar_not_found", new Object[] { absolutePuRoot, jarFileName, jarFile });
                 }
             }
         }
+
         return jarFiles;
     }
 
     private File getAbsolutePuRootFile() {
-        // TODO caller of this method are _getJarFiles() and getPersitenceUnitRootUrl(). Both of them can be implemented using helper methods in PersistenceUnitDescriptor to better encapsulate
+        // TODO Callers of this method are _getJarFiles() and getPersistenceUnitRootUrl().
+        // Both of them can be implemented using helper methods in
+        // PersistenceUnitDescriptor to better encapsulate.
         if (absolutePuRootFile == null) {
             absolutePuRootFile = new File(providerContainerContractInfo.getApplicationLocation(),
                     getAbsolutePuRootWithinApplication().replace('/', File.separatorChar));
             if (!absolutePuRootFile.exists()) {
-                throw new RuntimeException(
-                        absolutePuRootFile.getAbsolutePath() + " does not exist!");
+                throw new RuntimeException(absolutePuRootFile.getAbsolutePath() + " does not exist!");
             }
         }
+
         return absolutePuRootFile;
     }
 
     /**
-     * This method calculates the absolute path of the root of a PU.
-     * Absolute path is not the path with regards to root of file system.
-     * It is the path from the root of the Java EE application this
-     * persistence unit belongs to.
-     * Returned path always uses '/' as path separator.
+     * This method calculates the absolute path of the root of a PU. The absolute
+     * path is not the path with regard to the root of the file system; it is the
+     * path from the root of the Java EE application this persistence unit belongs
+     * to. The returned path always uses '/' as the path separator.
+     *
      * @return the absolute path of the root of this persistence unit
      */
     private String getAbsolutePuRootWithinApplication() {
         // TODO shift this into PersistenceUnitDescriptor to better encapsulate
-        RootDeploymentDescriptor rootDD = persistenceUnitDescriptor.getParent().
-                getParent();
-        String puRoot = persistenceUnitDescriptor.getPuRoot();
-        if(rootDD.isApplication()){
-            return puRoot;
-        } else {
-            ModuleDescriptor module = BundleDescriptor.class.cast(rootDD).
-                    getModuleDescriptor();
-            if(module.isStandalone()) {
-                return puRoot;
-            } else {
-                // The module is embedded in an ear (an ejb jar or war)
-                final String moduleLocation =        // Would point to the directory where module is expanded. For example myejb_jar
-                        DeploymentUtils.getRelativeEmbeddedModulePath(
-                        providerContainerContractInfo.getApplicationLocation(), module.getArchiveUri());
-                return moduleLocation + '/' + puRoot; // see we always '/'
-            }
+        RootDeploymentDescriptor rootDD = persistenceUnitDescriptor.getParent().getParent();
+        String persistenceUnitRoot = persistenceUnitDescriptor.getPuRoot();
+        if (rootDD.isApplication()) {
+            return persistenceUnitRoot;
         }
+
+        ModuleDescriptor<?> module = BundleDescriptor.class.cast(rootDD).getModuleDescriptor();
+        if (module.isStandalone()) {
+            return persistenceUnitRoot;
+        }
+
+        // The module is embedded in an ear (an ejb jar or war)
+        final String moduleLocation = // Points to the directory where the module is expanded, e.g. myejb_jar
+                getRelativeEmbeddedModulePath(
+                    providerContainerContractInfo.getApplicationLocation(),
+                    module.getArchiveUri());
+
+        return moduleLocation + '/' + persistenceUnitRoot; // Note that we always use '/' as the separator
     }
 
-
     /**
-     * This method first checks if default provider is specified in the
-     * environment (e.g. using -D option in domain.xml). If so, we use that.
-     * Else we defaults to EclipseLink.
+     * This method first checks if a default provider is specified in the environment
+     * (e.g. using the -D option in domain.xml). If so, we use that. Otherwise we
+     * default to EclipseLink.
      *
      * @return
      */
     public static String getDefaultprovider() {
-        final String DEFAULT_PERSISTENCE_PROVIDER_PROPERTY =
-                "com.sun.persistence.defaultProvider"; // NOI18N
-        if(defaultProvider == null) {
-            defaultProvider =
-                    System.getProperty(DEFAULT_PERSISTENCE_PROVIDER_PROPERTY,
-                        DEFAULT_PROVIDER_NAME);
+        if (defaultProvider == null) {
+            defaultProvider = System.getProperty("com.sun.persistence.defaultProvider", DEFAULT_PROVIDER_NAME);
         }
 
         return defaultProvider;
     }
 
-    public static String getPersistenceProviderClassNameForPuDesc(
-            PersistenceUnitDescriptor persistenceUnitDescriptor) {
+    public static String getPersistenceProviderClassNameForPuDesc(PersistenceUnitDescriptor persistenceUnitDescriptor) {
         String provider = persistenceUnitDescriptor.getProvider();
-        if (PersistenceUnitLoader.isNullOrEmpty(provider)) {
+        if (isNullOrEmpty(provider)) {
             provider = getDefaultprovider();
         }
+
         return provider;
     }
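
Note on getDefaultprovider() above: it lazily caches the result of a System.getProperty lookup that falls back to DEFAULT_PROVIDER_NAME. A minimal standalone sketch of that "system property with fallback" idiom follows; the class name, the volatile field, and the fallback value are illustrative only and are not part of this patch.

    public final class DefaultProviderHolder {

        // Hypothetical fallback for this sketch; the real default lives in
        // PersistenceUnitInfoImpl.DEFAULT_PROVIDER_NAME.
        private static final String FALLBACK_PROVIDER = "org.eclipse.persistence.jpa.PersistenceProvider";

        private static volatile String defaultProvider;

        private DefaultProviderHolder() {
        }

        public static String getDefaultProvider() {
            if (defaultProvider == null) {
                // A -Dcom.sun.persistence.defaultProvider=... system property wins;
                // otherwise fall back to the hard-coded default.
                defaultProvider = System.getProperty("com.sun.persistence.defaultProvider", FALLBACK_PROVIDER);
            }
            return defaultProvider;
        }
    }
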
 
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/PersistenceUnitLoader.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/PersistenceUnitLoader.java
index 0aa212b..1f0651f 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/PersistenceUnitLoader.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/PersistenceUnitLoader.java
@@ -16,45 +16,39 @@
 
 package org.glassfish.persistence.jpa;
 
-import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
+import static java.util.Collections.unmodifiableMap;
+import static java.util.logging.Level.FINE;
+import static java.util.logging.Level.INFO;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.logging.Logger;
+
 import org.glassfish.deployment.common.RootDeploymentDescriptor;
-import com.sun.enterprise.deployment.PersistenceUnitsDescriptor;
-import com.sun.enterprise.util.i18n.StringManager;
-import com.sun.logging.LogDomains;
 import org.glassfish.persistence.jpa.schemageneration.SchemaGenerationProcessor;
 import org.glassfish.persistence.jpa.schemageneration.SchemaGenerationProcessorFactory;
 
+import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
+import com.sun.enterprise.deployment.PersistenceUnitsDescriptor;
+import com.sun.enterprise.util.i18n.StringManager;
+import com.sun.logging.LogDomains;
+
 import jakarta.persistence.EntityManagerFactory;
 import jakarta.persistence.ValidationMode;
-import jakarta.persistence.spi.PersistenceUnitInfo;
 import jakarta.persistence.spi.PersistenceProvider;
+import jakarta.persistence.spi.PersistenceUnitInfo;
 import jakarta.persistence.spi.PersistenceUnitTransactionType;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.Collections;
-import java.util.Properties;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * Loads emf corresponding to a PersistenceUnit. Executes java2db if required.
+ *
  * @author Mitesh Meswani
  * @author Sanjeeb.Sahoo@Sun.COM
  */
 public class PersistenceUnitLoader {
 
-    /**
-     * Conduit to talk with container
-     */
-    private ProviderContainerContractInfo providerContainerContractInfo;
-
-    private EntityManagerFactory emf;
-
-    /**
-     * The schemaGenerationProcessor instance for the Java2DB work.
-     */
-    private SchemaGenerationProcessor schemaGenerationProcessor;
-
     private static Logger logger = LogDomains.getLogger(PersistenceUnitLoader.class, LogDomains.PERSISTENCE_LOGGER);
 
     private static final StringManager localStrings = StringManager.getManager(PersistenceUnitLoader.class);
@@ -62,7 +56,7 @@
     private static Map<String, String> integrationProperties;
 
     /** EclipseLink property name to enable/disable weaving **/
-    private static final String ECLIPSELINK_WEAVING_PROPERTY = "eclipselink.weaving"; // NOI18N
+    private static final String ECLIPSELINK_WEAVING_PROPERTY = "eclipselink.weaving";
 
     /** Name of property used to specify validation mode */
     private static final String VALIDATION_MODE_PROPERTY = "jakarta.persistence.validation.mode";
@@ -72,31 +66,76 @@
 
     private static final String DISABLE_UPGRADE_FROM_TOPLINK_ESSENTIALS = "org.glassfish.persistence.jpa.disable.upgrade.from.toplink.essentials";
 
+    static {
+        /*
+         * We set all the provider specific integration level properties here; these
+         * are all the integration level properties that are needed to integrate a
+         * provider with our container. When we add support for other containers, we
+         * should modify this code so that the user does not have to specify such
+         * properties in their persistence.xml file. These properties can be
+         * overridden by persistence.xml as per the spec. Before applying default
+         * values for properties, this block first checks if the properties have been
+         * set in the system (typically done using the -D option in domain.xml).
+         *
+         */
+        // ------------------- The Base -------------------------
+
+        Map<String, String> props = new HashMap<>();
+
+        final String ECLIPSELINK_SERVER_PLATFORM_CLASS_NAME_PROPERTY = "eclipselink.target-server";
+        props.put(
+            ECLIPSELINK_SERVER_PLATFORM_CLASS_NAME_PROPERTY,
+            System.getProperty(ECLIPSELINK_SERVER_PLATFORM_CLASS_NAME_PROPERTY, "Glassfish"));
+
+        // Hibernate specific properties:
+        final String HIBERNATE_TRANSACTION_MANAGER_LOOKUP_CLASS_PROPERTY = "hibernate.transaction.manager_lookup_class";
+        props.put(HIBERNATE_TRANSACTION_MANAGER_LOOKUP_CLASS_PROPERTY,
+            System.getProperty(HIBERNATE_TRANSACTION_MANAGER_LOOKUP_CLASS_PROPERTY,
+            "org.hibernate.transaction.SunONETransactionManagerLookup"));
+
+        integrationProperties = unmodifiableMap(props);
+    }
+
+    /**
+     * Conduit to talk with container
+     */
+    private ProviderContainerContractInfo providerContainerContractInfo;
+
+    private EntityManagerFactory entityManagerFactory;
+
+    /**
+     * The schemaGenerationProcessor instance for the Java2DB work.
+     */
+    private SchemaGenerationProcessor schemaGenerationProcessor;
+
     public PersistenceUnitLoader(PersistenceUnitDescriptor puToInstatntiate, ProviderContainerContractInfo providerContainerContractInfo) {
-       this.providerContainerContractInfo = providerContainerContractInfo;
+        this.providerContainerContractInfo = providerContainerContractInfo;
 
-       // A hack to work around EclipseLink issue https://bugs.eclipse.org/bugs/show_bug.cgi?id=248328 for prelude
-       // This should be removed once version of EclipseLink which fixes the issue is integrated.
-       // set the system property required by EclipseLink before we load it.
-       setSystemPropertyToEnableDoPrivilegedInEclipseLink();
+        // A hack to work around EclipseLink issue
+        // https://bugs.eclipse.org/bugs/show_bug.cgi?id=248328 for prelude.
+        // This should be removed once a version of EclipseLink that fixes the issue
+        // is integrated.
+        // Set the system property required by EclipseLink before we load it.
+        setSystemPropertyToEnableDoPrivilegedInEclipseLink();
 
-       emf = loadPU(puToInstatntiate);
-   }
+        entityManagerFactory = loadPU(puToInstatntiate);
+    }
 
     /**
      * @return The emf loaded.
      */
     public EntityManagerFactory getEMF() {
-        return emf;
+        return entityManagerFactory;
     }
 
     private void setSystemPropertyToEnableDoPrivilegedInEclipseLink() {
         final String PROPERTY_NAME = "eclipselink.security.usedoprivileged";
-        // Need not invoke in doPrivileged block as the whole call stack consist of trusted code when this code
+        // Need not invoke in doPrivileged block as the whole call stack consists of
+        // trusted code when this code
         // is invoked
-        if(System.getProperty(PROPERTY_NAME) == null) {
+        if (System.getProperty(PROPERTY_NAME) == null) {
             // property not set. Set it to true
-            System.setProperty(PROPERTY_NAME, String.valueOf(Boolean.TRUE) );
+            System.setProperty(PROPERTY_NAME, String.valueOf(Boolean.TRUE));
         }
     }
 
@@ -104,49 +143,49 @@
      * Loads an individual PersistenceUnitDescriptor and registers the
      * EntityManagerFactory in appropriate DOL structure.
      *
-     * @param pud PersistenceUnitDescriptor to be loaded.
+     * @param persistenceUnitDescriptor PersistenceUnitDescriptor to be loaded.
      */
-    private EntityManagerFactory loadPU(PersistenceUnitDescriptor pud) {
+    private EntityManagerFactory loadPU(PersistenceUnitDescriptor persistenceUnitDescriptor) {
+        checkForUpgradeFromTopLinkEssentials(persistenceUnitDescriptor);
+        checkForDataSourceOverride(persistenceUnitDescriptor);
+        calculateDefaultDataSource(persistenceUnitDescriptor);
 
-
-        checkForUpgradeFromTopLinkEssentials(pud);
-
-        checkForDataSourceOverride(pud);
-
-        calculateDefaultDataSource(pud);
-
-        PersistenceUnitInfo pInfo = new PersistenceUnitInfoImpl(pud, providerContainerContractInfo);
+        PersistenceUnitInfo persistenceUnitInfo =
+            new PersistenceUnitInfoImpl(persistenceUnitDescriptor, providerContainerContractInfo);
 
         String applicationLocation = providerContainerContractInfo.getApplicationLocation();
-        final boolean fineMsgLoggable = logger.isLoggable(Level.FINE);
-        if(fineMsgLoggable) {
-            logger.fine("Loading persistence unit for application: \"" + applicationLocation + "\"pu Root is: " +
-                    pud.getPuRoot());
-            logger.fine("PersistenceInfo for this pud is :\n" + pInfo); // NOI18N
+        final boolean fineMsgLoggable = logger.isLoggable(FINE);
+        if (fineMsgLoggable) {
+            logger.fine("Loading persistence unit for application: \"" + applicationLocation + "\"pu Root is: " + persistenceUnitDescriptor.getPuRoot());
+            logger.fine("PersistenceInfo for this pud is :\n" + persistenceUnitInfo);
         }
 
         PersistenceProvider provider;
         try {
-            // See we use application CL as opposed to system CL to loadPU
+            // We use the application CL as opposed to the system CL to load the PU
             // provider. This allows user to get hold of provider specific
-            // implementation classes in their code. But this also means
+            // implementation classes in their code.
+
+            // But this also means
             // provider must not use appserver implementation classes directly
             // because once we implement isolation in our class loader hierarchy
             // the only classes available to application class loader would be
             // our appserver interface classes. By Sahoo
-            provider =
-                    PersistenceProvider.class.cast(
-                    providerContainerContractInfo.getClassLoader()
-                    .loadClass(pInfo.getPersistenceProviderClassName())
-                    .newInstance());
-        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
+            provider = PersistenceProvider.class
+                    .cast(
+                        providerContainerContractInfo.getClassLoader()
+                                                     .loadClass(persistenceUnitInfo.getPersistenceProviderClassName())
+                                                     .getDeclaredConstructor()
+                                                     .newInstance());
+
+        } catch (ReflectiveOperationException | IllegalArgumentException | SecurityException e) {
             throw new RuntimeException(e);
         }
 
         Map<String, Object> schemaGenerationOverrides;
-        schemaGenerationProcessor = SchemaGenerationProcessorFactory.createSchemaGenerationProcessor(pud);
-        if(providerContainerContractInfo.isJava2DBRequired() ) {
-            schemaGenerationProcessor.init(pud, providerContainerContractInfo.getDeploymentContext());
+        schemaGenerationProcessor = SchemaGenerationProcessorFactory.createSchemaGenerationProcessor(persistenceUnitDescriptor);
+        if (providerContainerContractInfo.isJava2DBRequired()) {
+            schemaGenerationProcessor.init(persistenceUnitDescriptor, providerContainerContractInfo.getDeploymentContext());
             schemaGenerationOverrides = schemaGenerationProcessor.getOverridesForSchemaGeneration();
         } else {
             // schema generation is not required if this EMF is being created for
@@ -158,116 +197,127 @@
         }
 
         Map<String, Object> overRides = new HashMap<String, Object>(integrationProperties);
-        if(schemaGenerationOverrides != null) {
+        if (schemaGenerationOverrides != null) {
             overRides.putAll(schemaGenerationOverrides);
         }
 
         // Check if the persistence unit requires Bean Validation
-        ValidationMode validationMode = getValidationMode(pud);
-        if(validationMode == ValidationMode.AUTO || validationMode == ValidationMode.CALLBACK ) {
+        ValidationMode validationMode = getValidationMode(persistenceUnitDescriptor);
+        if (validationMode == ValidationMode.AUTO || validationMode == ValidationMode.CALLBACK) {
             overRides.put(VALIDATOR_FACTORY, providerContainerContractInfo.getValidatorFactory());
         }
 
-        if(!providerContainerContractInfo.isWeavingEnabled()) {
-            overRides.put(ECLIPSELINK_WEAVING_PROPERTY, System.getProperty(ECLIPSELINK_WEAVING_PROPERTY,"false")); // NOI18N
+        if (!providerContainerContractInfo.isWeavingEnabled()) {
+            overRides.put(ECLIPSELINK_WEAVING_PROPERTY, System.getProperty(ECLIPSELINK_WEAVING_PROPERTY, "false"));
         }
 
-        EntityManagerFactory emf = provider.createContainerEntityManagerFactory(pInfo, overRides);
+        EntityManagerFactory entityManagerFactory = provider.createContainerEntityManagerFactory(persistenceUnitInfo, overRides);
+
+        logger.logp(FINE, "PersistenceUnitLoader", "loadPU", "emf = {0}", entityManagerFactory);
+
+        PersistenceUnitsDescriptor parent = persistenceUnitDescriptor.getParent();
+        RootDeploymentDescriptor containingBundle = parent.getParent();
+        providerContainerContractInfo.registerEMF(
+            persistenceUnitInfo.getPersistenceUnitName(), persistenceUnitDescriptor.getPuRoot(), containingBundle, entityManagerFactory);
 
         if (fineMsgLoggable) {
-            logger.logp(Level.FINE, "PersistenceUnitLoader", "loadPU", // NOI18N
-                        "emf = {0}", emf); // NOI18N
-        }
-
-        PersistenceUnitsDescriptor parent = pud.getParent();
-        RootDeploymentDescriptor containingBundle = parent.getParent();
-        providerContainerContractInfo.registerEMF(pInfo.getPersistenceUnitName(), pud.getPuRoot(), containingBundle, emf);
-
-        if(fineMsgLoggable) {
-            logger.fine("Finished loading persistence unit for application: " +  // NOI18N
+            logger.fine("Finished loading persistence unit for application: " +
                     applicationLocation);
         }
-        return emf;
+
+        return entityManagerFactory;
     }
 
     /**
-     * If use provided data source is overridden, update PersistenceUnitDescriptor with it
+     * If the user-provided data source is overridden, update the
+     * PersistenceUnitDescriptor with the override
      */
-    private void checkForDataSourceOverride(PersistenceUnitDescriptor pud) {
+    private void checkForDataSourceOverride(PersistenceUnitDescriptor persistenceUnitDescriptor) {
         String jtaDataSourceOverride = providerContainerContractInfo.getJTADataSourceOverride();
-        if(jtaDataSourceOverride != null) {
-            pud.setJtaDataSource(jtaDataSourceOverride);
+        if (jtaDataSourceOverride != null) {
+            persistenceUnitDescriptor.setJtaDataSource(jtaDataSourceOverride);
         }
     }
 
     /** Calculate and set the default data source in given <code>pud</code> **/
-    private void calculateDefaultDataSource(PersistenceUnitDescriptor pud) {
-        String jtaDataSourceName = calculateJtaDataSourceName(pud.getTransactionType(), pud.getJtaDataSource(), pud.getNonJtaDataSource(), pud.getName());
-        String nonJtaDataSourceName = calculateNonJtaDataSourceName(pud.getJtaDataSource(), pud.getNonJtaDataSource());
-        pud.setJtaDataSource(jtaDataSourceName);
-        pud.setNonJtaDataSource(nonJtaDataSourceName);
+    private void calculateDefaultDataSource(PersistenceUnitDescriptor persistenceUnitDescriptor) {
+        String jtaDataSourceName =
+            calculateJtaDataSourceName(
+                persistenceUnitDescriptor.getTransactionType(),
+                persistenceUnitDescriptor.getJtaDataSource(),
+                persistenceUnitDescriptor.getNonJtaDataSource(),
+                persistenceUnitDescriptor.getName());
+
+        String nonJtaDataSourceName =
+            calculateNonJtaDataSourceName(
+                persistenceUnitDescriptor.getJtaDataSource(),
+                persistenceUnitDescriptor.getNonJtaDataSource());
+
+        persistenceUnitDescriptor.setJtaDataSource(jtaDataSourceName);
+        persistenceUnitDescriptor.setNonJtaDataSource(nonJtaDataSourceName);
     }
 
     /**
      * @return DataSource Name to be used as JTA data source.
      */
-    private String calculateJtaDataSourceName(String transactionType, String userSuppliedJTADSName, String userSuppliedNonJTADSName, String puName) {
+    private String calculateJtaDataSourceName(String transactionType, String userSuppliedJTADSName, String userSuppliedNonJTADSName,
+            String puName) {
         /*
-         * Use DEFAULT_DS_NAME iff user has not specified both jta-ds-name
-         * and non-jta-ds-name; and user has specified transaction-type as JTA.
-         * See Gf issue #1204 as well.
+         * Use DEFAULT_DS_NAME iff the user has specified neither jta-ds-name nor
+         * non-jta-ds-name, and has specified transaction-type as JTA. See Gf issue
+         * #1204 as well.
          */
         if (PersistenceUnitTransactionType.valueOf(transactionType) != PersistenceUnitTransactionType.JTA) {
-            logger.logp(Level.FINE,
-                    "PersistenceUnitInfoImpl", // NOI18N
-                    "_getJtaDataSource", // NOI18N
-                    "This PU is configured as non-jta, so jta-data-source is null"); // NOI18N
+            logger.logp(FINE,
+                "PersistenceUnitInfoImpl",
+                 "_getJtaDataSource",
+                 "This PU is configured as non-jta, so jta-data-source is null");
             return null; // this is a non-jta-data-source
         }
-        String DSName;
+
+        String dataSourceName;
         if (!isNullOrEmpty(userSuppliedJTADSName)) {
-            DSName = userSuppliedJTADSName; // use user supplied jta-ds-name
-        } else if (isNullOrEmpty(userSuppliedNonJTADSName )) {
-            DSName = providerContainerContractInfo.getDefaultDataSourceName();
+            dataSourceName = userSuppliedJTADSName; // use user supplied jta-ds-name
+        } else if (isNullOrEmpty(userSuppliedNonJTADSName)) {
+            dataSourceName = providerContainerContractInfo.getDefaultDataSourceName();
         } else {
-            String msg = localStrings.getString("puinfo.jta-ds-not-configured", // NOI18N
-                    new Object[] {puName});
+            String msg = localStrings.getString("puinfo.jta-ds-not-configured",
+                    new Object[] { puName });
             throw new RuntimeException(msg);
         }
-        logger.logp(Level.FINE, "PersistenceUnitLoaderImpl", // NOI18N
-                "_getJtaDataSource", "JTADSName = {0}", // NOI18N
-                DSName);
-        return DSName;
+
+        logger.logp(FINE, "PersistenceUnitLoaderImpl",
+                "_getJtaDataSource",
+                "JTADSName = {0}",
+                dataSourceName);
+
+        return dataSourceName;
     }
 
-    private String calculateNonJtaDataSourceName(String userSuppliedJTADSName, String userSuppliedNonJTADSName ) {
+    private String calculateNonJtaDataSourceName(String userSuppliedJTADSName, String userSuppliedNonJTADSName) {
         /*
-         * If non-JTA name is *not* provided
-         * - use the JTA DS name (if supplied)
-         * If non-JTA name is provided
-         * - use non-JTA DS name
-         * (this is done for ease of use, because user does not have to
-         * explicitly mark a connection pool as non-transactional.
-         * Calling lookupNonTxDataSource() with a resource which is
-         * already configured as non-transactional has no side effects.)
-         * If neither non-JTA nor JTA name is provided
-         * use DEFAULT_DS_NAME.
+         * If non-JTA name is *not* provided, use the JTA DS name (if supplied).
+         * If non-JTA name is provided, use the non-JTA DS name (this is done for ease
+         * of use, because the user does not have to explicitly mark a connection pool
+         * as non-transactional. Calling lookupNonTxDataSource() with a resource which
+         * is already configured as non-transactional has no side effects.)
+         * If neither the non-JTA nor the JTA name is provided, use DEFAULT_DS_NAME.
          */
-        String DSName;
+        String dataSourceName;
         if (!isNullOrEmpty(userSuppliedNonJTADSName)) {
-            DSName = userSuppliedNonJTADSName;
+            dataSourceName = userSuppliedNonJTADSName;
         } else {
             if (!isNullOrEmpty(userSuppliedJTADSName)) {
-                DSName = userSuppliedJTADSName;
+                dataSourceName = userSuppliedJTADSName;
             } else {
-                DSName = providerContainerContractInfo.getDefaultDataSourceName();
+                dataSourceName = providerContainerContractInfo.getDefaultDataSourceName();
             }
         }
-        logger.logp(Level.FINE,
-                "PersistenceUnitInfoImpl", // NOI18N
-                "_getNonJtaDataSource", "nonJTADSName = {0}", // NOI18N
-                DSName);
-        return DSName;
+        logger.logp(FINE, "PersistenceUnitInfoImpl",
+                "_getNonJtaDataSource", "nonJTADSName = {0}",
+                dataSourceName);
+
+        return dataSourceName;
     }
 
     static boolean isNullOrEmpty(String s) {
@@ -275,36 +325,40 @@
     }
 
     /**
-     * If the app is using Toplink Essentials as the provider and TopLink Essentials is not available in classpath
-     * We try to upgrade the app to use EclipseLink.
-     * Change the provider to EclipseLink and translate "toplink.*" properties to "eclipselink.*" properties
+     * If the app is using TopLink Essentials as the provider and TopLink Essentials
+     * is not available on the classpath, we try to upgrade the app to use
+     * EclipseLink: change the provider to EclipseLink and translate "toplink.*"
+     * properties to "eclipselink.*" properties.
      */
-    private void checkForUpgradeFromTopLinkEssentials(PersistenceUnitDescriptor pud) {
-        if(Boolean.getBoolean(DISABLE_UPGRADE_FROM_TOPLINK_ESSENTIALS) ) {
-            //Return if instructed by System property
+    private void checkForUpgradeFromTopLinkEssentials(PersistenceUnitDescriptor persistenceUnitDescriptor) {
+        if (Boolean.getBoolean(DISABLE_UPGRADE_FROM_TOPLINK_ESSENTIALS)) {
+            // Return if instructed by System property
             return;
         }
-        boolean upgradeTopLinkEssentialsProperties = false;
-        String providerClassName = pud.getProvider();
 
-        if (providerClassName == null || providerClassName.isEmpty() ) {
-            // This might be a JavaEE app running against V2 and relying in provider name being defaulted.
+        boolean upgradeTopLinkEssentialsProperties = false;
+        String providerClassName = persistenceUnitDescriptor.getProvider();
+
+        if (providerClassName == null || providerClassName.isEmpty()) {
+            // This might be a JavaEE app running against V2 and relying on the provider
+            // name being defaulted.
             upgradeTopLinkEssentialsProperties = true;
-        } else if( "oracle.toplink.essentials.PersistenceProvider".equals(providerClassName) ||
-                "oracle.toplink.essentials.ejb.cmp3.EntityManagerFactoryProvider".equals(providerClassName) ) {
+        } else if ("oracle.toplink.essentials.PersistenceProvider".equals(providerClassName)
+                || "oracle.toplink.essentials.ejb.cmp3.EntityManagerFactoryProvider".equals(providerClassName)) {
             try {
                 providerContainerContractInfo.getClassLoader().loadClass(providerClassName);
             } catch (ClassNotFoundException e) {
-                // Toplink Essentials classes are not available to an application using it as the provider
+                // Toplink Essentials classes are not available to an application using it as
+                // the provider
                 // Migrate the application to use EclipseLink
 
                 String defaultProvider = PersistenceUnitInfoImpl.getDefaultprovider();
-                if(logger.isLoggable(Level.INFO)) {
-                    logger.log(Level.INFO, "puloader.defaulting.provider.on.upgrade", new Object[] {pud.getName(), defaultProvider});
+                if (logger.isLoggable(INFO)) {
+                    logger.log(INFO, "puloader.defaulting.provider.on.upgrade", new Object[] { persistenceUnitDescriptor.getName(), defaultProvider });
                 }
 
                 // Change the provider name
-                pud.setProvider(defaultProvider);
+                persistenceUnitDescriptor.setProvider(defaultProvider);
                 upgradeTopLinkEssentialsProperties = true;
             }
         }
@@ -313,12 +367,12 @@
             // For each "toplink*" property, add a "eclipselink* property
             final String TOPLINK = "toplink";
             final String ECLIPSELINK = "eclipselink";
-            Properties properties = pud.getProperties();
-            for (Map.Entry entry : properties.entrySet()) {
+            Properties properties = persistenceUnitDescriptor.getProperties();
+            for (Entry<Object, Object> entry : properties.entrySet()) {
                 String key = (String) entry.getKey();
-                if(key.startsWith(TOPLINK) ) {
+                if (key.startsWith(TOPLINK)) {
                     String translatedKey = ECLIPSELINK + key.substring(TOPLINK.length());
-                    pud.addProperty(translatedKey, entry.getValue());
+                    persistenceUnitDescriptor.addProperty(translatedKey, entry.getValue());
                 }
             }
         }
@@ -326,66 +380,31 @@
 
     /**
      * Called during load when the correct classloader and transformer had been
-     * already set.
-     * For emf that require Java2DB, call createEntityManager() to populate
-     * the DDL files, then iterate over those files and execute each line in them.
+     * already set. For EMFs that require Java2DB, call createEntityManager() to
+     * populate the DDL files, then iterate over those files and execute each line
+     * in them.
      */
     void doJava2DB() {
         if (schemaGenerationProcessor.isContainerDDLExecutionRequired()) {
-            final boolean fineMsgLoggable = logger.isLoggable(Level.FINE);
-            if(fineMsgLoggable) {
-                logger.fine("<--- To Create Tables"); // NOI18N
-            }
+            logger.fine("<--- To Create Tables");
 
             schemaGenerationProcessor.executeCreateDDL();
 
-            if(fineMsgLoggable) {
-                logger.fine("---> Done Create Tables"); // NOI18N
-            }
+            logger.fine("---> Done Create Tables");
         }
     }
 
-    private ValidationMode getValidationMode(PersistenceUnitDescriptor pud) {
-        ValidationMode validationMode = pud.getValidationMode(); //Initialize with value element <validation-mode> in persitence.xml
-        //Check is overridden in properties
-        String validationModeFromProperty = (String) pud.getProperties().get(VALIDATION_MODE_PROPERTY);
-        if(validationModeFromProperty != null) {
-            //User would get IllegalArgumentException if he has specified invalid mode
+    private ValidationMode getValidationMode(PersistenceUnitDescriptor persistenceUnitDescriptor) {
+        // Initialize with the value of the <validation-mode> element in persistence.xml
+        ValidationMode validationMode = persistenceUnitDescriptor.getValidationMode();
+
+        // Check if it is overridden in the properties
+        String validationModeFromProperty = (String) persistenceUnitDescriptor.getProperties().get(VALIDATION_MODE_PROPERTY);
+        if (validationModeFromProperty != null) {
+            // The user would get an IllegalArgumentException if an invalid mode is specified
             validationMode = ValidationMode.valueOf(validationModeFromProperty);
         }
+
         return validationMode;
     }
-
-
-    static {
-        /*
-         * We set all the provider specific integration level properties here.
-         * It knows about all the integration level properties that
-         * are needed to integrate a provider with our container. When we add
-         * support for other containers, we should modify this code so that user
-         * does not have to specify such properties in their persistence.xml file.
-         * These properties can be overriden by persistence.xml as per
-         * the spec. Before applying default values for properties, this method
-         * first checks if the properties have been set in the system
-         * (typically done using -D option in domain.xml).
-         *
-         */
-        // ------------------- The Base -------------------------
-
-        Map<String, String> props = new HashMap<>();
-
-        final String ECLIPSELINK_SERVER_PLATFORM_CLASS_NAME_PROPERTY = "eclipselink.target-server"; // NOI18N
-        props.put(ECLIPSELINK_SERVER_PLATFORM_CLASS_NAME_PROPERTY,
-                System.getProperty(ECLIPSELINK_SERVER_PLATFORM_CLASS_NAME_PROPERTY, "Glassfish")); // NOI18N
-
-        // Hibernate specific properties:
-        final String HIBERNATE_TRANSACTION_MANAGER_LOOKUP_CLASS_PROPERTY = "hibernate.transaction.manager_lookup_class"; // NOI18N
-        props.put(HIBERNATE_TRANSACTION_MANAGER_LOOKUP_CLASS_PROPERTY,
-                System.getProperty(HIBERNATE_TRANSACTION_MANAGER_LOOKUP_CLASS_PROPERTY, "org.hibernate.transaction.SunONETransactionManagerLookup")); // NOI18N
-
-        integrationProperties = Collections.unmodifiableMap(props);
-
-    }
-
-
 }
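
Note on loadPU() above: the provider class named in the persistence unit is instantiated reflectively through the application class loader and then asked for a container-managed EntityManagerFactory, with the integration properties passed as overrides. A minimal sketch of just that bootstrap step follows; the class and method names (ProviderBootstrapSketch, createEmf) are illustrative and not GlassFish API.

    import java.util.Map;

    import jakarta.persistence.EntityManagerFactory;
    import jakarta.persistence.spi.PersistenceProvider;
    import jakarta.persistence.spi.PersistenceUnitInfo;

    public final class ProviderBootstrapSketch {

        private ProviderBootstrapSketch() {
        }

        static EntityManagerFactory createEmf(PersistenceUnitInfo unitInfo, ClassLoader applicationLoader,
                Map<String, Object> integrationOverrides) {
            PersistenceProvider provider;
            try {
                // getDeclaredConstructor().newInstance() replaces the deprecated
                // Class.newInstance(); reflective failures surface as
                // ReflectiveOperationException.
                provider = (PersistenceProvider) applicationLoader
                        .loadClass(unitInfo.getPersistenceProviderClassName())
                        .getDeclaredConstructor()
                        .newInstance();
            } catch (ReflectiveOperationException e) {
                throw new RuntimeException(e);
            }

            // Container-managed bootstrap: the overrides take precedence over persistence.xml.
            return provider.createContainerEntityManagerFactory(unitInfo, integrationOverrides);
        }
    }
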
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ProviderContainerContractInfo.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ProviderContainerContractInfo.java
index 6c969fe..ebba544 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ProviderContainerContractInfo.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ProviderContainerContractInfo.java
@@ -16,35 +16,37 @@
 
 package org.glassfish.persistence.jpa;
 
-import jakarta.persistence.spi.ClassTransformer;
-import jakarta.persistence.EntityManagerFactory;
-import javax.sql.DataSource;
 import javax.naming.NamingException;
-import jakarta.validation.ValidatorFactory;
+import javax.sql.DataSource;
 
 import org.glassfish.api.deployment.DeploymentContext;
 import org.glassfish.deployment.common.RootDeploymentDescriptor;
 
+import jakarta.persistence.EntityManagerFactory;
+import jakarta.persistence.spi.ClassTransformer;
+import jakarta.validation.ValidatorFactory;
 
 /**
+ * This encapsulates information needed to load or unload
+ * persistence units.
+ *
  * @author Mitesh Meswani
- * This encapsulates information needed  to load or unload persistence units.
  */
 public interface ProviderContainerContractInfo {
 
-    static final String DEFAULT_DS_NAME = "jdbc/__default";
+    String DEFAULT_DS_NAME = "jdbc/__default";
 
     /**
      *
-     * @return a class loader that is used to load persistence entities
-     * bundled in this application.
+     * @return a class loader that is used to load persistence entities bundled in
+     * this application.
      */
     ClassLoader getClassLoader();
 
     /**
      *
-     * @return a temp class loader that is used to load persistence entities
-     * bundled in this application.
+     * @return a temp class loader that is used to load persistence entities bundled
+     * in this application.
      */
     ClassLoader getTempClassloader();
 
@@ -54,7 +56,6 @@
      */
     void addTransformer(ClassTransformer transformer);
 
-
     /**
      * @return absolute path of the location where application is exploded.
      */
@@ -62,6 +63,7 @@
 
     /**
      * Looks up DataSource with JNDI name given by <code>dataSourceName</code>
+     *
      * @param dataSourceName
      * @return DataSource with JNDI name given by <code>dataSourceName</code>
      * @throws javax.naming.NamingException
@@ -69,9 +71,12 @@
     DataSource lookupDataSource(String dataSourceName) throws NamingException;
 
     /**
-     * Looks up Non transactional DataSource with JNDI name given by <code>dataSourceName</code>
+     * Looks up Non transactional DataSource with JNDI name given by
+     * <code>dataSourceName</code>
+     *
      * @param dataSourceName
-     * @return Non transactional DataSource with JNDI name given by <code>dataSourceName</code>
+     * @return Non transactional DataSource with JNDI name given by
+     * <code>dataSourceName</code>
      * @throws NamingException
      */
     DataSource lookupNonTxDataSource(String dataSourceName) throws NamingException;
@@ -83,6 +88,7 @@
 
     /**
      * Will be called while loading an application.
+     *
      * @return true if java2DB is required false otherwise
      */
     boolean isJava2DBRequired();
@@ -92,11 +98,12 @@
      */
     DeploymentContext getDeploymentContext();
 
-
     /**
      * Register the give emf with underlying container
+     *
      * @param unitName Name of correspoding PersistenceUnit
-     * @param persistenceRootUri URI within application (excluding META-INF) for root of corresponding PersistenceUnit
+     * @param persistenceRootUri URI within application (excluding META-INF) for
+     * root of corresponding PersistenceUnit
      * @param containingBundle The bundle that contains PU for the given EMF
      * @param emf The emf that needs to be registered
      */
@@ -109,12 +116,14 @@
 
     /**
      *
-     * @return default data source name to be used if user has not defined a data source
+     * @return default data source name to be used if user has not defined a data
+     * source
      */
     String getDefaultDataSourceName();
 
     /**
-     * @return true if weaving is enabled for the current environment false otherwise
+     * @return true if weaving is enabled for the current environment false
+     * otherwise
      */
     boolean isWeavingEnabled();
 }
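
Note on the DEFAULT_DS_NAME change above: fields declared in a Java interface are implicitly public static final, so dropping the explicit modifiers does not change the constant's semantics. A tiny illustration with hypothetical names:

    public interface ModifierIllustration {
        static final String VERBOSE = "jdbc/__default"; // redundant modifiers
        String CONCISE = "jdbc/__default";              // identical semantics
    }
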
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ProviderContainerContractInfoBase.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ProviderContainerContractInfoBase.java
index 7c71aa3..79ac3e6 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ProviderContainerContractInfoBase.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ProviderContainerContractInfoBase.java
@@ -16,15 +16,19 @@
 
 package org.glassfish.persistence.jpa;
 
-import com.sun.appserv.connectors.internal.api.ConnectorRuntime;
-import org.glassfish.api.deployment.DeploymentContext;
-import org.glassfish.persistence.common.PersistenceHelper;
+import static org.glassfish.persistence.common.PersistenceHelper.lookupNonTxResource;
+import static org.glassfish.persistence.common.PersistenceHelper.lookupPMResource;
 
 import javax.naming.NamingException;
 import javax.sql.DataSource;
 
+import org.glassfish.api.deployment.DeploymentContext;
+
+import com.sun.appserv.connectors.internal.api.ConnectorRuntime;
+
 /**
  * Convenience base class for implementing ProviderContainerContractInfo.
+ *
  * @author Mitesh Meswani
  */
 public abstract class ProviderContainerContractInfoBase implements ProviderContainerContractInfo {
@@ -33,7 +37,9 @@
     private DeploymentContext context;
 
     public ProviderContainerContractInfoBase(ConnectorRuntime connectorRuntime) {
-        //This ctor is currently called only by ACC impl of ProviderContainerContractInfo which which will not deal with app/module scoped resources
+        // This ctor is currently called only by ACC impl of
+        // ProviderContainerContractInfo which will not deal with app/module
+        // scoped resources
         this.connectorRuntime = connectorRuntime;
     }
 
@@ -44,12 +50,12 @@
 
     @Override
     public DataSource lookupDataSource(String dataSourceName) throws NamingException {
-        return DataSource.class.cast(PersistenceHelper.lookupPMResource(connectorRuntime, context, dataSourceName) );
+        return DataSource.class.cast(lookupPMResource(connectorRuntime, context, dataSourceName));
     }
 
     @Override
     public DataSource lookupNonTxDataSource(String dataSourceName) throws NamingException {
-        return DataSource.class.cast(PersistenceHelper.lookupNonTxResource(connectorRuntime, context, dataSourceName) );
+        return DataSource.class.cast(lookupNonTxResource(connectorRuntime, context, dataSourceName));
     }
 
     @Override
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ServerProviderContainerContractInfo.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ServerProviderContainerContractInfo.java
index e9024e8..20efec8 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ServerProviderContainerContractInfo.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/ServerProviderContainerContractInfo.java
@@ -16,126 +16,135 @@
 
 package org.glassfish.persistence.jpa;
 
-import com.sun.appserv.connectors.internal.api.ConnectorRuntime;
-import com.sun.enterprise.deployment.Application;
-import com.sun.enterprise.deployment.BundleDescriptor;
-import org.glassfish.deployment.common.RootDeploymentDescriptor;
-import org.glassfish.api.deployment.DeploymentContext;
-import org.glassfish.api.deployment.InstrumentableClassLoader;
-import org.glassfish.api.deployment.OpsParams;
-import org.glassfish.api.deployment.archive.ReadableArchive;
-import org.glassfish.persistence.common.DatabaseConstants;
+import static org.glassfish.persistence.common.DatabaseConstants.JTA_DATASOURCE_JNDI_NAME_OVERRIDE;
 
-import jakarta.validation.Validation;
-import jakarta.validation.ValidatorFactory;
-import jakarta.persistence.EntityManagerFactory;
-import jakarta.persistence.spi.ClassTransformer;
 import java.lang.instrument.ClassFileTransformer;
 import java.lang.instrument.IllegalClassFormatException;
 import java.security.ProtectionDomain;
 
+import org.glassfish.api.deployment.DeploymentContext;
+import org.glassfish.api.deployment.InstrumentableClassLoader;
+import org.glassfish.api.deployment.OpsParams;
+import org.glassfish.api.deployment.archive.ReadableArchive;
+import org.glassfish.deployment.common.RootDeploymentDescriptor;
+
+import com.sun.appserv.connectors.internal.api.ConnectorRuntime;
+import com.sun.enterprise.deployment.Application;
+import com.sun.enterprise.deployment.BundleDescriptor;
+
+import jakarta.persistence.EntityManagerFactory;
+import jakarta.persistence.spi.ClassTransformer;
+import jakarta.validation.Validation;
+import jakarta.validation.ValidatorFactory;
+
 /**
  * Implementation of ProviderContainerContractInfo while running on server.
+ *
  * @author Mitesh Meswani
  */
 public class ServerProviderContainerContractInfo extends ProviderContainerContractInfoBase {
 
-       private final DeploymentContext deploymentContext;
-       private final ClassLoader finalClassLoader;
-       private ValidatorFactory validatorFactory;
-       boolean isDas;
+    private final DeploymentContext deploymentContext;
+    private final ClassLoader finalClassLoader;
+    private ValidatorFactory validatorFactory;
+    boolean isDas;
 
-       public ServerProviderContainerContractInfo(DeploymentContext deploymentContext, ConnectorRuntime connectorRuntime, boolean isDas) {
-           super(connectorRuntime, deploymentContext);
-           this.deploymentContext = deploymentContext;
-           // Cache finalClassLoader as deploymentContext.getFinalClassLoader() is expected to be called only once during deployment.
-           this.finalClassLoader = deploymentContext.getFinalClassLoader();
-           this.isDas = isDas;
-       }
+    public ServerProviderContainerContractInfo(DeploymentContext deploymentContext, ConnectorRuntime connectorRuntime, boolean isDas) {
+        super(connectorRuntime, deploymentContext);
+        this.deploymentContext = deploymentContext;
+        // Cache finalClassLoader as deploymentContext.getFinalClassLoader() is expected
+        // to be called only once during deployment.
+        this.finalClassLoader = deploymentContext.getFinalClassLoader();
+        this.isDas = isDas;
+    }
 
-      @Override
-      public ClassLoader getClassLoader() {
-           return finalClassLoader;
-       }
+    @Override
+    public ClassLoader getClassLoader() {
+        return finalClassLoader;
+    }
 
-       @Override
-       public ClassLoader getTempClassloader() {
-           return ( (InstrumentableClassLoader)deploymentContext.getClassLoader() ).copy();
-       }
+    @Override
+    public ClassLoader getTempClassloader() {
+        return ((InstrumentableClassLoader) deploymentContext.getClassLoader()).copy();
+    }
 
-       @Override
-       public void addTransformer(final ClassTransformer transformer) {
-           // Bridge between java.lang.instrument.ClassFileTransformer that DeploymentContext accepts
-           // and jakarta.persistence.spi.ClassTransformer that JPA supplies.
-           deploymentContext.addTransformer(new ClassFileTransformer() {
-               public byte[] transform(ClassLoader loader, String className, Class<?> classBeingRedefined,
-                                       ProtectionDomain protectionDomain, byte[] classfileBuffer)
-                       throws IllegalClassFormatException {
-                   return transformer.transform(loader, className, classBeingRedefined, protectionDomain, classfileBuffer);
-               }
-           });
-       }
+    @Override
+    public void addTransformer(final ClassTransformer transformer) {
+        // Bridge between the java.lang.instrument.ClassFileTransformer that
+        // DeploymentContext accepts and the jakarta.persistence.spi.ClassTransformer
+        // that JPA supplies.
+        deploymentContext.addTransformer(new ClassFileTransformer() {
+            @Override
+            public byte[] transform(
+                    ClassLoader loader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain,
+                    byte[] classfileBuffer) throws IllegalClassFormatException {
+                return transformer.transform(loader, className, classBeingRedefined, protectionDomain, classfileBuffer);
+            }
+        });
+    }
 
-        @Override
-        public String getApplicationLocation() {
-           // Get source for current bundle. If it has not parent, it is the top level application
-           // else continue traversing up till we find one with not parent.
-           ReadableArchive archive = deploymentContext.getSource();
-           boolean appRootFound = false;
-           while (!appRootFound) {
-               ReadableArchive parentArchive = archive.getParentArchive();
-               if(parentArchive != null) {
-                   archive = parentArchive;
-               } else {
-                   appRootFound = true;
-               }
-           }
-           return archive.getURI().getPath();
-       }
+    @Override
+    public String getApplicationLocation() {
+        // Get the source for the current bundle. If it has no parent, it is the top
+        // level application.
+        // Else continue traversing up until we find one with no parent.
+        ReadableArchive archive = deploymentContext.getSource();
+        boolean appRootFound = false;
+        while (!appRootFound) {
+            ReadableArchive parentArchive = archive.getParentArchive();
+            if (parentArchive != null) {
+                archive = parentArchive;
+            } else {
+                appRootFound = true;
+            }
+        }
 
-       @Override
-       public ValidatorFactory getValidatorFactory() {
-           // TODO Once discussion about BeanValidation in JavaEE is done, ValidatorFactory should be available from deployment context
-           // We only create one validator factory per bundle.
-           if (validatorFactory == null) {
-               validatorFactory = Validation.buildDefaultValidatorFactory();
-           }
+        return archive.getURI().getPath();
+    }
 
-           return validatorFactory;
-       }
+    @Override
+    public ValidatorFactory getValidatorFactory() {
+        // TODO Once discussion about BeanValidation in JavaEE is done, ValidatorFactory
+        // should be available from the deployment context.
+        // We only create one validator factory per bundle.
+        if (validatorFactory == null) {
+            validatorFactory = Validation.buildDefaultValidatorFactory();
+        }
 
-       @Override
-       public boolean isJava2DBRequired() {
-           OpsParams params = deploymentContext.getCommandParameters(OpsParams.class);
-           // We only do java2db while being deployed on DAS. We do not do java2DB on load of an application or being deployed on an instance of a cluster
-           return params.origin.isDeploy() && isDas;
-       }
+        return validatorFactory;
+    }
 
-       @Override
-       public DeploymentContext getDeploymentContext() {
-           return deploymentContext;
-       }
+    @Override
+    public boolean isJava2DBRequired() {
+        OpsParams params = deploymentContext.getCommandParameters(OpsParams.class);
+        // We only do java2db while being deployed on the DAS. We do not do java2db on
+        // load of an application or when being deployed on an instance of a cluster.
+        return params.origin.isDeploy() && isDas;
+    }
 
-       @Override
-       public void registerEMF(String unitName, String persistenceRootUri, RootDeploymentDescriptor containingBundle, EntityManagerFactory emf) {
-           // We register the EMF into the bundle that declared the corresponding PU. This limits visibility of the emf
-           // to containing module.
-           // See EMFWrapper.lookupEntityManagerFactory() for corresponding look up logic
-           if (containingBundle.isApplication()) {
-               // ear level pu
-               assert containingBundle instanceof Application;
-               Application.class.cast(containingBundle).addEntityManagerFactory(
-                       unitName, persistenceRootUri, emf);
-           } else {
-               assert containingBundle instanceof BundleDescriptor;
-               BundleDescriptor.class.cast(containingBundle).addEntityManagerFactory(
-                       unitName, emf);
-           }
-       }
+    @Override
+    public DeploymentContext getDeploymentContext() {
+        return deploymentContext;
+    }
 
-       @Override
-       public String getJTADataSourceOverride() {
-           return deploymentContext.getTransientAppMetaData(DatabaseConstants.JTA_DATASOURCE_JNDI_NAME_OVERRIDE, String.class);
-       }
+    @Override
+    public void registerEMF(String unitName, String persistenceRootUri, RootDeploymentDescriptor containingBundle, EntityManagerFactory entityManagerFactory) {
+        // We register the EMF into the bundle that declared the corresponding PU.
+        // This limits visibility of the EMF to the containing module.
+        // See EMFWrapper.lookupEntityManagerFactory() for the corresponding
+        // lookup logic.
+        if (containingBundle.isApplication()) {
+            // EAR level PU
+            assert containingBundle instanceof Application;
+            Application.class.cast(containingBundle).addEntityManagerFactory(unitName, persistenceRootUri, entityManagerFactory);
+        } else {
+            assert containingBundle instanceof BundleDescriptor;
+            BundleDescriptor.class.cast(containingBundle).addEntityManagerFactory(unitName, entityManagerFactory);
+        }
+    }
+
+    @Override
+    public String getJTADataSourceOverride() {
+        return deploymentContext.getTransientAppMetaData(JTA_DATASOURCE_JNDI_NAME_OVERRIDE, String.class);
+    }
 }
-
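The getValidatorFactory() method above lazily builds one default ValidatorFactory per bundle via Validation.buildDefaultValidatorFactory(). A minimal, standalone sketch of that API, assuming the jakarta.validation namespace used alongside jakarta.persistence here and a Bean Validation provider such as Hibernate Validator on the classpath:

    import jakarta.validation.Validation;
    import jakarta.validation.Validator;
    import jakarta.validation.ValidatorFactory;

    public class DefaultValidatorFactorySketch {
        public static void main(String[] args) {
            // Build the default ValidatorFactory once, mirroring the per-bundle caching above.
            ValidatorFactory validatorFactory = Validation.buildDefaultValidatorFactory();
            try {
                // The persistence provider uses a Validator from this factory for entity validation.
                Validator validator = validatorFactory.getValidator();
                System.out.println("Obtained validator: " + validator);
            } finally {
                // Close the factory when it is no longer needed (e.g. on undeploy).
                validatorFactory.close();
            }
        }
    }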
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/EclipseLinkSchemaGenerationProcessor.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/EclipseLinkSchemaGenerationProcessor.java
index 8f6128e..593d09d 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/EclipseLinkSchemaGenerationProcessor.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/EclipseLinkSchemaGenerationProcessor.java
@@ -16,49 +16,48 @@
 
 package org.glassfish.persistence.jpa.schemageneration;
 
-import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
-
-import org.glassfish.api.deployment.DeploymentContext;
-import org.glassfish.persistence.common.*;
+import static org.glassfish.persistence.common.DatabaseConstants.CREATE_DDL_JDBC_FILE_SUFFIX;
+import static org.glassfish.persistence.common.DatabaseConstants.DROP_DDL_JDBC_FILE_SUFFIX;
 
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import com.sun.logging.LogDomains;
 
+import org.glassfish.api.deployment.DeploymentContext;
+import org.glassfish.persistence.common.DatabaseConstants;
+import org.glassfish.persistence.common.Java2DBProcessorHelper;
+
+import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
+import com.sun.logging.LogDomains;
 
 import jakarta.persistence.spi.PersistenceUnitTransactionType;
 
 /**
- * SchemaGenerationProcessor that handles schema generation while
- * running against EclipseLink in pre JPA 2.1 mode
- * For each persistence unit descriptors that is defined for
- * an application create the ddl scripts. Additionally if the
- * user has requested the tables to be created or dropped from
- * the database complete that action too.
+ * SchemaGenerationProcessor that handles schema generation while running
+ * against EclipseLink in pre JPA 2.1 mode. For each persistence unit descriptor
+ * that is defined for an application, create the DDL scripts. Additionally, if
+ * the user has requested the tables to be created or dropped from the database,
+ * complete that action too.
  *
- * These are the principles and expectations of the implementation.
- * We don't want TopLink code to execute the DDLs, it should only
- * generate them. So, we always set the *generation-mode* to *script*
- * in the PUInfo object before passing it to createContainerEMF().
- * As a result TopLink never creates the actual tables, nor does it drop
- * them. The DDLs are executed by our code based on user preference which
- * considers inputs from persistence.xml and CLI. We set the TopLink
- * property to DROP_AND_CREATE in that map because we want it to always
- * generate both create- and dropDDL.jdbc files.
+ * These are the principles and expectations of the implementation. We don't
+ * want TopLink code to execute the DDLs; it should only generate them. So, we
+ * always set the *generation-mode* to *script* in the PUInfo object before
+ * passing it to createContainerEMF(). As a result TopLink never creates the
+ * actual tables, nor does it drop them. The DDLs are executed by our code based
+ * on user preference which considers inputs from persistence.xml and CLI. We
+ * set the TopLink property to DROP_AND_CREATE in that map because we want it to
+ * always generate both create- and dropDDL.jdbc files.
+ *
  * @author pramodg
  */
 public class EclipseLinkSchemaGenerationProcessor implements SchemaGenerationProcessor {
 
     // Defining the persistence provider class names here that we would use to
     // check if schema generation is supported.
-    private static final String TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_OLD =
-        "oracle.toplink.essentials.ejb.cmp3.EntityManagerFactoryProvider"; // NOI18N
-    private static final String TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_NEW =
-        "oracle.toplink.essentials.PersistenceProvider"; // NOI18N
-    private static final String ECLIPSELINK_PERSISTENCE_PROVIDER_CLASS_NAME =
-        "org.eclipse.persistence.jpa.PersistenceProvider"; // NOI18N
+    private static final String TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_OLD = "oracle.toplink.essentials.ejb.cmp3.EntityManagerFactoryProvider";
+    private static final String TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_NEW = "oracle.toplink.essentials.PersistenceProvider";
+    private static final String ECLIPSELINK_PERSISTENCE_PROVIDER_CLASS_NAME = "org.eclipse.persistence.jpa.PersistenceProvider";
 
     // Constants for various property values.
 
@@ -66,29 +65,29 @@
     // oracle.toplink.essentials.ejb.cmp3.EntityManagerFactoryProvider
     // and org.eclipse.persistence.jpa.config.PersistenceUnitProperties
     // This code assumes that the value of constant at both the place is same
-    private static final String CREATE_ONLY             = "create-tables"; //NOI18N
-    private static final String DROP_AND_CREATE         = "drop-and-create-tables"; //NOI18N
-    private static final String NONE                    = "none"; //NOI18N
+    private static final String CREATE_ONLY = "create-tables"; // NOI18N
+    private static final String DROP_AND_CREATE = "drop-and-create-tables"; // NOI18N
+    private static final String NONE = "none"; // NOI18N
 
-    private static final String DDL_BOTH_GENERATION     = "both"; //NOI18N
-    private static final String DDL_DATABASE_GENERATION = "database"; //NOI18N
-    private static final String DDL_SQL_SCRIPT_GENERATION = "sql-script"; //NOI18N
+    private static final String DDL_BOTH_GENERATION = "both"; // NOI18N
+    private static final String DDL_DATABASE_GENERATION = "database"; // NOI18N
+    private static final String DDL_SQL_SCRIPT_GENERATION = "sql-script"; // NOI18N
 
     // property names for Toplink and EclipseLink
-    private static final String TOPLINK_DDL_GENERATION     = "toplink.ddl-generation"; // NOI18N
-    private static final String ECLIPSELINK_DDL_GENERATION = "eclipselink.ddl-generation"; // NOI18N
+    private static final String TOPLINK_DDL_GENERATION = "toplink.ddl-generation";
+    private static final String ECLIPSELINK_DDL_GENERATION = "eclipselink.ddl-generation";
 
-    private static final String TOPLINK_DDL_GENERATION_OUTPUT_MODE = "toplink.ddl-generation.output-mode"; // NOI18N
-    private static final String ECLIPSELINK_DDL_GENERATION_OUTPUT_MODE = "eclipselink.ddl-generation.output-mode"; // NOI18N
+    private static final String TOPLINK_DDL_GENERATION_OUTPUT_MODE = "toplink.ddl-generation.output-mode";
+    private static final String ECLIPSELINK_DDL_GENERATION_OUTPUT_MODE = "eclipselink.ddl-generation.output-mode";
 
-    private static final String TOPLINK_APP_LOCATION         = "toplink.application-location"; // NOI18N
-    private static final String ECLIPSELINK_APP_LOCATION     = "eclipselink.application-location"; // NOI18N
+    private static final String TOPLINK_APP_LOCATION = "toplink.application-location";
+    private static final String ECLIPSELINK_APP_LOCATION = "eclipselink.application-location";
 
-    private static final String TOPLINK_CREATE_JDBC_DDL_FILE     = "toplink.create-ddl-jdbc-file-name"; // NOI18N
-    private static final String ECLIPSELINK_CREATE_JDBC_DDL_FILE = "eclipselink.create-ddl-jdbc-file-name"; // NOI18N
+    private static final String TOPLINK_CREATE_JDBC_DDL_FILE = "toplink.create-ddl-jdbc-file-name";
+    private static final String ECLIPSELINK_CREATE_JDBC_DDL_FILE = "eclipselink.create-ddl-jdbc-file-name";
 
-    private static final String TOPLINK_DROP_JDBC_DDL_FILE       = "toplink.drop-ddl-jdbc-file-name"; // NOI18N
-    private static final String ECLIPSELINK_DROP_JDBC_DDL_FILE   = "eclipselink.drop-ddl-jdbc-file-name"; // NOI18N
+    private static final String TOPLINK_DROP_JDBC_DDL_FILE = "toplink.drop-ddl-jdbc-file-name";
+    private static final String ECLIPSELINK_DROP_JDBC_DDL_FILE = "eclipselink.drop-ddl-jdbc-file-name";
 
     private static Logger logger = LogDomains.getLogger(EclipseLinkSchemaGenerationProcessor.class, LogDomains.PERSISTENCE_LOGGER);
 
@@ -104,7 +103,8 @@
     private boolean isSchemaGenerationPU;
 
     /**
-     * Creates a new instance of EclipseLinkSchemaGenerationProcessor using Java2DBProcessorHelper
+     * Creates a new instance of EclipseLinkSchemaGenerationProcessor using
+     * Java2DBProcessorHelper
      */
     public EclipseLinkSchemaGenerationProcessor(String persistenceProviderClassName) {
         initializeProviderPropertyHolder(persistenceProviderClassName);
@@ -119,45 +119,38 @@
         this.helper = new Java2DBProcessorHelper(context);
         this.helper.init();
 
-        String ddlGenerate = getPersistencePropVal(pud,
-                providerPropertyNamesHolder.ddlGeneration, NONE);
-        String ddlMode = getPersistencePropVal(pud,
-                providerPropertyNamesHolder.ddlGenerationOutputMode, DDL_BOTH_GENERATION);
+        String ddlGenerate = getPersistencePropVal(pud, providerPropertyNamesHolder.ddlGeneration, NONE);
+        String ddlMode = getPersistencePropVal(pud, providerPropertyNamesHolder.ddlGenerationOutputMode, DDL_BOTH_GENERATION);
 
         // If CLI options are not set, use value from the the ddl-generate property
         // if defined in persistence.xml
-        boolean userCreateTables = (ddlGenerate.equals(CREATE_ONLY)
-                || ddlGenerate.equals(DROP_AND_CREATE))
-                && !ddlMode.equals(NONE);
+        boolean userCreateTables = (ddlGenerate.equals(CREATE_ONLY) || ddlGenerate.equals(DROP_AND_CREATE)) && !ddlMode.equals(NONE);
 
         boolean createTables = helper.getCreateTables(userCreateTables);
 
         boolean userDropTables = ddlGenerate.equals(DROP_AND_CREATE)
-                && (ddlMode.equals(DDL_DATABASE_GENERATION)
-                || ddlMode.equals(DDL_BOTH_GENERATION));
+                && (ddlMode.equals(DDL_DATABASE_GENERATION) || ddlMode.equals(DDL_BOTH_GENERATION));
 
         if (logger.isLoggable(Level.FINE)) {
-            logger.fine("Processing request with create tables: " + createTables //NOI18N
-                    + ", drop tables: " + userDropTables); //NOI18N
+            logger.fine("Processing request with create tables: " + createTables // NOI18N
+                    + ", drop tables: " + userDropTables); // NOI18N
         }
 
         if (createTables || userDropTables) {
-            helper.setProcessorType("JPA", pud.getName()); // NOI18N
+            helper.setProcessorType("JPA", pud.getName());
             helper.setDropTablesValue(userDropTables, pud.getName());
-            helper.setCreateTablesValue(userCreateTables && !ddlMode.equals(DDL_SQL_SCRIPT_GENERATION),
-                    pud.getName());
+            helper.setCreateTablesValue(userCreateTables && !ddlMode.equals(DDL_SQL_SCRIPT_GENERATION), pud.getName());
 
-
-            // For a RESOURCE_LOCAL, managed pu, only non-jta-data-source should be specified.
-            String dataSourceName =
-                    (PersistenceUnitTransactionType.JTA == PersistenceUnitTransactionType.valueOf(pud.getTransactionType())) ?
-                            pud.getJtaDataSource() : pud.getNonJtaDataSource();
+            // For a RESOURCE_LOCAL managed PU, only the non-jta-data-source should be
+            // specified.
+            String dataSourceName = (PersistenceUnitTransactionType.JTA == PersistenceUnitTransactionType.valueOf(pud.getTransactionType()))
+                    ? pud.getJtaDataSource()
+                    : pud.getNonJtaDataSource();
             helper.setJndiName(dataSourceName, pud.getName());
             constructJdbcFileNames(pud);
             if (logger.isLoggable(Level.FINE)) {
-                logger.fine("Processing request to create files - create file: " + //NOI18N
-                        helper.getCreateJdbcFileName(pud.getName())
-                        + ", drop  file: " + //NOI18N
+                logger.fine("Processing request to create files - create file: " + // NOI18N
+                        helper.getCreateJdbcFileName(pud.getName()) + ", drop  file: " + // NOI18N
                         helper.getDropJdbcFileName(pud.getName()));
             }
 
@@ -167,7 +160,6 @@
         }
     }
 
-
     @Override
     public Map<String, Object> getOverridesForSchemaGeneration() {
         return overrides;
@@ -180,7 +172,6 @@
         return overridesForSuppressingSchemaGeneration;
     }
 
-
     @Override
     public boolean isContainerDDLExecutionRequired() {
         // DDL execution is required if this is a schema generation pu
@@ -193,8 +184,8 @@
         providerPropertyNamesHolder = new ProviderPropertyNamesHolder();
 
         // Override with TLE names if running against TLE
-        if (TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_NEW.equals(providerClassName) ||
-                TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_OLD.equals(providerClassName)) {
+        if (TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_NEW.equals(providerClassName)
+                || TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_OLD.equals(providerClassName)) {
             // for backward compatibility
             providerPropertyNamesHolder.appLocation = TOPLINK_APP_LOCATION;
             providerPropertyNamesHolder.createJdbcDdlFile = TOPLINK_CREATE_JDBC_DDL_FILE;
@@ -205,36 +196,29 @@
     }
 
     /**
-     * Construct the name of the create and
-     * drop jdbc ddl files that would be
-     * created. These name would be either
-     * obtained from the persistence.xml file
-     * (if the user has defined them) or we would
-     * create default filenames
+     * Construct the names of the create and drop JDBC DDL files that would be
+     * created. These names would either be obtained from the persistence.xml file
+     * (if the user has defined them) or we would create default filenames.
+     *
      * @param parBundle the persistence unit descriptor that is being worked on.
      */
-    private void constructJdbcFileNames(PersistenceUnitDescriptor parBundle)  {
-        String createJdbcFileName =
-                getPersistencePropVal(parBundle,
-                providerPropertyNamesHolder.createJdbcDdlFile, null);
-        String dropJdbcFileName =
-                getPersistencePropVal(parBundle,
-                providerPropertyNamesHolder.dropJdbcDdlFile, null);
+    private void constructJdbcFileNames(PersistenceUnitDescriptor parBundle) {
+        String createJdbcFileName = getPersistencePropVal(parBundle, providerPropertyNamesHolder.createJdbcDdlFile, null);
+        String dropJdbcFileName = getPersistencePropVal(parBundle, providerPropertyNamesHolder.dropJdbcDdlFile, null);
 
-        if((null != createJdbcFileName) && (null != dropJdbcFileName)) {
+        if (createJdbcFileName != null && dropJdbcFileName != null) {
             return;
         }
 
-        String filePrefix =
-                    Java2DBProcessorHelper.getDDLNamePrefix(parBundle.getParent().getParent());
+        String filePrefix = Java2DBProcessorHelper.getDDLNamePrefix(parBundle.getParent().getParent());
 
-        if(null == createJdbcFileName) {
-            createJdbcFileName = filePrefix + DatabaseConstants.NAME_SEPARATOR + parBundle.getName() +
-                DatabaseConstants.CREATE_DDL_JDBC_FILE_SUFFIX;
+        if (createJdbcFileName == null) {
+            createJdbcFileName = filePrefix + DatabaseConstants.NAME_SEPARATOR + parBundle.getName()
+                    + CREATE_DDL_JDBC_FILE_SUFFIX;
         }
-        if(null == dropJdbcFileName) {
-            dropJdbcFileName = filePrefix + DatabaseConstants.NAME_SEPARATOR + parBundle.getName() +
-                DatabaseConstants.DROP_DDL_JDBC_FILE_SUFFIX;
+        if (dropJdbcFileName == null) {
+            dropJdbcFileName = filePrefix + DatabaseConstants.NAME_SEPARATOR + parBundle.getName()
+                    + DROP_DDL_JDBC_FILE_SUFFIX;
         }
 
         helper.setCreateJdbcFileName(createJdbcFileName, parBundle.getName());
@@ -242,13 +226,13 @@
     }
 
     /**
-     * This method is called after the jdbc files have been created.
-     * Iterate over all created jdbc ddl files and
-     * execute it against the database to have the required objects created.
+     * This method is called after the JDBC files have been created. Iterate over
+     * all created JDBC DDL files and execute them against the database to have the
+     * required objects created.
      */
     @Override
     public void executeCreateDDL() {
-        helper.createOrDropTablesInDB(true, "JPA"); // NOI18N
+        helper.createOrDropTablesInDB(true, "JPA");
     }
 
     private void addSchemaGenerationPropertiesToOverrides(PersistenceUnitDescriptor puDescriptor, Map<String, Object> overrides) {
@@ -259,69 +243,67 @@
         addPropertyToOverride(puDescriptor, overrides, providerPropertyNamesHolder.dropJdbcDdlFile,
                 helper.getDropJdbcFileName(puDescriptor.getName()));
 
-        // The puDescriptor might not have this property if schema generation is triggered by deployment CLI override
-        addPropertyToOverride(puDescriptor, overrides,
-                providerPropertyNamesHolder.ddlGeneration, DROP_AND_CREATE);
+        // The puDescriptor might not have this property if schema generation is
+        // triggered by a deployment CLI override.
+        addPropertyToOverride(puDescriptor, overrides, providerPropertyNamesHolder.ddlGeneration, DROP_AND_CREATE);
+
         // If we are doing schema generation, we want DDL scripts to be generated
-        addPropertyToOverride(puDescriptor, overrides,
-                providerPropertyNamesHolder.ddlGenerationOutputMode, DDL_SQL_SCRIPT_GENERATION);
+        addPropertyToOverride(puDescriptor, overrides, providerPropertyNamesHolder.ddlGenerationOutputMode, DDL_SQL_SCRIPT_GENERATION);
 
     }
 
     /**
-     * Utility method that is used to actually set the property into the persistence unit descriptor.
+     * Utility method that adds the given property to the overrides map, unless the
+     * user has already specified it in the persistence unit descriptor.
+     *
      * @param descriptor the persistence unit descriptor that is being worked on.
      * @param propertyName the name of the property.
      * @param propertyValue the value of the property.
      */
-    private static void addPropertyToOverride(PersistenceUnitDescriptor descriptor, Map<String, Object> overrides,
-                                       String propertyName, String propertyValue) {
+    private static void addPropertyToOverride(PersistenceUnitDescriptor descriptor, Map<String, Object> overrides, String propertyName, String propertyValue) {
         String oldPropertyValue = descriptor.getProperties().getProperty(propertyName);
-        if(null == oldPropertyValue) { //Do not override any value explicitly specified by the user
+        if (oldPropertyValue == null) { // Do not override any value explicitly specified by the user
             overrides.put(propertyName, propertyValue);
         }
     }
 
     /**
-     * Given a persistence unit descriptor
-     * return the value of a property if the
-     * user has specified it.
-     * If the user has not defined this property
-     * return the default value.
+     * Given a persistence unit descriptor, return the value of a property if the
+     * user has specified it. If the user has not defined this property, return the
+     * default value.
+     *
      * @param parBundle the persistence unit descriptor that is being worked on.
      * @param propertyName the property name being checked.
      * @param defaultValue the default value to be used.
      * @return the property value.
      */
-    private String getPersistencePropVal(PersistenceUnitDescriptor parBundle,
-            String propertyName, String defaultValue) {
+    private String getPersistencePropVal(PersistenceUnitDescriptor parBundle, String propertyName, String defaultValue) {
         return parBundle.getProperties().getProperty(propertyName, defaultValue);
     }
 
     /**
-     * This processor only supports EclipseLink, the default
-     * persistence povider in glassfish; or Toplink, the default provder for GF 2.x.
+     * This processor only supports EclipseLink, the default persistence provider in
+     * GlassFish; or TopLink, the default provider for GF 2.x.
      *
      * @return true if persistence provider is EclipseLink or Toplink.
      */
     public static boolean isSupportedPersistenceProvider(final String providerClassName) {
 
-        return providerClassName.equals(TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_OLD) ||
-                providerClassName.equals(TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_NEW) ||
-                providerClassName.equals(ECLIPSELINK_PERSISTENCE_PROVIDER_CLASS_NAME);
+        return providerClassName.equals(TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_OLD)
+                || providerClassName.equals(TOPLINK_PERSISTENCE_PROVIDER_CLASS_NAME_NEW)
+                || providerClassName.equals(ECLIPSELINK_PERSISTENCE_PROVIDER_CLASS_NAME);
     }
 
-
     /**
      * Holds names of provider specific property
      */
     private static class ProviderPropertyNamesHolder {
         // Initialize property names with EL specific properties
-            String appLocation       = ECLIPSELINK_APP_LOCATION;
-            String createJdbcDdlFile = ECLIPSELINK_CREATE_JDBC_DDL_FILE;
-            String dropJdbcDdlFile   = ECLIPSELINK_DROP_JDBC_DDL_FILE;
-            String ddlGeneration     = ECLIPSELINK_DDL_GENERATION;
-            String ddlGenerationOutputMode = ECLIPSELINK_DDL_GENERATION_OUTPUT_MODE;
+        String appLocation = ECLIPSELINK_APP_LOCATION;
+        String createJdbcDdlFile = ECLIPSELINK_CREATE_JDBC_DDL_FILE;
+        String dropJdbcDdlFile = ECLIPSELINK_DROP_JDBC_DDL_FILE;
+        String ddlGeneration = ECLIPSELINK_DDL_GENERATION;
+        String ddlGenerationOutputMode = ECLIPSELINK_DDL_GENERATION_OUTPUT_MODE;
     }
 
 }
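The class comment above describes the approach: EclipseLink (or TopLink Essentials) is forced into script-only generation with drop-and-create, and the container executes the resulting DDL files itself. A minimal sketch of the kind of override map addSchemaGenerationPropertiesToOverrides() assembles, using the eclipselink.* property names defined above; the location and file names below are hypothetical placeholders, since the real values come from Java2DBProcessorHelper:

    import java.util.HashMap;
    import java.util.Map;

    public class EclipseLinkOverridesSketch {
        public static void main(String[] args) {
            Map<String, Object> overrides = new HashMap<>();
            // Always generate both create and drop scripts, but never touch the database directly.
            overrides.put("eclipselink.ddl-generation", "drop-and-create-tables");
            overrides.put("eclipselink.ddl-generation.output-mode", "sql-script");
            // Hypothetical location and file names; Java2DBProcessorHelper supplies the real ones.
            overrides.put("eclipselink.application-location", "/tmp/generated-ddl");
            overrides.put("eclipselink.create-ddl-jdbc-file-name", "myAppPU_createDDL.jdbc");
            overrides.put("eclipselink.drop-ddl-jdbc-file-name", "myAppPU_dropDDL.jdbc");

            overrides.forEach((name, value) -> System.out.println(name + " = " + value));
        }
    }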
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/JPAStandardSchemaGenerationProcessor.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/JPAStandardSchemaGenerationProcessor.java
index 5fc8d53..28bf26f 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/JPAStandardSchemaGenerationProcessor.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/JPAStandardSchemaGenerationProcessor.java
@@ -16,16 +16,17 @@
 
 package org.glassfish.persistence.jpa.schemageneration;
 
-import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
-import org.glassfish.api.deployment.DeploymentContext;
-
 import java.io.CharArrayReader;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.glassfish.api.deployment.DeploymentContext;
+
+import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
 
 /**
  * Schema generation processor while using standard JPA based schema generation
+ *
  * @author Mitesh Meswani
  */
 public class JPAStandardSchemaGenerationProcessor implements SchemaGenerationProcessor {
@@ -42,7 +43,8 @@
 
     @Override
     public Map<String, Object> getOverridesForSchemaGeneration() {
-        // No override is needed now. When we wire in taking schema generation overrides from deploy CLI, this method will return corresponding overrides.
+        // No override is needed now. When we wire in taking schema generation overrides
+        // from the deploy CLI, this method will return the corresponding overrides.
         return null;
     }
 
@@ -51,7 +53,7 @@
         Map<String, Object> overrides = new HashMap<>();
 
         overrides.put(SCHEMA_GENERATION_DATABASE_ACTION_PROPERTY, SCHEMA_GENERATION_ACTION_NONE); // suppress database action
-        overrides.put(SCHEMA_GENERATION_SCRIPTS_ACTION_PROPERTY, SCHEMA_GENERATION_ACTION_NONE);  // suppress script action
+        overrides.put(SCHEMA_GENERATION_SCRIPTS_ACTION_PROPERTY, SCHEMA_GENERATION_ACTION_NONE); // suppress script action
         overrides.put(SQL_LOAD_SCRIPT_SOURCE, new CharArrayReader(new char[0])); // suppress execution of load scripts
 
         return overrides;
@@ -65,7 +67,8 @@
 
     @Override
     public void executeCreateDDL() {
-        // We should never reach here as this processor returns false for isContainerDDLExecutionRequired()
+        // We should never reach here as this processor returns false for
+        // isContainerDDLExecutionRequired()
         throw new UnsupportedOperationException();
     }
 }
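For contrast with the overrides above, which suppress the database and script actions, a short sketch of the standard Jakarta Persistence schema generation properties a persistence unit would set when the provider itself should generate the schema. The property names follow the jakarta.persistence.schema-generation prefix checked by the factory below; the values shown are only illustrative:

    import java.util.HashMap;
    import java.util.Map;

    public class StandardSchemaGenerationPropertiesSketch {
        public static void main(String[] args) {
            Map<String, Object> properties = new HashMap<>();
            // Ask the provider to drop and re-create the schema in the database ...
            properties.put("jakarta.persistence.schema-generation.database.action", "drop-and-create");
            // ... and skip writing DDL scripts.
            properties.put("jakarta.persistence.schema-generation.scripts.action", "none");

            properties.forEach((name, value) -> System.out.println(name + " = " + value));
        }
    }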
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/SchemaGenerationProcessor.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/SchemaGenerationProcessor.java
index 4869be4..1db4f9d 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/SchemaGenerationProcessor.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/SchemaGenerationProcessor.java
@@ -16,13 +16,15 @@
 
 package org.glassfish.persistence.jpa.schemageneration;
 
-import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
+import java.util.Map;
+
 import org.glassfish.api.deployment.DeploymentContext;
 
-import java.util.Map;
+import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
 
 /**
  * Processor for schema generation
+ *
  * @author Mitesh Meswani
  */
 public interface SchemaGenerationProcessor {
@@ -38,9 +40,10 @@
     Map<String, Object> getOverridesForSchemaGeneration();
 
     /**
-     @return overrides that will be supplied to EMF creation for suppressing schema generation
+     * @return overrides that will be supplied to EMF creation for suppressing
+     * schema generation
      */
-    Map<String,Object> getOverridesForSuppressingSchemaGeneration();
+    Map<String, Object> getOverridesForSuppressingSchemaGeneration();
 
     /**
      * @return whether ddl needs to be executed by container
diff --git a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/SchemaGenerationProcessorFactory.java b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/SchemaGenerationProcessorFactory.java
index 276bc16..1bc2e65 100644
--- a/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/SchemaGenerationProcessorFactory.java
+++ b/appserver/persistence/jpa-container/src/main/java/org/glassfish/persistence/jpa/schemageneration/SchemaGenerationProcessorFactory.java
@@ -16,49 +16,53 @@
 
 package org.glassfish.persistence.jpa.schemageneration;
 
-import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
+import static org.glassfish.persistence.jpa.schemageneration.EclipseLinkSchemaGenerationProcessor.isSupportedPersistenceProvider;
+
 import org.glassfish.persistence.jpa.PersistenceUnitInfoImpl;
 
+import com.sun.enterprise.deployment.PersistenceUnitDescriptor;
 
 /**
  * Factory for creating SchemaGenerationProcessor
- * @author  Mitesh Meswani
+ *
+ * @author Mitesh Meswani
  */
 public class SchemaGenerationProcessorFactory {
 
-    /**
-     * @return EclipseLink specific schema generation iff provider is EclipseLink or Toplink, and user has not specified
-     * any standard JPA schema generation property else return JPAStandardSchemaGenerationProcessor
-     */
-    public static SchemaGenerationProcessor createSchemaGenerationProcessor(PersistenceUnitDescriptor pud) {
-        // We use
+    private static final String STANDARD_SCHEMA_GENERATION_PREFIX = "jakarta.persistence.schema-generation";
 
-        String providerClassName = PersistenceUnitInfoImpl.getPersistenceProviderClassNameForPuDesc(pud);
+    /**
+     * @return an EclipseLink-specific schema generation processor iff the provider is
+     * EclipseLink or TopLink and the user has not specified any standard JPA schema
+     * generation property; else return a JPAStandardSchemaGenerationProcessor
+     */
+    public static SchemaGenerationProcessor createSchemaGenerationProcessor(PersistenceUnitDescriptor persistenceUnitDescriptor) {
+        String providerClassName = PersistenceUnitInfoImpl.getPersistenceProviderClassNameForPuDesc(persistenceUnitDescriptor);
 
         boolean useJPA21Processor = true;
 
-        if(EclipseLinkSchemaGenerationProcessor.isSupportedPersistenceProvider(providerClassName) ) {
-           if(!containsStandardSchemaGenerationProperty(pud)) {
-               useJPA21Processor = false;
-           }
+        if (isSupportedPersistenceProvider(providerClassName)) {
+            if (!containsStandardSchemaGenerationProperty(persistenceUnitDescriptor)) {
+                useJPA21Processor = false;
+            }
         }
 
         return useJPA21Processor ? new JPAStandardSchemaGenerationProcessor() : new EclipseLinkSchemaGenerationProcessor(providerClassName);
     }
 
-    private static final String STANDARD_SCHEMA_GENERATION_PREFIX = "jakarta.persistence.schema-generation";
-
     /**
-     * @return true if the given <code>pud</code> contains a JPA standard property for schema generation
+     * @return true if the given <code>persistenceUnitDescriptor</code> contains a
+     * JPA standard property for schema generation
      */
-    private static boolean containsStandardSchemaGenerationProperty(PersistenceUnitDescriptor pud) {
+    private static boolean containsStandardSchemaGenerationProperty(PersistenceUnitDescriptor persistenceUnitDescriptor) {
         boolean containsStandardSchemaGenerationProperty = false;
-        for (Object puPropertyName : pud.getProperties().keySet()) {
-            if(puPropertyName instanceof String && String.class.cast(puPropertyName).startsWith(STANDARD_SCHEMA_GENERATION_PREFIX) ) {
+        for (Object puPropertyName : persistenceUnitDescriptor.getProperties().keySet()) {
+            if (puPropertyName instanceof String && String.class.cast(puPropertyName).startsWith(STANDARD_SCHEMA_GENERATION_PREFIX)) {
                 containsStandardSchemaGenerationProperty = true;
                 break;
             }
         }
+
         return containsStandardSchemaGenerationProperty;
     }
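To make the routing above concrete, a small runnable sketch of the same prefix check performed by containsStandardSchemaGenerationProperty(): any persistence unit property whose name starts with jakarta.persistence.schema-generation sends the unit to the standard JPA processor, even when EclipseLink or TopLink is the provider. The property values here are only examples:

    import java.util.Properties;

    public class StandardPropertyDetectionSketch {

        private static final String STANDARD_SCHEMA_GENERATION_PREFIX = "jakarta.persistence.schema-generation";

        public static void main(String[] args) {
            Properties persistenceUnitProperties = new Properties();
            persistenceUnitProperties.setProperty("eclipselink.ddl-generation", "create-tables");
            persistenceUnitProperties.setProperty("jakarta.persistence.schema-generation.database.action", "create");

            // Same check as containsStandardSchemaGenerationProperty(): the presence of any
            // standard schema generation property selects the JPA 2.1+ standard processor.
            boolean usesStandardSchemaGeneration = persistenceUnitProperties.keySet().stream()
                    .anyMatch(name -> name instanceof String
                            && ((String) name).startsWith(STANDARD_SCHEMA_GENERATION_PREFIX));

            System.out.println("Use standard JPA schema generation processor: " + usesStandardSchemaGeneration);
        }
    }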