From e50fdc7a922dbf1ea4140d8b7f061734cec78f71 Mon Sep 17 00:00:00 2001 From: Arturo Bernal Date: Mon, 11 Sep 2023 22:58:05 +0200 Subject: [PATCH 01/10] [JSPWIKI-1178] Address potential deadlock with JDK 21 Virtual Threads. Refactored synchronized blocks/methods containing blocking operations to prevent potential deadlocks introduced by the upcoming Virtual Threads feature in JDK 21. --- .../apache/wiki/event/WikiEventManager.java | 70 ++++- .../search/kendra/KendraSearchProvider.java | 17 +- .../main/java/org/apache/wiki/WatchDog.java | 49 +++- .../main/java/org/apache/wiki/WikiEngine.java | 105 +++++--- .../auth/DefaultAuthenticationManager.java | 34 ++- .../auth/DefaultAuthorizationManager.java | 30 ++- .../apache/wiki/auth/DefaultUserManager.java | 34 ++- .../org/apache/wiki/auth/SessionMonitor.java | 61 ++++- .../apache/wiki/auth/acl/AclEntryImpl.java | 73 ++++-- .../org/apache/wiki/auth/acl/AclImpl.java | 44 +++- .../auth/authorize/DefaultGroupManager.java | 58 ++++- .../org/apache/wiki/auth/authorize/Group.java | 130 +++++++--- .../CookieAuthenticationLoginModule.java | 47 ++-- .../auth/permissions/PermissionFactory.java | 25 +- .../wiki/auth/user/XMLUserDatabase.java | 241 ++++++++++-------- .../wiki/diff/ContextualDiffProvider.java | 74 ++++-- .../apache/wiki/filters/PageEventFilter.java | 33 ++- .../org/apache/wiki/filters/SpamFilter.java | 163 +++++++----- .../apache/wiki/plugin/BugReportHandler.java | 40 ++- .../apache/wiki/plugin/PageViewPlugin.java | 118 ++++++--- .../providers/CachingAttachmentProvider.java | 42 ++- .../wiki/providers/CachingProvider.java | 61 ++++- .../providers/VersioningFileProvider.java | 168 ++++++------ .../references/DefaultReferenceManager.java | 239 +++++++++-------- .../apache/wiki/rss/DefaultRSSGenerator.java | 30 ++- .../wiki/search/LuceneSearchProvider.java | 93 +++++-- .../ui/admin/beans/SearchManagerBean.java | 79 +++--- .../apache/wiki/workflow/AbstractStep.java | 87 +++++-- .../org/apache/wiki/workflow/Decision.java | 26 +- .../apache/wiki/workflow/DecisionQueue.java | 50 +++- .../wiki/workflow/DefaultWorkflowManager.java | 94 ++++--- .../java/org/apache/wiki/workflow/Task.java | 28 +- .../org/apache/wiki/workflow/Workflow.java | 210 +++++++++------ jspwiki-main/src/test/resources/wkflmgr.ser | Bin 3770 -> 4030 bytes .../apache/wiki/util/CommentedProperties.java | 124 ++++++--- 35 files changed, 1904 insertions(+), 873 deletions(-) diff --git a/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java b/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java index 635ec6e444..0aedc2659a 100644 --- a/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java +++ b/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java @@ -33,6 +33,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Set; import java.util.TreeSet; import java.util.Vector; +import java.util.concurrent.locks.ReentrantLock; /** * A singleton class that manages the addition and removal of WikiEvent listeners to a event source, as well as the firing of events @@ -141,6 +142,17 @@ public final class WikiEventManager { /* Singleton instance of the WikiEventManager. */ private static WikiEventManager c_instance; + /** + * A lock used to ensure thread safety when accessing shared resources. 
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private static final ReentrantLock lock = new ReentrantLock(); + + /** Constructor for a WikiEventManager. */ private WikiEventManager() { c_instance = this; @@ -154,10 +166,15 @@ private WikiEventManager() { * @return A shared instance of the WikiEventManager */ public static WikiEventManager getInstance() { - if( c_instance == null ) { - synchronized( WikiEventManager.class ) { - return new WikiEventManager(); - // start up any post-instantiation services here + if (c_instance == null) { + lock.lock(); + try { + if (c_instance == null) { + c_instance = new WikiEventManager(); + // start up any post-instantiation services here + } + } finally { + lock.unlock(); } } return c_instance; @@ -242,7 +259,8 @@ public static boolean removeWikiEventListener( final WikiEventListener listener // get the Map.entry object for the entire Map, then check match on entry (listener) final WikiEventManager mgr = getInstance(); final Map< Object, WikiEventDelegate > sources = Collections.synchronizedMap( mgr.getDelegates() ); - synchronized( sources ) { + lock.lock(); + try { // get an iterator over the Map.Enty objects in the map for( final Map.Entry< Object, WikiEventDelegate > entry : sources.entrySet() ) { // the entry value is the delegate @@ -253,16 +271,24 @@ public static boolean removeWikiEventListener( final WikiEventListener listener removed = true; // was removed } } + } finally { + lock.unlock(); } return removed; } private void removeDelegates() { - synchronized( m_delegates ) { + lock.lock(); + try { m_delegates.clear(); + } finally { + lock.unlock(); } - synchronized( m_preloadCache ) { + lock.lock(); + try { m_preloadCache.clear(); + } finally { + lock.unlock(); } } @@ -315,7 +341,8 @@ private Map< Object, WikiEventDelegate > getDelegates() { * @return the WikiEventDelegate. 
*/ private WikiEventDelegate getDelegateFor( final Object client ) { - synchronized( m_delegates ) { + lock.lock(); + try { if( client == null || client instanceof Class ) { // then preload the cache final WikiEventDelegate delegate = new WikiEventDelegate( client ); m_preloadCache.add( delegate ); @@ -342,6 +369,8 @@ private WikiEventDelegate getDelegateFor( final Object client ) { m_delegates.put( client, delegate ); } return delegate; + } finally { + lock.unlock(); } } @@ -386,7 +415,8 @@ private static final class WikiEventDelegate { * @throws java.lang.UnsupportedOperationException if any attempt is made to modify the Set */ public Set< WikiEventListener > getWikiEventListeners() { - synchronized( m_listenerList ) { + lock.lock(); + try { final TreeSet< WikiEventListener > set = new TreeSet<>( new WikiEventListenerComparator() ); for( final WeakReference< WikiEventListener > wikiEventListenerWeakReference : m_listenerList ) { final WikiEventListener l = wikiEventListenerWeakReference.get(); @@ -396,6 +426,8 @@ public Set< WikiEventListener > getWikiEventListeners() { } return Collections.unmodifiableSet( set ); + } finally { + lock.unlock(); } } @@ -406,13 +438,16 @@ public Set< WikiEventListener > getWikiEventListeners() { * @return true if the listener was added (i.e., it was not already in the list and was added) */ public boolean addWikiEventListener( final WikiEventListener listener ) { - synchronized( m_listenerList ) { + lock.lock(); + try { final boolean listenerAlreadyContained = m_listenerList.stream() .map( WeakReference::get ) .anyMatch( ref -> ref == listener ); if( !listenerAlreadyContained ) { return m_listenerList.add( new WeakReference<>( listener ) ); } + } finally { + lock.unlock(); } return false; } @@ -424,7 +459,8 @@ public boolean addWikiEventListener( final WikiEventListener listener ) { * @return true if the listener was removed (i.e., it was actually in the list and was removed) */ public boolean removeWikiEventListener( final WikiEventListener listener ) { - synchronized( m_listenerList ) { + lock.lock(); + try { for( final Iterator< WeakReference< WikiEventListener > > i = m_listenerList.iterator(); i.hasNext(); ) { final WikiEventListener l = i.next().get(); if( l == listener ) { @@ -432,6 +468,8 @@ public boolean removeWikiEventListener( final WikiEventListener listener ) { return true; } } + } finally { + lock.unlock(); } return false; @@ -441,8 +479,11 @@ public boolean removeWikiEventListener( final WikiEventListener listener ) { * Returns true if there are one or more listeners registered with this instance. */ public boolean isListening() { - synchronized( m_listenerList ) { + lock.lock(); + try { return !m_listenerList.isEmpty(); + } finally { + lock.unlock(); } } @@ -452,7 +493,8 @@ public boolean isListening() { public void fireEvent( final WikiEvent event ) { boolean needsCleanup = false; try { - synchronized( m_listenerList ) { + lock.lock(); + try { for( final WeakReference< WikiEventListener > wikiEventListenerWeakReference : m_listenerList ) { final WikiEventListener listener = wikiEventListenerWeakReference.get(); if( listener != null ) { @@ -472,6 +514,8 @@ public void fireEvent( final WikiEvent event ) { } } + } finally { + lock.unlock(); } } catch( final ConcurrentModificationException e ) { // We don't die, we just don't do notifications in that case. 
diff --git a/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java b/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java index 9563e59f2f..8bc11fe50b 100644 --- a/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java +++ b/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java @@ -55,6 +55,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.*; +import java.util.concurrent.locks.ReentrantLock; import static java.lang.String.format; @@ -87,7 +88,18 @@ public class KendraSearchProvider implements SearchProvider { private static final String PROP_KENDRA_INDEXDELAY = "jspwiki.kendra.indexdelay"; private static final String PROP_KENDRA_INITIALDELAY = "jspwiki.kendra.initialdelay"; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + public KendraSearchProvider() { + lock = new ReentrantLock(); } /** @@ -339,7 +351,8 @@ private void doPartialReindex() { } LOG.debug( "Indexing updated pages. Please wait ..." ); final String executionId = startExecution(); - synchronized ( updates ) { + lock.lock(); + try { try { while (!updates.isEmpty()) { indexOnePage( updates.remove( 0 ), executionId ); @@ -347,6 +360,8 @@ private void doPartialReindex() { } finally { stopExecution(); } + } finally { + lock.unlock(); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java b/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java index 8b83a13228..820ece30a4 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java @@ -28,6 +28,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Set; import java.util.Stack; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.ReentrantLock; /** @@ -57,6 +58,16 @@ public final class WatchDog { private static final Map< Integer, WeakReference< WatchDog > > c_kennel = new ConcurrentHashMap<>(); private static WikiBackgroundThread c_watcherThread; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + /** * Returns the current watchdog for the current thread. This is the preferred method of getting you a Watchdog, since it * keeps an internal list of Watchdogs for you so that there won't be more than one watchdog per thread. 
@@ -92,11 +103,16 @@ public WatchDog( final Engine engine, final Watchable watch ) { m_engine = engine; m_watchable = watch; - synchronized( WatchDog.class ) { + lock = new ReentrantLock(); + + lock.lock(); + try { if( c_watcherThread == null ) { c_watcherThread = new WatchDogThread( engine ); c_watcherThread.start(); } + } finally { + lock.unlock(); } } @@ -136,12 +152,15 @@ private static void scrub() { * Can be used to enable the WatchDog. Will cause a new Thread to be created, if none was existing previously. */ public void enable() { - synchronized( WatchDog.class ) { + lock.lock(); + try { if( !m_enabled ) { m_enabled = true; c_watcherThread = new WatchDogThread( m_engine ); c_watcherThread.start(); } + } finally { + lock.unlock(); } } @@ -149,12 +168,15 @@ public void enable() { * Is used to disable a WatchDog. The watchdog thread is shut down and resources released. */ public void disable() { - synchronized( WatchDog.class ) { + lock.lock(); + try { if( m_enabled ) { m_enabled = false; c_watcherThread.shutdown(); c_watcherThread = null; } + } finally { + lock.unlock(); } } @@ -186,9 +208,12 @@ public void enterState( final String state ) { */ public void enterState( final String state, final int expectedCompletionTime ) { LOG.debug( "{}: Entering state {}, expected completion in {} s", m_watchable.getName(), state, expectedCompletionTime ); - synchronized( m_stateStack ) { + lock.lock(); + try { final State st = new State( state, expectedCompletionTime ); m_stateStack.push( st ); + } finally { + lock.unlock(); } } @@ -208,7 +233,8 @@ public void exitState() { */ public void exitState( final String state ) { if( !m_stateStack.empty() ) { - synchronized( m_stateStack ) { + lock.lock(); + try { final State st = m_stateStack.peek(); if( state == null || st.getState().equals( state ) ) { m_stateStack.pop(); @@ -218,6 +244,8 @@ public void exitState( final String state ) { // FIXME: should actually go and fix things for that LOG.error( "exitState() called before enterState()" ); } + } finally { + lock.unlock(); } } else { LOG.warn( "Stack for " + m_watchable.getName() + " is empty!" ); @@ -244,8 +272,8 @@ public boolean isWatchableAlive() { private void check() { LOG.debug( "Checking watchdog '{}'", m_watchable.getName() ); - - synchronized( m_stateStack ) { + lock.lock(); + try { if( !m_stateStack.empty() ) { final State st = m_stateStack.peek(); final long now = System.currentTimeMillis(); @@ -261,6 +289,8 @@ private void check() { } else { LOG.warn( "Stack for " + m_watchable.getName() + " is empty!" 
); } + } finally { + lock.unlock(); } } @@ -302,7 +332,8 @@ private void dumpStackTraceForWatchable() { */ @Override public String toString() { - synchronized( m_stateStack ) { + lock.lock(); + try { String state = "Idle"; if( !m_stateStack.empty() ) { @@ -310,6 +341,8 @@ public String toString() { state = st.getState(); } return "WatchDog state=" + state; + } finally { + lock.unlock(); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java b/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java index 4ec79d82e0..9895796b16 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java @@ -81,6 +81,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Properties; import java.util.TimeZone; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; @@ -138,6 +139,16 @@ public class WikiEngine implements Engine { /** Stores WikiEngine's associated managers. */ protected final Map< Class< ? >, Object > managers = new ConcurrentHashMap<>(); + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private static final ReentrantLock lock = new ReentrantLock(); + /** * Gets a WikiEngine related to this servlet. Since this method is only called from JSP pages (and JspInit()) to be specific, * we throw a RuntimeException if things don't work. @@ -146,8 +157,13 @@ public class WikiEngine implements Engine { * @return A WikiEngine instance. * @throws InternalWikiException in case something fails. This is a RuntimeException, so be prepared for it. */ - public static synchronized WikiEngine getInstance( final ServletConfig config ) throws InternalWikiException { - return getInstance( config.getServletContext(), null ); + public static WikiEngine getInstance( final ServletConfig config ) throws InternalWikiException { + lock.lock(); + try { + return getInstance( config.getServletContext(), null ); + } finally { + lock.unlock(); + } } /** @@ -160,8 +176,13 @@ public static synchronized WikiEngine getInstance( final ServletConfig config ) * * @return One well-behaving WikiEngine instance. */ - public static synchronized WikiEngine getInstance( final ServletConfig config, final Properties props ) { - return getInstance( config.getServletContext(), props ); + public static WikiEngine getInstance( final ServletConfig config, final Properties props ) { + lock.lock(); + try { + return getInstance( config.getServletContext(), props ); + } finally { + lock.unlock(); + } } /** @@ -172,33 +193,39 @@ public static synchronized WikiEngine getInstance( final ServletConfig config, f * @return One fully functional, properly behaving WikiEngine. * @throws InternalWikiException If the WikiEngine instantiation fails. 
*/ - public static synchronized WikiEngine getInstance( final ServletContext context, Properties props ) throws InternalWikiException { - WikiEngine engine = ( WikiEngine )context.getAttribute( ATTR_WIKIENGINE ); - if( engine == null ) { - final String appid = Integer.toString( context.hashCode() ); - context.log( " Assigning new engine to " + appid ); - try { - if( props == null ) { - props = PropertyReader.loadWebAppProps( context ); - } - - engine = new WikiEngine( context, appid ); + public static WikiEngine getInstance(final ServletContext context, Properties props) throws InternalWikiException { + lock.lock(); + WikiEngine engine; + try { + engine = (WikiEngine) context.getAttribute(ATTR_WIKIENGINE); + if (engine == null) { + final String appid = Integer.toString(context.hashCode()); + context.log(" Assigning new engine to " + appid); try { - // Note: May be null, if JSPWiki has been deployed in a WAR file. - engine.start( props ); - LOG.info( "Root path for this Wiki is: '{}'", engine.getRootPath() ); - } catch( final Exception e ) { - final String msg = Release.APPNAME + ": Unable to load and setup properties from jspwiki.properties. " + e.getMessage(); - context.log( msg ); - LOG.error( msg, e ); - throw new WikiException( msg, e ); + if (props == null) { + props = PropertyReader.loadWebAppProps(context); + } + + engine = new WikiEngine(context, appid); + try { + // Note: May be null, if JSPWiki has been deployed in a WAR file. + engine.start(props); + LOG.info("Root path for this Wiki is: '{}'", engine.getRootPath()); + } catch (final Exception e) { + final String msg = Release.APPNAME + ": Unable to load and setup properties from jspwiki.properties. " + e.getMessage(); + context.log(msg); + LOG.error(msg, e); + throw new WikiException(msg, e); + } + context.setAttribute(ATTR_WIKIENGINE, engine); + } catch (final Exception e) { + context.log("ERROR: Failed to create a Wiki engine: " + e.getMessage()); + LOG.error("ERROR: Failed to create a Wiki engine, stacktrace follows ", e); + throw new InternalWikiException("No wiki engine, check logs.", e); } - context.setAttribute( ATTR_WIKIENGINE, engine ); - } catch( final Exception e ) { - context.log( "ERROR: Failed to create a Wiki engine: " + e.getMessage() ); - LOG.error( "ERROR: Failed to create a Wiki engine, stacktrace follows ", e ); - throw new InternalWikiException( "No wiki engine, check logs.", e ); } + } finally { + lock.unlock(); } return engine; } @@ -943,14 +970,26 @@ public InternationalizationManager getInternationalizationManager() { /** {@inheritDoc} */ @Override - public final synchronized void addWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.addWikiEventListener( this, listener ); + public final void addWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.addWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } - /** {@inheritDoc} */ + /** + * {@inheritDoc} + */ @Override - public final synchronized void removeWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.removeWikiEventListener( this, listener ); + public final void removeWikiEventListener(final WikiEventListener listener) { + lock.lock(); + try { + WikiEventManager.removeWikiEventListener(this, listener); + } finally { + lock.unlock(); + } } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java index 
6f86584c14..79d5682644 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java @@ -53,6 +53,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Map; import java.util.Properties; import java.util.Set; +import java.util.concurrent.locks.ReentrantLock; /** @@ -103,6 +104,21 @@ public class DefaultAuthenticationManager implements AuthenticationManager { /** Keeps a list of the usernames who have attempted a login recently. */ private final TimedCounterList< String > m_lastLoginAttempts = new TimedCounterList<>(); + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public DefaultAuthenticationManager () { + lock = new ReentrantLock(); + } + + /** * {@inheritDoc} */ @@ -352,16 +368,26 @@ public Set< Principal > doJAASLogin( final Class< ? extends LoginModule > clazz, * {@inheritDoc} */ @Override - public synchronized void addWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.addWikiEventListener( this, listener ); + public void addWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.addWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } /** * {@inheritDoc} */ @Override - public synchronized void removeWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.removeWikiEventListener( this, listener ); + public void removeWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.removeWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java index f39f65cdcd..f6747cbb18 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java @@ -64,6 +64,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Properties; import java.util.ResourceBundle; import java.util.WeakHashMap; +import java.util.concurrent.locks.ReentrantLock; /** @@ -88,10 +89,21 @@ public class DefaultAuthorizationManager implements AuthorizationManager { private LocalPolicy m_localPolicy; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private ReentrantLock lock; + /** * Constructs a new DefaultAuthorizationManager instance. 
*/ public DefaultAuthorizationManager() { + lock = new ReentrantLock(); } /** {@inheritDoc} */ @@ -373,14 +385,24 @@ public Principal resolvePrincipal( final String name ) { /** {@inheritDoc} */ @Override - public synchronized void addWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.addWikiEventListener( this, listener ); + public void addWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.addWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } /** {@inheritDoc} */ @Override - public synchronized void removeWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.removeWikiEventListener( this, listener ); + public void removeWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.removeWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java index 048fc76315..32dde21a35 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java @@ -69,6 +69,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Properties; import java.util.ResourceBundle; import java.util.WeakHashMap; +import java.util.concurrent.locks.ReentrantLock; /** @@ -96,6 +97,21 @@ public class DefaultUserManager implements UserManager { /** The user database loads, manages and persists user identities */ private UserDatabase m_database; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public DefaultUserManager() { + lock = new ReentrantLock(); + } + + /** {@inheritDoc} */ @Override public void initialize( final Engine engine, final Properties props ) { @@ -406,8 +422,13 @@ public Principal[] listWikiNames() throws WikiSecurityException { * This is a convenience method. * @param listener the event listener */ - @Override public synchronized void addWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.addWikiEventListener( this, listener ); + @Override public void addWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.addWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } /** @@ -415,8 +436,13 @@ public Principal[] listWikiNames() throws WikiSecurityException { * This is a convenience method. 
* @param listener the event listener */ - @Override public synchronized void removeWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.removeWikiEventListener( this, listener ); + @Override public void removeWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.removeWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java index dc7e082e1c..638e246ae1 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java @@ -22,6 +22,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.logging.log4j.Logger; import org.apache.wiki.api.core.Engine; import org.apache.wiki.api.core.Session; +import org.apache.wiki.api.spi.SessionSPI; import org.apache.wiki.api.spi.Wiki; import org.apache.wiki.event.WikiEventListener; import org.apache.wiki.event.WikiEventManager; @@ -32,12 +33,14 @@ Licensed to the Apache Software Foundation (ASF) under one import javax.servlet.http.HttpSession; import javax.servlet.http.HttpSessionEvent; import javax.servlet.http.HttpSessionListener; +import java.lang.ref.WeakReference; import java.security.Principal; import java.util.Arrays; import java.util.Collection; import java.util.Map; import java.util.WeakHashMap; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; /** @@ -59,6 +62,16 @@ public class SessionMonitor implements HttpSessionListener { private final PrincipalComparator m_comparator = new PrincipalComparator(); + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see ReentrantLock + */ + private final ReentrantLock lock; + /** * Returns the instance of the SessionMonitor for this wiki. Only one SessionMonitor exists per Engine. * @@ -80,10 +93,13 @@ public static SessionMonitor getInstance( final Engine engine ) { /** Construct the SessionListener */ public SessionMonitor() { + lock = new ReentrantLock(); } private SessionMonitor( final Engine engine ) { + this(); m_engine = engine; + } /** @@ -123,10 +139,10 @@ private Session findSession( final String sessionId ) { /** *

Looks up the wiki session associated with a user's Http session and adds it to the session cache. This method will return the
-     * "guest session" as constructed by {@link org.apache.wiki.api.spi.SessionSPI#guest(Engine)} if the HttpSession is not currently
+     * "guest session" as constructed by {@link SessionSPI#guest(Engine)} if the HttpSession is not currently
      * associated with a WikiSession. This method is guaranteed to return a non-null WikiSession.
      * Internally, the session is stored in a HashMap; keys are the HttpSession objects, while the values are
-     * {@link java.lang.ref.WeakReference}-wrapped WikiSessions.
+     * {@link WeakReference}-wrapped WikiSessions.
* * @param session the HTTP session * @return the wiki session @@ -143,10 +159,10 @@ public final Session find( final HttpSession session ) { /** *

Looks up the wiki session associated with a user's Http session and adds it to the session cache. This method will return the
-     * "guest session" as constructed by {@link org.apache.wiki.api.spi.SessionSPI#guest(Engine)} if the HttpSession is not currently
+     * "guest session" as constructed by {@link SessionSPI#guest(Engine)} if the HttpSession is not currently
      * associated with a WikiSession. This method is guaranteed to return a non-null WikiSession.
      * Internally, the session is stored in a HashMap; keys are the HttpSession objects, while the values are
-     * {@link java.lang.ref.WeakReference}-wrapped WikiSessions.
+     * {@link WeakReference}-wrapped WikiSessions.
* * @param sessionId the HTTP session * @return the wiki session @@ -169,8 +185,11 @@ public final Session find( final String sessionId ) { private Session createGuestSessionFor( final String sessionId ) { LOG.debug( "Session for session ID={}... not found. Creating guestSession()", sessionId ); final Session wikiSession = Wiki.session().guest( m_engine ); - synchronized( m_sessions ) { + lock.lock(); + try { m_sessions.put( sessionId, wikiSession ); + } finally { + lock.unlock(); } return wikiSession; } @@ -196,8 +215,11 @@ public final void remove( final HttpSession session ) { if( session == null ) { throw new IllegalArgumentException( "Session cannot be null." ); } - synchronized( m_sessions ) { + lock.lock(); + try { m_sessions.remove( session.getId() ); + } finally { + lock.unlock(); } } @@ -214,15 +236,18 @@ public final int sessions() *

Returns the current wiki users as a sorted array of Principal objects. The principals are those returned by
      * each WikiSession's {@link Session#getUserPrincipal()}'s method.
      * To obtain the list of current WikiSessions, we iterate through our session Map and obtain the list of values,
-     * which are WikiSessions wrapped in {@link java.lang.ref.WeakReference} objects. Those WeakReferences
+     * which are WikiSessions wrapped in {@link WeakReference} objects. Those WeakReferences
      * whose get() method returns non-null values are valid sessions.
* * @return the array of user principals */ public final Principal[] userPrincipals() { final Collection principals; - synchronized ( m_sessions ) { + lock.lock(); + try { principals = m_sessions.values().stream().map(Session::getUserPrincipal).collect(Collectors.toList()); + } finally { + lock.unlock(); } final Principal[] p = principals.toArray( new Principal[0] ); Arrays.sort( p, m_comparator ); @@ -235,8 +260,13 @@ public final Principal[] userPrincipals() { * @param listener the event listener * @since 2.4.75 */ - public final synchronized void addWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.addWikiEventListener( this, listener ); + public final void addWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.addWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } /** @@ -245,8 +275,13 @@ public final synchronized void addWikiEventListener( final WikiEventListener lis * @param listener the event listener * @since 2.4.75 */ - public final synchronized void removeWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.removeWikiEventListener( this, listener ); + public final void removeWikiEventListener(final WikiEventListener listener) { + lock.lock(); + try { + WikiEventManager.removeWikiEventListener(this, listener); + } finally { + lock.unlock(); + } } /** @@ -265,7 +300,7 @@ protected final void fireEvent( final int type, final Principal principal, final /** * Fires when the web container creates a new HTTP session. - * + * * @param se the HTTP session event */ @Override diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java index cc7464a45e..befac2c77e 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java @@ -19,12 +19,14 @@ Licensed to the Apache Software Foundation (ASF) under one package org.apache.wiki.auth.acl; import org.apache.wiki.auth.permissions.PagePermission; +import org.apache.wiki.event.WikiEventManager; import java.io.Serializable; import java.security.Permission; import java.security.Principal; import java.util.Enumeration; import java.util.Vector; +import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; @@ -39,10 +41,21 @@ public class AclEntryImpl implements AclEntry, Serializable { private final Vector< Permission > m_permissions = new Vector<>(); private Principal m_principal; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + /** * Constructs a new AclEntryImpl instance. 
*/ public AclEntryImpl() { + lock = new ReentrantLock(); } /** @@ -54,13 +67,18 @@ public AclEntryImpl() { * already part of this entry's permission set, and false if the permission is not of type PagePermission */ @Override - public synchronized boolean addPermission(final Permission permission ) { - if( permission instanceof PagePermission && findPermission( permission ) == null ) { - m_permissions.add( permission ); - return true; - } + public boolean addPermission(final Permission permission) { + lock.lock(); + try { + if (permission instanceof PagePermission && findPermission(permission) == null) { + m_permissions.add(permission); + return true; + } - return false; + return false; + } finally { + lock.unlock(); + } } /** @@ -81,8 +99,13 @@ public boolean checkPermission(final Permission permission ) { * @return the principal associated with this entry. */ @Override - public synchronized Principal getPrincipal() { - return m_principal; + public Principal getPrincipal() { + lock.lock(); + try { + return m_principal; + } finally { + lock.unlock(); + } } /** @@ -102,14 +125,19 @@ public Enumeration< Permission > permissions() { * @return true if the permission is removed, false if the permission was not part of this entry's permission set. */ @Override - public synchronized boolean removePermission(final Permission permission ) { - final Permission p = findPermission( permission ); - if( p != null ) { - m_permissions.remove( p ); - return true; - } + public boolean removePermission(final Permission permission ) { + lock.lock(); + try { + final Permission p = findPermission(permission); + if (p != null) { + m_permissions.remove(p); + return true; + } - return false; + return false; + } finally { + lock.unlock(); + } } /** @@ -121,12 +149,17 @@ public synchronized boolean removePermission(final Permission permission ) { * principal set for this entry */ @Override - public synchronized boolean setPrincipal(final Principal user ) { - if( m_principal != null || user == null ) { - return false; + public boolean setPrincipal(final Principal user) { + lock.lock(); + try { + if (m_principal != null || user == null) { + return false; + } + m_principal = user; + return true; + } finally { + lock.unlock(); } - m_principal = user; - return true; } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java index 11af72c27f..3eb2ddf729 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java @@ -27,6 +27,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Enumeration; import java.util.List; import java.util.Vector; +import java.util.concurrent.locks.ReentrantLock; /** @@ -36,6 +37,16 @@ Licensed to the Apache Software Foundation (ASF) under one */ public class AclImpl implements Acl, Serializable { + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. 
+ * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + private static final long serialVersionUID = 1L; private final Vector< AclEntry > m_entries = new Vector<>(); @@ -43,6 +54,7 @@ public class AclImpl implements Acl, Serializable { * Constructs a new AclImpl instance. */ public AclImpl() { + lock = new ReentrantLock(); } /** {@inheritDoc} */ @@ -86,24 +98,34 @@ private boolean hasEntry( final AclEntry entry ) { /** {@inheritDoc} */ @Override - public synchronized boolean addEntry( final AclEntry entry ) { - if( entry.getPrincipal() == null ) { - throw new IllegalArgumentException( "Entry principal cannot be null" ); - } + public boolean addEntry( final AclEntry entry ) { + lock.lock(); + try { + if( entry.getPrincipal() == null ) { + throw new IllegalArgumentException( "Entry principal cannot be null" ); + } - if( hasEntry( entry ) ) { - return false; - } + if( hasEntry( entry ) ) { + return false; + } - m_entries.add( entry ); + m_entries.add( entry ); - return true; + return true; + } finally { + lock.unlock(); + } } /** {@inheritDoc} */ @Override - public synchronized boolean removeEntry( final AclEntry entry ) { - return m_entries.remove( entry ); + public boolean removeEntry( final AclEntry entry ) { + lock.lock(); + try { + return m_entries.remove( entry ); + } finally { + lock.unlock(); + } } /** {@inheritDoc} */ diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java index fccf6c1cb0..eefc53bec0 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java @@ -49,6 +49,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; +import java.util.concurrent.locks.ReentrantLock; /** @@ -75,6 +76,20 @@ public class DefaultGroupManager implements GroupManager, Authorizer, WikiEventL /** Map with GroupPrincipals as keys, and Groups as values */ private final Map< Principal, Group > m_groups = new HashMap<>(); + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. 
+ * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public DefaultGroupManager() { + lock = new ReentrantLock(); + } + /** {@inheritDoc} */ @Override public Principal findRole( final String name ) { @@ -152,12 +167,15 @@ public void initialize( final Engine engine, final Properties props ) throws Wik // Load all groups from the database into the cache final Group[] groups = m_groupDatabase.groups(); - synchronized( m_groups ) { + lock.lock(); + try { for( final Group group : groups ) { // Add new group to cache; fire GROUP_ADD event m_groups.put( group.getPrincipal(), group ); fireEvent( WikiSecurityEvent.GROUP_ADD, group ); } + } finally { + lock.unlock(); } // Make the GroupManager listen for WikiEvents (WikiSecurityEvents for changed user profiles) @@ -253,8 +271,11 @@ public void removeGroup( final String index ) throws WikiSecurityException { // Delete the group // TODO: need rollback procedure - synchronized( m_groups ) { + lock.lock(); + try { m_groups.remove( group.getPrincipal() ); + } finally { + lock.unlock(); } m_groupDatabase.delete( group ); fireEvent( WikiSecurityEvent.GROUP_REMOVE, group ); @@ -269,8 +290,11 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu final Group oldGroup = m_groups.get( group.getPrincipal() ); if( oldGroup != null ) { fireEvent( WikiSecurityEvent.GROUP_REMOVE, oldGroup ); - synchronized( m_groups ) { + lock.lock(); + try { m_groups.remove( oldGroup.getPrincipal() ); + } finally { + lock.unlock(); } } @@ -283,8 +307,11 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu } // Add new group to cache; announce GROUP_ADD event - synchronized( m_groups ) { + lock.lock(); + try { m_groups.put( group.getPrincipal(), group ); + } finally { + lock.unlock(); } fireEvent( WikiSecurityEvent.GROUP_ADD, group ); @@ -300,8 +327,11 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu // Restore previous version, re-throw... 
fireEvent( WikiSecurityEvent.GROUP_REMOVE, group ); fireEvent( WikiSecurityEvent.GROUP_ADD, oldGroup ); - synchronized( m_groups ) { + lock.lock(); + try { m_groups.put( oldGroup.getPrincipal(), oldGroup ); + } finally { + lock.unlock(); } throw new WikiSecurityException( e.getMessage() + " (rolled back to previous version).", e ); } @@ -367,14 +397,24 @@ protected String[] extractMembers( final String memberLine ) { /** {@inheritDoc} */ @Override - public synchronized void addWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.addWikiEventListener( this, listener ); + public void addWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.addWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } /** {@inheritDoc} */ @Override - public synchronized void removeWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.removeWikiEventListener( this, listener ); + public void removeWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.removeWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } /** {@inheritDoc} */ diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java index fc0b733abb..31b67dbec1 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java @@ -23,6 +23,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.security.Principal; import java.util.Date; import java.util.Vector; +import java.util.concurrent.locks.ReentrantLock; /** *

@@ -77,6 +78,16 @@ public class Group { private final String m_wiki; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + /** * Protected constructor to prevent direct instantiation except by other * package members. Callers should use @@ -90,6 +101,7 @@ protected Group( final String name, final String wiki ) { m_name = name; m_wiki = wiki; m_principal = new GroupPrincipal( name ); + lock = new ReentrantLock(); } /** @@ -98,20 +110,30 @@ protected Group( final String name, final String wiki ) { * @param user the principal to add * @return true if the operation was successful */ - public synchronized boolean add( final Principal user ) { - if( isMember( user ) ) { - return false; + public boolean add( final Principal user ) { + lock.lock(); + try { + if( isMember( user ) ) { + return false; + } + + m_members.add( user ); + return true; + } finally { + lock.unlock(); } - - m_members.add( user ); - return true; } /** * Clears all Principals from the group list. */ - public synchronized void clear() { - m_members.clear(); + public void clear() { + lock.lock(); + try { + m_members.clear(); + } finally { + lock.unlock(); + } } /** @@ -156,8 +178,13 @@ public int hashCode() { * * @return the creation date */ - public synchronized Date getCreated() { - return m_created; + public Date getCreated() { + lock.lock(); + try { + return m_created; + } finally { + lock.unlock(); + } } /** @@ -165,8 +192,13 @@ public synchronized Date getCreated() { * * @return the creator */ - public final synchronized String getCreator() { - return m_creator; + public final String getCreator() { + lock.lock(); + try { + return m_creator; + } finally { + lock.unlock(); + } } /** @@ -174,8 +206,13 @@ public final synchronized String getCreator() { * * @return the date and time of last modification */ - public synchronized Date getLastModified() { - return m_modified; + public Date getLastModified() { + lock.lock(); + try { + return m_modified; + } finally { + lock.unlock(); + } } /** @@ -183,8 +220,13 @@ public synchronized Date getLastModified() { * * @return the modifier */ - public final synchronized String getModifier() { - return m_modifier; + public final String getModifier() { + lock.lock(); + try { + return m_modifier; + } finally { + lock.unlock(); + } } /** @@ -240,14 +282,20 @@ public Principal[] members() { * @param user the principal to remove * @return true if the operation was successful */ - public synchronized boolean remove( Principal user ) { - user = findMember( user.getName() ); - if( user == null ) - return false; - - m_members.remove( user ); + public boolean remove( Principal user ) { + lock.lock(); + try { + user = findMember( user.getName() ); + if( user == null ) + return false; + + m_members.remove( user ); + + return true; + } finally { + lock.unlock(); + } - return true; } /** @@ -255,16 +303,26 @@ public synchronized boolean remove( Principal user ) { * * @param date the creation date */ - public synchronized void setCreated( final Date date ) { - m_created = date; + public void setCreated( final Date date ) { + lock.lock(); + try { + m_created = date; + } finally { + lock.unlock(); + } } /** * Sets the creator of this Group. 
* @param creator the creator */ - public final synchronized void setCreator( final String creator ) { - this.m_creator = creator; + public final void setCreator( final String creator ) { + lock.lock(); + try { + this.m_creator = creator; + } finally { + lock.unlock(); + } } /** @@ -272,8 +330,13 @@ public final synchronized void setCreator( final String creator ) { * * @param date the last-modified date */ - public synchronized void setLastModified( final Date date ) { - m_modified = date; + public void setLastModified( final Date date ) { + lock.lock(); + try { + m_modified = date; + } finally { + lock.unlock(); + } } /** @@ -281,8 +344,13 @@ public synchronized void setLastModified( final Date date ) { * * @param modifier the modifier */ - public final synchronized void setModifier( final String modifier ) { - this.m_modifier = modifier; + public final void setModifier( final String modifier ) { + lock.lock(); + try { + this.m_modifier = modifier; + } finally { + lock.unlock(); + } } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java index 1d98e66cbd..28299c2a1a 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java @@ -44,6 +44,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.UUID; +import java.util.concurrent.locks.ReentrantLock; /** @@ -99,6 +100,16 @@ public class CookieAuthenticationLoginModule extends AbstractLoginModule { */ private static final long SCRUB_PERIOD = 60 * 60 * 1_000L; // In milliseconds + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private static final ReentrantLock lock = new ReentrantLock(); + /** * {@inheritDoc} * @@ -268,25 +279,31 @@ private static Cookie getLoginCookie( final String value ) { * @param days number of days that the cookie will survive * @param cookieDir cookie directory */ - private static synchronized void scrub( final int days, final File cookieDir ) { - LOG.debug( "Scrubbing cookieDir..." ); - final File[] files = cookieDir.listFiles(); - final long obsoleteDateLimit = System.currentTimeMillis() - ( ( long )days + 1 ) * 24 * 60 * 60 * 1000L; - int deleteCount = 0; - - for( int i = 0; i < files.length; i++ ) { - final File f = files[ i ]; - final long lastModified = f.lastModified(); - if( lastModified < obsoleteDateLimit ) { - if( f.delete() ) { - deleteCount++; - } else { - LOG.debug( "Error deleting cookie login with index {}", i ); + private static void scrub( final int days, final File cookieDir ) { + lock.lock(); + try { + LOG.debug( "Scrubbing cookieDir..." 
); + final File[] files = cookieDir.listFiles(); + final long obsoleteDateLimit = System.currentTimeMillis() - ( ( long )days + 1 ) * 24 * 60 * 60 * 1000L; + int deleteCount = 0; + + for( int i = 0; i < files.length; i++ ) { + final File f = files[ i ]; + final long lastModified = f.lastModified(); + if( lastModified < obsoleteDateLimit ) { + if( f.delete() ) { + deleteCount++; + } else { + LOG.debug( "Error deleting cookie login with index {}", i ); + } } } + + LOG.debug( "Removed {} obsolete cookie logins", deleteCount ); + } finally { + lock.unlock(); } - LOG.debug( "Removed {} obsolete cookie logins", deleteCount ); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java index 760c879f4b..22d13be93f 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java @@ -21,6 +21,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.api.core.Page; import java.util.WeakHashMap; +import java.util.concurrent.locks.ReentrantLock; /** @@ -42,6 +43,16 @@ private PermissionFactory() {} * cached page permissions. */ private static final WeakHashMap c_cache = new WeakHashMap<>(); + + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private static final ReentrantLock lock = new ReentrantLock(); /** * Get a permission object for a WikiPage and a set of actions. @@ -93,19 +104,23 @@ private static PagePermission getPagePermission( final String wiki, String page, // It's fine if two threads update the cache, since the objects mean the same // thing anyway. And this avoids nasty blocking effects. // - synchronized( c_cache ) - { + lock.lock(); + try { perm = c_cache.get( key ); + } finally { + lock.unlock(); } if( perm == null ) { if( !wiki.isEmpty() ) page = wiki+":"+page; perm = new PagePermission( page, actions ); - - synchronized( c_cache ) - { + + lock.lock(); + try { c_cache.put( key, perm ); + } finally { + lock.unlock(); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java index dd7c6ee582..c7251d6594 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java @@ -52,6 +52,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Properties; import java.util.SortedSet; import java.util.TreeSet; +import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -91,25 +92,44 @@ public class XMLUserDatabase extends AbstractUserDatabase { private Document c_dom; private File c_file; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. 
+ * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public XMLUserDatabase() { + lock = new ReentrantLock(); + } + /** {@inheritDoc} */ @Override - public synchronized void deleteByLoginName( final String loginName ) throws WikiSecurityException { - if( c_dom == null ) { - throw new WikiSecurityException( "FATAL: database does not exist" ); - } + public void deleteByLoginName( final String loginName ) throws WikiSecurityException { + lock.lock(); + try { + if( c_dom == null ) { + throw new WikiSecurityException( "FATAL: database does not exist" ); + } - final NodeList users = c_dom.getDocumentElement().getElementsByTagName( USER_TAG ); - for( int i = 0; i < users.getLength(); i++ ) { - final Element user = ( Element )users.item( i ); - if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) { - c_dom.getDocumentElement().removeChild( user ); + final NodeList users = c_dom.getDocumentElement().getElementsByTagName( USER_TAG ); + for( int i = 0; i < users.getLength(); i++ ) { + final Element user = ( Element )users.item( i ); + if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) { + c_dom.getDocumentElement().removeChild( user ); - // Commit to disk - saveDOM(); - return; + // Commit to disk + saveDOM(); + return; + } } + throw new NoSuchPrincipalException( "Not in database: " + loginName ); + } finally { + lock.unlock(); } - throw new NoSuchPrincipalException( "Not in database: " + loginName ); } /** {@inheritDoc} */ @@ -322,118 +342,129 @@ private void checkForRefresh() { * @see org.apache.wiki.auth.user.UserDatabase#rename(String, String) */ @Override - public synchronized void rename( final String loginName, final String newName) throws DuplicateUserException, WikiSecurityException { - if( c_dom == null ) { - LOG.fatal( "Could not rename profile '" + loginName + "'; database does not exist" ); - throw new IllegalStateException( "FATAL: database does not exist" ); - } - checkForRefresh(); + public void rename( final String loginName, final String newName) throws DuplicateUserException, WikiSecurityException { + lock.lock(); + try { + if( c_dom == null ) { + LOG.fatal( "Could not rename profile '" + loginName + "'; database does not exist" ); + throw new IllegalStateException( "FATAL: database does not exist" ); + } + checkForRefresh(); - // Get the existing user; if not found, throws NoSuchPrincipalException - final UserProfile profile = findByLoginName( loginName ); + // Get the existing user; if not found, throws NoSuchPrincipalException + final UserProfile profile = findByLoginName( loginName ); - // Get user with the proposed name; if found, it's a collision - try { - final UserProfile otherProfile = findByLoginName( newName ); - if( otherProfile != null ) { - throw new DuplicateUserException( "security.error.cannot.rename", newName ); + // Get user with the proposed name; if found, it's a collision + try { + final UserProfile otherProfile = findByLoginName( newName ); + if( otherProfile != null ) { + throw new DuplicateUserException( "security.error.cannot.rename", newName ); + } + } catch( final NoSuchPrincipalException e ) { + // Good! That means it's safe to save using the new name } - } catch( final NoSuchPrincipalException e ) { - // Good! 
That means it's safe to save using the new name - } - // Find the user with the old login id attribute, and change it - final NodeList users = c_dom.getElementsByTagName( USER_TAG ); - for( int i = 0; i < users.getLength(); i++ ) { - final Element user = ( Element )users.item( i ); - if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) { - final DateFormat c_format = new SimpleDateFormat( DATE_FORMAT ); - final Date modDate = new Date( System.currentTimeMillis() ); - setAttribute( user, LOGIN_NAME, newName ); - setAttribute( user, LAST_MODIFIED, c_format.format( modDate ) ); - profile.setLoginName( newName ); - profile.setLastModified( modDate ); - break; + // Find the user with the old login id attribute, and change it + final NodeList users = c_dom.getElementsByTagName( USER_TAG ); + for( int i = 0; i < users.getLength(); i++ ) { + final Element user = ( Element )users.item( i ); + if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) { + final DateFormat c_format = new SimpleDateFormat( DATE_FORMAT ); + final Date modDate = new Date( System.currentTimeMillis() ); + setAttribute( user, LOGIN_NAME, newName ); + setAttribute( user, LAST_MODIFIED, c_format.format( modDate ) ); + profile.setLoginName( newName ); + profile.setLastModified( modDate ); + break; + } } + + // Commit to disk + saveDOM(); + } finally { + lock.unlock(); } - // Commit to disk - saveDOM(); } /** {@inheritDoc} */ @Override - public synchronized void save( final UserProfile profile ) throws WikiSecurityException { - if ( c_dom == null ) { - LOG.fatal( "Could not save profile " + profile + " database does not exist" ); - throw new IllegalStateException( "FATAL: database does not exist" ); - } + public void save( final UserProfile profile ) throws WikiSecurityException { + lock.lock(); + try { + if ( c_dom == null ) { + LOG.fatal( "Could not save profile " + profile + " database does not exist" ); + throw new IllegalStateException( "FATAL: database does not exist" ); + } - checkForRefresh(); + checkForRefresh(); - final DateFormat c_format = new SimpleDateFormat( DATE_FORMAT ); - final String index = profile.getLoginName(); - final NodeList users = c_dom.getElementsByTagName( USER_TAG ); - Element user = IntStream.range(0, users.getLength()).mapToObj(i -> (Element) users.item(i)).filter(currentUser -> currentUser.getAttribute(LOGIN_NAME).equals(index)).findFirst().orElse(null); - - boolean isNew = false; - - final Date modDate = new Date( System.currentTimeMillis() ); - if( user == null ) { - // Create new user node - profile.setCreated( modDate ); - LOG.info( "Creating new user " + index ); - user = c_dom.createElement( USER_TAG ); - c_dom.getDocumentElement().appendChild( user ); - setAttribute( user, CREATED, c_format.format( profile.getCreated() ) ); - isNew = true; - } else { - // To update existing user node, delete old attributes first... 
- final NodeList attributes = user.getElementsByTagName( ATTRIBUTES_TAG ); - for( int i = 0; i < attributes.getLength(); i++ ) { - user.removeChild( attributes.item( i ) ); + final DateFormat c_format = new SimpleDateFormat( DATE_FORMAT ); + final String index = profile.getLoginName(); + final NodeList users = c_dom.getElementsByTagName( USER_TAG ); + Element user = IntStream.range(0, users.getLength()).mapToObj(i -> (Element) users.item(i)).filter(currentUser -> currentUser.getAttribute(LOGIN_NAME).equals(index)).findFirst().orElse(null); + + boolean isNew = false; + + final Date modDate = new Date( System.currentTimeMillis() ); + if( user == null ) { + // Create new user node + profile.setCreated( modDate ); + LOG.info( "Creating new user " + index ); + user = c_dom.createElement( USER_TAG ); + c_dom.getDocumentElement().appendChild( user ); + setAttribute( user, CREATED, c_format.format( profile.getCreated() ) ); + isNew = true; + } else { + // To update existing user node, delete old attributes first... + final NodeList attributes = user.getElementsByTagName( ATTRIBUTES_TAG ); + for( int i = 0; i < attributes.getLength(); i++ ) { + user.removeChild( attributes.item( i ) ); + } } - } - setAttribute( user, UID, profile.getUid() ); - setAttribute( user, LAST_MODIFIED, c_format.format( modDate ) ); - setAttribute( user, LOGIN_NAME, profile.getLoginName() ); - setAttribute( user, FULL_NAME, profile.getFullname() ); - setAttribute( user, WIKI_NAME, profile.getWikiName() ); - setAttribute( user, EMAIL, profile.getEmail() ); - final Date lockExpiry = profile.getLockExpiry(); - setAttribute( user, LOCK_EXPIRY, lockExpiry == null ? "" : c_format.format( lockExpiry ) ); - - // Hash and save the new password if it's different from old one - final String newPassword = profile.getPassword(); - if( newPassword != null && !newPassword.equals( "" ) ) { - final String oldPassword = user.getAttribute( PASSWORD ); - if( !oldPassword.equals( newPassword ) ) { - setAttribute( user, PASSWORD, getHash( newPassword ) ); + setAttribute( user, UID, profile.getUid() ); + setAttribute( user, LAST_MODIFIED, c_format.format( modDate ) ); + setAttribute( user, LOGIN_NAME, profile.getLoginName() ); + setAttribute( user, FULL_NAME, profile.getFullname() ); + setAttribute( user, WIKI_NAME, profile.getWikiName() ); + setAttribute( user, EMAIL, profile.getEmail() ); + final Date lockExpiry = profile.getLockExpiry(); + setAttribute( user, LOCK_EXPIRY, lockExpiry == null ? "" : c_format.format( lockExpiry ) ); + + // Hash and save the new password if it's different from old one + final String newPassword = profile.getPassword(); + if( newPassword != null && !newPassword.equals( "" ) ) { + final String oldPassword = user.getAttribute( PASSWORD ); + if( !oldPassword.equals( newPassword ) ) { + setAttribute( user, PASSWORD, getHash( newPassword ) ); + } } - } - // Save the attributes as Base64 string - if(!profile.getAttributes().isEmpty()) { - try { - final String encodedAttributes = Serializer.serializeToBase64( profile.getAttributes() ); - final Element attributes = c_dom.createElement( ATTRIBUTES_TAG ); - user.appendChild( attributes ); - final Text value = c_dom.createTextNode( encodedAttributes ); - attributes.appendChild( value ); - } catch( final IOException e ) { - throw new WikiSecurityException( "Could not save user profile attribute. 
Reason: " + e.getMessage(), e ); - } - } + // Save the attributes as Base64 string + if( !profile.getAttributes().isEmpty()) { + try { + final String encodedAttributes = Serializer.serializeToBase64( profile.getAttributes() ); + final Element attributes = c_dom.createElement( ATTRIBUTES_TAG ); + user.appendChild( attributes ); + final Text value = c_dom.createTextNode( encodedAttributes ); + attributes.appendChild( value ); + } catch( final IOException e ) { + throw new WikiSecurityException( "Could not save user profile attribute. Reason: " + e.getMessage(), e ); + } } - // Set the profile timestamps - if( isNew ) { - profile.setCreated( modDate ); - } - profile.setLastModified( modDate ); + // Set the profile timestamps + if( isNew ) { + profile.setCreated( modDate ); + } + profile.setLastModified( modDate ); - // Commit to disk - saveDOM(); + // Commit to disk + saveDOM(); + } finally { + lock.unlock(); + } } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java index 32d96bce18..bf4c71aa5c 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java @@ -42,6 +42,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.List; import java.util.Properties; import java.util.StringTokenizer; +import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; @@ -92,12 +93,24 @@ public class ContextualDiffProvider implements DiffProvider { private static final int LIMIT_MAX_VALUE = (Integer.MAX_VALUE /2) - 1; private int m_unchangedContextLimit = LIMIT_MAX_VALUE; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + /** * Constructs this provider. */ public ContextualDiffProvider() - {} + { + lock = new ReentrantLock(); + } /** * @see org.apache.wiki.api.providers.WikiProvider#getProviderInfo() @@ -137,35 +150,40 @@ public void initialize( final Engine engine, final Properties properties) throws * {@inheritDoc} */ @Override - public synchronized String makeDiffHtml( final Context ctx, final String wikiOld, final String wikiNew ) { - // - // Sequencing handles lineterminator to <br />
and every-other consequtive space to a   - // - final String[] alpha = sequence( TextUtil.replaceEntities( wikiOld ) ); - final String[] beta = sequence( TextUtil.replaceEntities( wikiNew ) ); - - final Revision rev; + public String makeDiffHtml( final Context ctx, final String wikiOld, final String wikiNew ) { + lock.lock(); try { - rev = Diff.diff( alpha, beta, new MyersDiff() ); - } catch( final DifferentiationFailedException dfe ) { - LOG.error( "Diff generation failed", dfe ); - return "Error while creating version diff."; - } - - final int revSize = rev.size(); - final StringBuffer sb = new StringBuffer(); - - sb.append( DIFF_START ); + // + // Sequencing handles lineterminator to
and every-other consequtive space to a   + // + final String[] alpha = sequence( TextUtil.replaceEntities( wikiOld ) ); + final String[] beta = sequence( TextUtil.replaceEntities( wikiNew ) ); + + final Revision rev; + try { + rev = Diff.diff( alpha, beta, new MyersDiff() ); + } catch( final DifferentiationFailedException dfe ) { + LOG.error( "Diff generation failed", dfe ); + return "Error while creating version diff."; + } - // - // The MyersDiff is a bit dumb by converting a single line multi-word diff into a series - // of Changes. The ChangeMerger pulls them together again... - // - final ChangeMerger cm = new ChangeMerger( sb, alpha, revSize ); - rev.accept( cm ); - cm.shutdown(); - sb.append( DIFF_END ); - return sb.toString(); + final int revSize = rev.size(); + final StringBuffer sb = new StringBuffer(); + + sb.append( DIFF_START ); + + // + // The MyersDiff is a bit dumb by converting a single line multi-word diff into a series + // of Changes. The ChangeMerger pulls them together again... + // + final ChangeMerger cm = new ChangeMerger( sb, alpha, revSize ); + rev.accept( cm ); + cm.shutdown(); + sb.append( DIFF_END ); + return sb.toString(); + } finally { + lock.unlock(); + } } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java index 299e25b62f..b58c3f249b 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java @@ -28,6 +28,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.event.WikiPageEvent; import java.util.Properties; +import java.util.concurrent.locks.ReentrantLock; /** * Fires WikiPageEvents for page events. @@ -50,6 +51,20 @@ Licensed to the Apache Software Foundation (ASF) under one */ public class PageEventFilter extends BasePageFilter { + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public PageEventFilter() { + lock = new ReentrantLock(); + } + /** * Called whenever a new PageFilter is instantiated and reset. 
*/ @@ -112,8 +127,13 @@ public void postSave( final Context wikiContext, final String content ) { * * @param listener the event listener */ - public final synchronized void addWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.addWikiEventListener( this, listener ); + public final void addWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.addWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } /** @@ -121,8 +141,13 @@ public final synchronized void addWikiEventListener( final WikiEventListener lis * * @param listener the event listener */ - public final synchronized void removeWikiEventListener( final WikiEventListener listener ) { - WikiEventManager.removeWikiEventListener( this, listener ); + public final void removeWikiEventListener( final WikiEventListener listener ) { + lock.lock(); + try { + WikiEventManager.removeWikiEventListener( this, listener ); + } finally { + lock.unlock(); + } } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java index 1160d8b744..981a291b6a 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java @@ -77,6 +77,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.StringTokenizer; import java.util.Vector; import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.locks.ReentrantLock; /** @@ -242,6 +243,20 @@ public class SpamFilter extends BasePageFilter { */ private static final long HASH_DELAY = 24; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public SpamFilter() { + lock = new ReentrantLock(); + } + /** * {@inheritDoc} @@ -435,87 +450,92 @@ private Collection< Pattern > parseBlacklist( final String list ) { * @param change page change * @throws RedirectException spam filter rejects the page change. 
*/ - private synchronized void checkSinglePageChange(final Context context, final Change change ) + private void checkSinglePageChange(final Context context, final Change change ) throws RedirectException { - final HttpServletRequest req = context.getHttpRequest(); + lock.lock(); + try { + final HttpServletRequest req = context.getHttpRequest(); - if( req != null ) { - final String addr = HttpUtil.getRemoteAddress( req ); - int hostCounter = 0; - int changeCounter = 0; + if( req != null ) { + final String addr = HttpUtil.getRemoteAddress( req ); + int hostCounter = 0; + int changeCounter = 0; - LOG.debug( "Change is " + change.m_change ); + LOG.debug( "Change is " + change.m_change ); - final long time = System.currentTimeMillis() - 60*1000L; // 1 minute + final long time = System.currentTimeMillis() - 60*1000L; // 1 minute - for( final Iterator< Host > i = m_lastModifications.iterator(); i.hasNext(); ) { - final Host host = i.next(); + for( final Iterator< Host > i = m_lastModifications.iterator(); i.hasNext(); ) { + final Host host = i.next(); - // Check if this item is invalid - if( host.getAddedTime() < time ) { - LOG.debug( "Removed host " + host.getAddress() + " from modification queue (expired)" ); - i.remove(); - continue; - } + // Check if this item is invalid + if( host.getAddedTime() < time ) { + LOG.debug( "Removed host " + host.getAddress() + " from modification queue (expired)" ); + i.remove(); + continue; + } - // Check if this IP address has been seen before - if( host.getAddress().equals( addr ) ) { - hostCounter++; - } + // Check if this IP address has been seen before + if( host.getAddress().equals( addr ) ) { + hostCounter++; + } - // Check, if this change has been seen before - if( host.getChange() != null && host.getChange().equals( change ) ) { - changeCounter++; + // Check, if this change has been seen before + if( host.getChange() != null && host.getChange().equals( change ) ) { + changeCounter++; + } } - } - // Now, let's check against the limits. - if( hostCounter >= m_limitSinglePageChanges ) { - final Host host = new Host( addr, null ); - m_temporaryBanList.add( host ); + // Now, let's check against the limits. + if( hostCounter >= m_limitSinglePageChanges ) { + final Host host = new Host( addr, null ); + m_temporaryBanList.add( host ); - final String uid = log( context, REJECT, REASON_TOO_MANY_MODIFICATIONS, change.m_change ); - LOG.info( "SPAM:TooManyModifications (" + uid + "). Added host " + addr + " to temporary ban list for doing too many modifications/minute" ); - checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code " + uid + ")" ); - } + final String uid = log( context, REJECT, REASON_TOO_MANY_MODIFICATIONS, change.m_change ); + LOG.info( "SPAM:TooManyModifications (" + uid + "). Added host " + addr + " to temporary ban list for doing too many modifications/minute" ); + checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code " + uid + ")" ); + } - if( changeCounter >= m_limitSimilarChanges ) { - final Host host = new Host( addr, null ); - m_temporaryBanList.add( host ); + if( changeCounter >= m_limitSimilarChanges ) { + final Host host = new Host( addr, null ); + m_temporaryBanList.add( host ); - final String uid = log( context, REJECT, REASON_SIMILAR_MODIFICATIONS, change.m_change ); - LOG.info( "SPAM:SimilarModifications (" + uid + "). 
Added host " + addr + " to temporary ban list for doing too many similar modifications" ); - checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code "+uid+")"); - } + final String uid = log( context, REJECT, REASON_SIMILAR_MODIFICATIONS, change.m_change ); + LOG.info( "SPAM:SimilarModifications (" + uid + "). Added host " + addr + " to temporary ban list for doing too many similar modifications" ); + checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code "+uid+")"); + } - // Calculate the number of links in the addition. - String tstChange = change.toString(); - int urlCounter = 0; - while( m_matcher.contains( tstChange,m_urlPattern ) ) { - final MatchResult m = m_matcher.getMatch(); - tstChange = tstChange.substring( m.endOffset(0) ); - urlCounter++; - } + // Calculate the number of links in the addition. + String tstChange = change.toString(); + int urlCounter = 0; + while( m_matcher.contains( tstChange,m_urlPattern ) ) { + final MatchResult m = m_matcher.getMatch(); + tstChange = tstChange.substring( m.endOffset(0) ); + urlCounter++; + } - if( urlCounter > m_maxUrls ) { - final Host host = new Host( addr, null ); - m_temporaryBanList.add( host ); + if( urlCounter > m_maxUrls ) { + final Host host = new Host( addr, null ); + m_temporaryBanList.add( host ); - final String uid = log( context, REJECT, REASON_TOO_MANY_URLS, change.toString() ); - LOG.info( "SPAM:TooManyUrls (" + uid + "). Added host " + addr + " to temporary ban list for adding too many URLs" ); - checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code " + uid + ")" ); - } + final String uid = log( context, REJECT, REASON_TOO_MANY_URLS, change.toString() ); + LOG.info( "SPAM:TooManyUrls (" + uid + "). Added host " + addr + " to temporary ban list for adding too many URLs" ); + checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code " + uid + ")" ); + } - // Check bot trap - checkBotTrap( context, change ); + // Check bot trap + checkBotTrap( context, change ); - // Check UTF-8 mangling - checkUTF8( context, change ); + // Check UTF-8 mangling + checkUTF8( context, change ); - // Do Akismet check. This is good to be the last, because this is the most expensive operation. - checkAkismet( context, change ); + // Do Akismet check. This is good to be the last, because this is the most expensive operation. + checkAkismet( context, change ); - m_lastModifications.add( new Host( addr, change ) ); + m_lastModifications.add( new Host( addr, change ) ); + } + } finally { + lock.unlock(); } } @@ -630,15 +650,20 @@ private void checkUTF8( final Context context, final Change change ) throws Redi } /** Goes through the ban list and cleans away any host which has expired from it. 
*/ - private synchronized void cleanBanList() { - final long now = System.currentTimeMillis(); - for( final Iterator< Host > i = m_temporaryBanList.iterator(); i.hasNext(); ) { - final Host host = i.next(); - - if( host.getReleaseTime() < now ) { - LOG.debug( "Removed host " + host.getAddress() + " from temporary ban list (expired)" ); - i.remove(); + private void cleanBanList() { + lock.lock(); + try { + final long now = System.currentTimeMillis(); + for( final Iterator< Host > i = m_temporaryBanList.iterator(); i.hasNext(); ) { + final Host host = i.next(); + + if( host.getReleaseTime() < now ) { + LOG.debug( "Removed host " + host.getAddress() + " from temporary ban list (expired)" ); + i.remove(); + } } + } finally { + lock.unlock(); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java b/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java index f3d7eff0a0..ad59bef784 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java @@ -43,6 +43,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Properties; import java.util.ResourceBundle; import java.util.StringTokenizer; +import java.util.concurrent.locks.ReentrantLock; /** * Provides a handler for bug reports. Still under construction. @@ -73,6 +74,20 @@ public class BugReportHandler implements Plugin { /** Parameter name for setting the page. Value is {@value}. */ public static final String PARAM_PAGE = "page"; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public BugReportHandler() { + lock = new ReentrantLock(); + } + /** * {@inheritDoc} */ @@ -166,17 +181,22 @@ public String execute( final Context context, final Map< String, String > params * Finds a free page name for adding the bug report. Tries to construct a page, and if it's found, adds a number to it * and tries again. */ - private synchronized String findNextPage( final Context context, final String title, final String baseName ) { - final String basicPageName = ( ( baseName != null ) ? baseName : "Bug" ) + MarkupParser.cleanLink( title ); - final Engine engine = context.getEngine(); - - String pageName = basicPageName; - long lastbug = 2; - while( engine.getManager( PageManager.class ).wikiPageExists( pageName ) ) { - pageName = basicPageName + lastbug++; - } + private String findNextPage( final Context context, final String title, final String baseName ) { + lock.lock(); + try { + final String basicPageName = ( ( baseName != null ) ? 
baseName : "Bug" ) + MarkupParser.cleanLink( title ); + final Engine engine = context.getEngine(); - return pageName; + String pageName = basicPageName; + long lastbug = 2; + while( engine.getManager( PageManager.class ).wikiPageExists( pageName ) ) { + pageName = basicPageName + lastbug++; + } + + return pageName; + } finally { + lock.unlock(); + } } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java b/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java index 850d7c210a..6a2faed91d 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java @@ -59,6 +59,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Map.Entry; import java.util.Properties; import java.util.TreeMap; +import java.util.concurrent.locks.ReentrantLock; /** @@ -129,6 +130,20 @@ public class PageViewPlugin extends AbstractReferralPlugin implements Plugin, In /** Constant for storage interval in seconds. */ private static final int STORAGE_INTERVAL = 60; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public PageViewPlugin() { + lock = new ReentrantLock(); + } + /** * Initialize the PageViewPlugin and its singleton. * @@ -137,11 +152,14 @@ public class PageViewPlugin extends AbstractReferralPlugin implements Plugin, In @Override public void initialize( final Engine engine ) { LOG.info( "initializing PageViewPlugin" ); - synchronized( this ) { + lock.lock(); + try { if( c_singleton == null ) { c_singleton = new PageViewManager(); } c_singleton.initialize( engine ); + } finally { + lock.unlock(); } } @@ -202,50 +220,60 @@ public final class PageViewManager implements WikiEventListener { * * @param engine The wiki engine. */ - public synchronized void initialize( final Engine engine ) { - LOG.info( "initializing PageView Manager" ); - m_workDir = engine.getWorkDir(); - engine.addWikiEventListener( this ); - if( m_counters == null ) { - // Load the counters into a collection - m_storage = new Properties(); - m_counters = new TreeMap<>(); - - loadCounters(); - } + public void initialize( final Engine engine ) { + lock.lock(); + try { + LOG.info( "initializing PageView Manager" ); + m_workDir = engine.getWorkDir(); + engine.addWikiEventListener( this ); + if( m_counters == null ) { + // Load the counters into a collection + m_storage = new Properties(); + m_counters = new TreeMap<>(); + + loadCounters(); + } - // backup counters every 5 minutes - if( m_pageCountSaveThread == null ) { - m_pageCountSaveThread = new CounterSaveThread( engine, 5 * STORAGE_INTERVAL, this ); - m_pageCountSaveThread.start(); - } + // backup counters every 5 minutes + if( m_pageCountSaveThread == null ) { + m_pageCountSaveThread = new CounterSaveThread( engine, 5 * STORAGE_INTERVAL, this ); + m_pageCountSaveThread.start(); + } - m_initialized = true; + m_initialized = true; + } finally { + lock.unlock(); + } } /** * Handle the shutdown event via the page counter thread. */ - private synchronized void handleShutdown() { - LOG.info( "handleShutdown: The counter store thread was shut down." 
); + private void handleShutdown() { + lock.lock(); + try { + LOG.info( "handleShutdown: The counter store thread was shut down." ); - cleanup(); + cleanup(); - if( m_counters != null ) { + if( m_counters != null ) { - m_dirty = true; - storeCounters(); + m_dirty = true; + storeCounters(); - m_counters.clear(); - m_counters = null; + m_counters.clear(); + m_counters = null; - m_storage.clear(); - m_storage = null; - } + m_storage.clear(); + m_storage = null; + } - m_initialized = false; + m_initialized = false; - m_pageCountSaveThread = null; + m_pageCountSaveThread = null; + } finally { + lock.unlock(); + } } /** @@ -342,7 +370,9 @@ public String execute( final Context context, final Map< String, String > params } } - synchronized( this ) { + lock.lock(); + try { + Counter counter = m_counters.get( pagename ); // only count in view mode, keep storage values in sync @@ -446,6 +476,8 @@ public String execute( final Context context, final Map< String, String > params // let the engine render the list result = engine.getManager( RenderingManager.class ).textToHTML( context, buf.toString() ); } + } finally { + lock.unlock(); } } return result; @@ -509,7 +541,8 @@ int getCount( final Object key ) private void loadCounters() { if( m_counters != null && m_storage != null ) { LOG.info( "Loading counters." ); - synchronized( this ) { + lock.lock(); + try { try( final InputStream fis = Files.newInputStream( new File( m_workDir, COUNTER_PAGE ).toPath() ) ) { m_storage.load( fis ); } catch( final IOException ioe ) { @@ -520,8 +553,10 @@ private void loadCounters() { for( final Entry< ?, ? > entry : m_storage.entrySet() ) { m_counters.put( ( String )entry.getKey(), new Counter( ( String )entry.getValue() ) ); } - + LOG.info( "Loaded " + m_counters.size() + " counter values." ); + } finally { + lock.unlock(); } } } @@ -532,7 +567,8 @@ private void loadCounters() { void storeCounters() { if( m_counters != null && m_storage != null && m_dirty ) { LOG.info( "Storing " + m_counters.size() + " counter values." ); - synchronized( this ) { + lock.lock(); + try { // Write out the collection of counters try( final OutputStream fos = Files.newOutputStream( new File( m_workDir, COUNTER_PAGE ).toPath() ) ) { m_storage.store( fos, "\n# The number of times each page has been viewed.\n# Do not modify.\n" ); @@ -542,6 +578,8 @@ void storeCounters() { } catch( final IOException ioe ) { LOG.error( "Couldn't store counters values: " + ioe.getMessage() ); } + } finally { + lock.unlock(); } } } @@ -552,9 +590,15 @@ void storeCounters() { * @param thrd thread that can be the current background thread. * @return boolean true if the thread is still the current background thread. 
*/ - private synchronized boolean isRunning( final Thread thrd ) + private boolean isRunning( final Thread thrd ) { - return m_initialized && thrd == m_pageCountSaveThread; + lock.lock(); + try { + return m_initialized && thrd == m_pageCountSaveThread; + } finally { + lock.unlock(); + } + } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java index e8523d0717..8d5bcf415d 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java @@ -45,6 +45,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.NoSuchElementException; import java.util.Properties; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.ReentrantLock; /** @@ -62,6 +63,20 @@ public class CachingAttachmentProvider implements AttachmentProvider { private boolean allRequested; private final AtomicLong attachments = new AtomicLong( 0L ); + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public CachingAttachmentProvider() { + lock = new ReentrantLock(); + } + /** * {@inheritDoc} */ @@ -141,7 +156,8 @@ public List< Attachment > listAllChanged( final Date timestamp ) throws Provider all = provider.listAllChanged( timestamp ); // Make sure that all attachments are in the cache. - synchronized( this ) { + lock.lock(); + try { for( final Attachment att : all ) { cachingManager.put( CachingManager.CACHE_ATTACHMENTS, att.getName(), att ); } @@ -149,6 +165,8 @@ public List< Attachment > listAllChanged( final Date timestamp ) throws Provider allRequested = true; attachments.set( all.size() ); } + } finally { + lock.unlock(); } } else { final List< String > keys = cachingManager.keys( CachingManager.CACHE_ATTACHMENTS ); @@ -240,14 +258,20 @@ public void deleteAttachment( final Attachment att ) throws ProviderException { * @return A plain string with all the above-mentioned values. */ @Override - public synchronized String getProviderInfo() { - final CacheInfo attCacheInfo = cachingManager.info( CachingManager.CACHE_ATTACHMENTS ); - final CacheInfo attColCacheInfo = cachingManager.info( CachingManager.CACHE_ATTACHMENTS_COLLECTION ); - return "Real provider: " + provider.getClass().getName() + - ". Attachment cache hits: " + attCacheInfo.getHits() + - ". Attachment cache misses: " + attCacheInfo.getMisses() + - ". Attachment collection cache hits: " + attColCacheInfo.getHits() + - ". Attachment collection cache misses: " + attColCacheInfo.getMisses(); + public String getProviderInfo() { + lock.lock(); + try { + final CacheInfo attCacheInfo = cachingManager.info( CachingManager.CACHE_ATTACHMENTS ); + final CacheInfo attColCacheInfo = cachingManager.info( CachingManager.CACHE_ATTACHMENTS_COLLECTION ); + return "Real provider: " + provider.getClass().getName() + + ". Attachment cache hits: " + attCacheInfo.getHits() + + ". Attachment cache misses: " + attCacheInfo.getMisses() + + ". Attachment collection cache hits: " + attColCacheInfo.getHits() + + ". 
Attachment collection cache misses: " + attColCacheInfo.getMisses(); + } finally { + lock.unlock(); + } + } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java index 5b1260f468..745e58b734 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java @@ -45,6 +45,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Properties; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.ReentrantLock; /** @@ -70,6 +71,20 @@ public class CachingProvider implements PageProvider { private boolean allRequested; private final AtomicLong pages = new AtomicLong( 0L ); + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public CachingProvider() { + lock = new ReentrantLock(); + } + /** * {@inheritDoc} */ @@ -220,7 +235,8 @@ private String getTextFromCache( final String pageName ) throws ProviderExceptio */ @Override public void putPageText( final Page page, final String text ) throws ProviderException { - synchronized( this ) { + lock.lock(); + try { provider.putPageText( page, text ); page.setLastModified( new Date() ); @@ -230,6 +246,8 @@ public void putPageText( final Page page, final String text ) throws ProviderExc cachingManager.remove( CachingManager.CACHE_PAGES_HISTORY, page.getName() ); getPageInfoFromCache( page.getName() ); + } finally { + lock.unlock(); } pages.incrementAndGet(); } @@ -243,11 +261,14 @@ public Collection< Page > getAllPages() throws ProviderException { if ( !allRequested ) { all = provider.getAllPages(); // Make sure that all pages are in the cache. - synchronized( this ) { + lock.lock(); + try { for( final Page p : all ) { cachingManager.put( CachingManager.CACHE_PAGES, p.getName(), p ); } allRequested = true; + } finally { + lock.unlock(); } pages.set( all.size() ); } else { @@ -349,14 +370,19 @@ public List< Page > getVersionHistory( final String pageName) throws ProviderExc * @return A plain string with all the above-mentioned values. */ @Override - public synchronized String getProviderInfo() { - final CacheInfo pageCacheInfo = cachingManager.info( CachingManager.CACHE_PAGES ); - final CacheInfo pageHistoryCacheInfo = cachingManager.info( CachingManager.CACHE_PAGES_HISTORY ); - return "Real provider: " + provider.getClass().getName()+ - ". Page cache hits: " + pageCacheInfo.getHits() + - ". Page cache misses: " + pageCacheInfo.getMisses() + - ". History cache hits: " + pageHistoryCacheInfo.getHits() + - ". History cache misses: " + pageHistoryCacheInfo.getMisses(); + public String getProviderInfo() { + lock.lock(); + try { + final CacheInfo pageCacheInfo = cachingManager.info( CachingManager.CACHE_PAGES ); + final CacheInfo pageHistoryCacheInfo = cachingManager.info( CachingManager.CACHE_PAGES_HISTORY ); + return "Real provider: " + provider.getClass().getName()+ + ". Page cache hits: " + pageCacheInfo.getHits() + + ". Page cache misses: " + pageCacheInfo.getMisses() + + ". History cache hits: " + pageHistoryCacheInfo.getHits() + + ". 
History cache misses: " + pageHistoryCacheInfo.getMisses(); + } finally { + lock.unlock(); + } } /** @@ -365,7 +391,8 @@ public synchronized String getProviderInfo() { @Override public void deleteVersion( final String pageName, final int version ) throws ProviderException { // Luckily, this is such a rare operation it is okay to synchronize against the whole thing. - synchronized( this ) { + lock.lock(); + try { final Page cached = getPageInfoFromCache( pageName ); final int latestcached = ( cached != null ) ? cached.getVersion() : Integer.MIN_VALUE; @@ -377,6 +404,8 @@ public void deleteVersion( final String pageName, final int version ) throws Pro provider.deleteVersion( pageName, version ); cachingManager.remove( CachingManager.CACHE_PAGES_HISTORY, pageName ); + } finally { + lock.unlock(); } if( version == PageProvider.LATEST_VERSION ) { pages.decrementAndGet(); @@ -389,11 +418,14 @@ public void deleteVersion( final String pageName, final int version ) throws Pro @Override public void deletePage( final String pageName ) throws ProviderException { // See note in deleteVersion(). - synchronized( this ) { + lock.lock(); + try { cachingManager.put( CachingManager.CACHE_PAGES, pageName, null ); cachingManager.put( CachingManager.CACHE_PAGES_TEXT, pageName, null ); cachingManager.put( CachingManager.CACHE_PAGES_HISTORY, pageName, null ); provider.deletePage( pageName ); + } finally { + lock.unlock(); } pages.decrementAndGet(); } @@ -404,7 +436,8 @@ public void deletePage( final String pageName ) throws ProviderException { @Override public void movePage( final String from, final String to ) throws ProviderException { provider.movePage( from, to ); - synchronized( this ) { + lock.lock(); + try { // Clear any cached version of the old page and new page cachingManager.remove( CachingManager.CACHE_PAGES, from ); cachingManager.remove( CachingManager.CACHE_PAGES_TEXT, from ); @@ -413,6 +446,8 @@ public void movePage( final String from, final String to ) throws ProviderExcept cachingManager.remove( CachingManager.CACHE_PAGES, to ); cachingManager.remove( CachingManager.CACHE_PAGES_TEXT, to ); cachingManager.remove( CachingManager.CACHE_PAGES_HISTORY, to ); + } finally { + lock.unlock(); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java index 35e70e2d6d..d6908853ca 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java @@ -42,6 +42,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Date; import java.util.List; import java.util.Properties; +import java.util.concurrent.locks.ReentrantLock; /** * Provides a simple directory based repository for Wiki pages. @@ -81,6 +82,20 @@ public class VersioningFileProvider extends AbstractFileProvider { private CachedProperties m_cachedProperties; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. 
+ * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public VersioningFileProvider() { + lock = new ReentrantLock(); + } + /** * {@inheritDoc} */ @@ -268,21 +283,26 @@ private int realVersion( final String page, final int requestedVersion ) throws * {@inheritDoc} */ @Override - public synchronized String getPageText( final String page, int version ) throws ProviderException { - final File dir = findOldPageDir( page ); + public String getPageText( final String page, int version ) throws ProviderException { + lock.lock(); + try { + final File dir = findOldPageDir( page ); - version = realVersion( page, version ); - if( version == -1 ) { - // We can let the FileSystemProvider take care of these requests. - return super.getPageText( page, PageProvider.LATEST_VERSION ); - } + version = realVersion( page, version ); + if( version == -1 ) { + // We can let the FileSystemProvider take care of these requests. + return super.getPageText( page, PageProvider.LATEST_VERSION ); + } - final File pageFile = new File( dir, ""+version+FILE_EXT ); - if( !pageFile.exists() ) { - throw new NoSuchVersionException("Version "+version+"does not exist."); - } + final File pageFile = new File( dir, ""+version+FILE_EXT ); + if( !pageFile.exists() ) { + throw new NoSuchVersionException("Version "+version+"does not exist."); + } - return readFile( pageFile ); + return readFile( pageFile ); + } finally { + lock.unlock(); + } } @@ -325,76 +345,82 @@ private String readFile( final File pagedata ) throws ProviderException { * {@inheritDoc} */ @Override - public synchronized void putPageText( final Page page, final String text ) throws ProviderException { - // This is a bit complicated. We'll first need to copy the old file to be the newest file. - final int latest = findLatestVersion( page.getName() ); - final File pageDir = findOldPageDir( page.getName() ); - if( !pageDir.exists() ) { - pageDir.mkdirs(); - } - + public void putPageText( final Page page, final String text ) throws ProviderException { + lock.lock(); try { - // Copy old data to safety, if one exists. - final File oldFile = findPage( page.getName() ); - - // Figure out which version should the old page be? Numbers should always start at 1. - // "most recent" = -1 ==> 1 - // "first" = 1 ==> 2 - int versionNumber = (latest > 0) ? latest : 1; - final boolean firstUpdate = (versionNumber == 1); - - if( oldFile != null && oldFile.exists() ) { - final File pageFile = new File( pageDir, versionNumber + FILE_EXT ); - try( final InputStream in = new BufferedInputStream( Files.newInputStream( oldFile.toPath() ) ); - final OutputStream out = new BufferedOutputStream( Files.newOutputStream( pageFile.toPath() ) ) ) { - FileUtil.copyContents( in, out ); - - // We need also to set the date, since we rely on this. - pageFile.setLastModified( oldFile.lastModified() ); + // This is a bit complicated. We'll first need to copy the old file to be the newest file. + final int latest = findLatestVersion( page.getName() ); + final File pageDir = findOldPageDir( page.getName() ); + if( !pageDir.exists() ) { + pageDir.mkdirs(); + } - // Kludge to make the property code to work properly. - versionNumber++; + try { + // Copy old data to safety, if one exists. + final File oldFile = findPage( page.getName() ); + + // Figure out which version should the old page be? Numbers should always start at 1. + // "most recent" = -1 ==> 1 + // "first" = 1 ==> 2 + int versionNumber = (latest > 0) ? 
latest : 1; + final boolean firstUpdate = (versionNumber == 1); + + if( oldFile != null && oldFile.exists() ) { + final File pageFile = new File( pageDir, versionNumber + FILE_EXT ); + try( final InputStream in = new BufferedInputStream( Files.newInputStream( oldFile.toPath() ) ); + final OutputStream out = new BufferedOutputStream( Files.newOutputStream( pageFile.toPath() ) ) ) { + FileUtil.copyContents( in, out ); + + // We need also to set the date, since we rely on this. + pageFile.setLastModified( oldFile.lastModified() ); + + // Kludge to make the property code to work properly. + versionNumber++; + } } - } - // Let superclass handler writing data to a new version. - super.putPageText( page, text ); + // Let superclass handler writing data to a new version. + super.putPageText( page, text ); - // Finally, write page version data. - // FIXME: No rollback available. - final Properties props = getPageProperties( page.getName() ); + // Finally, write page version data. + // FIXME: No rollback available. + final Properties props = getPageProperties( page.getName() ); - String authorFirst = null; - // if the following file exists, we are NOT migrating from FileSystemProvider - final File pagePropFile = new File(getPageDirectory() + File.separator + PAGEDIR + File.separator + mangleName(page.getName()) + File.separator + "page" + FileSystemProvider.PROP_EXT); - if( firstUpdate && ! pagePropFile.exists() ) { - // we might not yet have a versioned author because the old page was last maintained by FileSystemProvider - final Properties props2 = getHeritagePageProperties( page.getName() ); + String authorFirst = null; + // if the following file exists, we are NOT migrating from FileSystemProvider + final File pagePropFile = new File(getPageDirectory() + File.separator + PAGEDIR + File.separator + mangleName(page.getName()) + File.separator + "page" + FileSystemProvider.PROP_EXT); + if( firstUpdate && ! pagePropFile.exists() ) { + // we might not yet have a versioned author because the old page was last maintained by FileSystemProvider + final Properties props2 = getHeritagePageProperties( page.getName() ); - // remember the simulated original author (or something) in the new properties - authorFirst = props2.getProperty( "1.author", "unknown" ); - props.setProperty( "1.author", authorFirst ); - } + // remember the simulated original author (or something) in the new properties + authorFirst = props2.getProperty( "1.author", "unknown" ); + props.setProperty( "1.author", authorFirst ); + } - String newAuthor = page.getAuthor(); - if ( newAuthor == null ) { - newAuthor = ( authorFirst != null ) ? authorFirst : "unknown"; - } - page.setAuthor(newAuthor); - props.setProperty( versionNumber + ".author", newAuthor ); + String newAuthor = page.getAuthor(); + if ( newAuthor == null ) { + newAuthor = ( authorFirst != null ) ? 
authorFirst : "unknown"; + } + page.setAuthor(newAuthor); + props.setProperty( versionNumber + ".author", newAuthor ); - final String changeNote = page.getAttribute( Page.CHANGENOTE ); - if( changeNote != null ) { - props.setProperty( versionNumber + ".changenote", changeNote ); - } + final String changeNote = page.getAttribute( Page.CHANGENOTE ); + if( changeNote != null ) { + props.setProperty( versionNumber + ".changenote", changeNote ); + } - // Get additional custom properties from page and add to props - getCustomProperties( page, props ); - putPageProperties( page.getName(), props ); - } catch( final IOException e ) { - LOG.error( "Saving failed", e ); - throw new ProviderException("Could not save page text: "+e.getMessage()); + // Get additional custom properties from page and add to props + getCustomProperties( page, props ); + putPageProperties( page.getName(), props ); + } catch( final IOException e ) { + LOG.error( "Saving failed", e ); + throw new ProviderException("Could not save page text: "+e.getMessage()); + } + } finally { + lock.unlock(); } + } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java b/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java index 329672d744..7a08d55765 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java @@ -47,6 +47,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.security.NoSuchAlgorithmException; import java.util.*; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; /* @@ -132,6 +133,16 @@ public class DefaultReferenceManager extends BasePageFilter implements Reference /** We use this also a generic serialization id */ private static final long serialVersionUID = 4L; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + /** * Builds a new ReferenceManager. * @@ -148,6 +159,7 @@ public DefaultReferenceManager( final Engine engine ) { // m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy ); m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo ); + lock = new ReentrantLock(); } /** @@ -233,53 +245,63 @@ public void initialize( final Collection< Page > pages ) throws ProviderExceptio * Reads the serialized data from the disk back to memory. 
Returns the date when the data was last written on disk */ @SuppressWarnings("unchecked") - private synchronized long unserializeFromDisk() throws IOException, ClassNotFoundException { - final long saved; + private long unserializeFromDisk() throws IOException, ClassNotFoundException { + lock.lock(); + try { + final long saved; - final File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE ); - try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) { - final StopWatch sw = new StopWatch(); - sw.start(); + final File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE ); + try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) { + final StopWatch sw = new StopWatch(); + sw.start(); - final long ver = in.readLong(); + final long ver = in.readLong(); - if( ver != serialVersionUID ) { - throw new IOException("File format has changed; I need to recalculate references."); - } + if( ver != serialVersionUID ) { + throw new IOException("File format has changed; I need to recalculate references."); + } - saved = in.readLong(); - m_refersTo = ( Map< String, Collection< String > > ) in.readObject(); - m_referredBy = ( Map< String, Set< String > > ) in.readObject(); + saved = in.readLong(); + m_refersTo = ( Map< String, Collection< String > > ) in.readObject(); + m_referredBy = ( Map< String, Set< String > > ) in.readObject(); - m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy ); - m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo ); + m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy ); + m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo ); - sw.stop(); - LOG.debug( "Read serialized data successfully in {}", sw ); - } + sw.stop(); + LOG.debug( "Read serialized data successfully in {}", sw ); + } - return saved; + return saved; + } finally { + lock.unlock(); + } } /** * Serializes hashmaps to disk. The format is private, don't touch it. */ - private synchronized void serializeToDisk() { - final File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE ); - try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) { - final StopWatch sw = new StopWatch(); - sw.start(); + private void serializeToDisk() { + lock.lock(); + try { + final File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE ); + try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) { + final StopWatch sw = new StopWatch(); + sw.start(); - out.writeLong( serialVersionUID ); - out.writeLong( System.currentTimeMillis() ); // Timestamp - out.writeObject( m_refersTo ); - out.writeObject( m_referredBy ); + out.writeLong( serialVersionUID ); + out.writeLong( System.currentTimeMillis() ); // Timestamp + out.writeObject( m_refersTo ); + out.writeObject( m_referredBy ); - sw.stop(); + sw.stop(); - LOG.debug( "serialization done - took {}", sw ); - } catch( final IOException ioe ) { - LOG.error( "Unable to serialize!", ioe ); + LOG.debug( "serialization done - took {}", sw ); + } catch( final IOException ioe ) { + LOG.error( "Unable to serialize!", ioe ); + } + } finally { + lock.unlock(); } } @@ -301,101 +323,110 @@ private String getHashFileName( final String pageName ) { /** * Reads the serialized data from the disk back to memory. 
Returns the date when the data was last written on disk */ - private synchronized long unserializeAttrsFromDisk( final Page p ) throws IOException, ClassNotFoundException { - long saved = 0L; + private long unserializeAttrsFromDisk( final Page p ) throws IOException, ClassNotFoundException { + lock.lock(); + try { + long saved = 0L; + + // Find attribute cache, and check if it exists + final String hashName = getHashFileName( p.getName() ); + if( hashName != null ) { + File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR ); + f = new File( f, hashName ); + if( !f.exists() ) { + return 0L; + } - // Find attribute cache, and check if it exists - final String hashName = getHashFileName( p.getName() ); - if( hashName != null ) { - File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR ); - f = new File( f, hashName ); - if( !f.exists() ) { - return 0L; - } + try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) { + final StopWatch sw = new StopWatch(); + sw.start(); + LOG.debug( "Deserializing attributes for {}", p.getName() ); - try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) { - final StopWatch sw = new StopWatch(); - sw.start(); - LOG.debug( "Deserializing attributes for {}", p.getName() ); + final long ver = in.readLong(); + if( ver != serialVersionUID ) { + LOG.debug( "File format has changed; cannot deserialize." ); + return 0L; + } - final long ver = in.readLong(); - if( ver != serialVersionUID ) { - LOG.debug( "File format has changed; cannot deserialize." ); - return 0L; - } + saved = in.readLong(); + final String name = in.readUTF(); + if( !name.equals( p.getName() ) ) { + LOG.debug( "File name does not match ({}), skipping...", name ); + return 0L; // Not here + } - saved = in.readLong(); - final String name = in.readUTF(); - if( !name.equals( p.getName() ) ) { - LOG.debug( "File name does not match ({}), skipping...", name ); - return 0L; // Not here - } + final long entries = in.readLong(); + for( int i = 0; i < entries; i++ ) { + final String key = in.readUTF(); + final Object value = in.readObject(); + p.setAttribute( key, value ); + LOG.debug( " attr: {}={}", key, value ); + } - final long entries = in.readLong(); - for( int i = 0; i < entries; i++ ) { - final String key = in.readUTF(); - final Object value = in.readObject(); - p.setAttribute( key, value ); - LOG.debug( " attr: {}={}", key, value ); + sw.stop(); + LOG.debug( "Read serialized data for {} successfully in {}", name, sw ); + p.setHasMetadata(); } - - sw.stop(); - LOG.debug( "Read serialized data for {} successfully in {}", name, sw ); - p.setHasMetadata(); } - } - return saved; + return saved; + } finally { + lock.unlock(); + } } /** * Serializes hashmaps to disk. The format is private, don't touch it. 
*/ - private synchronized void serializeAttrsToDisk( final Page p ) { - final StopWatch sw = new StopWatch(); - sw.start(); + private void serializeAttrsToDisk( final Page p ) { + lock.lock(); + try { + final StopWatch sw = new StopWatch(); + sw.start(); - final String hashName = getHashFileName( p.getName() ); - if( hashName != null ) { - File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR ); - if( !f.exists() ) { - f.mkdirs(); - } + final String hashName = getHashFileName( p.getName() ); + if( hashName != null ) { + File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR ); + if( !f.exists() ) { + f.mkdirs(); + } - // Create a digest for the name - f = new File( f, hashName ); + // Create a digest for the name + f = new File( f, hashName ); - try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) { - // new Set to avoid concurrency issues - final Set< Map.Entry < String, Object > > entries = new HashSet<>( p.getAttributes().entrySet() ); + try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) { + // new Set to avoid concurrency issues + final Set< Map.Entry < String, Object > > entries = new HashSet<>( p.getAttributes().entrySet() ); - if(entries.isEmpty()) { - // Nothing to serialize, therefore we will just simply remove the serialization file so that the - // next time we boot, we don't deserialize old data. - f.delete(); - return; - } + if(entries.isEmpty()) { + // Nothing to serialize, therefore we will just simply remove the serialization file so that the + // next time we boot, we don't deserialize old data. + f.delete(); + return; + } - out.writeLong( serialVersionUID ); - out.writeLong( System.currentTimeMillis() ); // Timestamp - out.writeUTF( p.getName() ); - out.writeLong( entries.size() ); + out.writeLong( serialVersionUID ); + out.writeLong( System.currentTimeMillis() ); // Timestamp + out.writeUTF( p.getName() ); + out.writeLong( entries.size() ); - for( final Map.Entry< String, Object > e : entries ) { - if( e.getValue() instanceof Serializable ) { - out.writeUTF( e.getKey() ); - out.writeObject( e.getValue() ); + for( final Map.Entry< String, Object > e : entries ) { + if( e.getValue() instanceof Serializable ) { + out.writeUTF( e.getKey() ); + out.writeObject( e.getValue() ); + } } - } - } catch( final IOException e ) { - LOG.error( "Unable to serialize!", e ); - } finally { - sw.stop(); - LOG.debug( "serialization for {} done - took {}", p.getName(), sw ); + } catch( final IOException e ) { + LOG.error( "Unable to serialize!", e ); + } finally { + sw.stop(); + LOG.debug( "serialization for {} done - took {}", p.getName(), sw ); + } } + } finally { + lock.unlock(); } - } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java b/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java index 20cf0d366b..2e3122137d 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java @@ -43,6 +43,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Objects; import java.util.Properties; import java.util.Set; +import java.util.concurrent.locks.ReentrantLock; /** @@ -64,6 +65,16 @@ public class DefaultRSSGenerator implements RSSGenerator { private static final int MAX_CHARACTERS = Integer.MAX_VALUE-1; + /** + * A lock used to ensure thread safety when 
accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + /** * Builds the RSS generator for a given Engine. * @@ -75,6 +86,7 @@ public DefaultRSSGenerator( final Engine engine, final Properties properties ) { m_channelDescription = properties.getProperty( PROP_CHANNEL_DESCRIPTION, m_channelDescription ); m_channelLanguage = properties.getProperty( PROP_CHANNEL_LANGUAGE, m_channelLanguage ); m_rssFile = TextUtil.getStringProperty( properties, DefaultRSSGenerator.PROP_RSSFILE, "rss.rdf" ); + this.lock = new ReentrantLock(); } /** @@ -200,14 +212,24 @@ public String generateFeed( final Context wikiContext, final List< Page > change /** {@inheritDoc} */ @Override - public synchronized boolean isEnabled() { - return m_enabled; + public boolean isEnabled() { + lock.lock(); + try { + return m_enabled; + } finally { + lock.unlock(); + } } /** {@inheritDoc} */ @Override - public synchronized void setEnabled( final boolean enabled ) { - m_enabled = enabled; + public void setEnabled( final boolean enabled ) { + lock.lock(); + try { + m_enabled = enabled; + } finally { + lock.unlock(); + } } /** {@inheritDoc} */ diff --git a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java index 9593712018..758284f006 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java @@ -84,6 +84,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Properties; import java.util.concurrent.Executor; import java.util.concurrent.Executors; +import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; @@ -133,6 +134,20 @@ public class LuceneSearchProvider implements SearchProvider { private static final String PUNCTUATION_TO_SPACES = StringUtils.repeat( " ", TextUtil.PUNCTUATION_CHARS_ALLOWED.length() ); + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock; + + public LuceneSearchProvider() { + lock = new ReentrantLock(); + } + /** {@inheritDoc} */ @Override public void initialize( final Engine engine, final Properties props ) throws NoRequiredPropertyException, IOException { @@ -303,23 +318,28 @@ protected String getAttachmentContent( final Attachment att ) { * @param page The WikiPage to check * @param text The page text to index. */ - protected synchronized void updateLuceneIndex( final Page page, final String text ) { - LOG.debug( "Updating Lucene index for page '{}'...", page.getName() ); - pageRemoved( page ); + protected void updateLuceneIndex( final Page page, final String text ) { + lock.lock(); + try { + LOG.debug( "Updating Lucene index for page '{}'...", page.getName() ); + pageRemoved( page ); + + // Now add back the new version. 
+ try( final Directory luceneDir = new NIOFSDirectory( new File( m_luceneDirectory ).toPath() ); + final IndexWriter writer = getIndexWriter( luceneDir ) ) { + luceneIndexPage( page, text, writer ); + } catch( final IOException e ) { + LOG.error( "Unable to update page '{}' from Lucene index", page.getName(), e ); + // reindexPage( page ); + } catch( final Exception e ) { + LOG.error( "Unexpected Lucene exception - please check configuration!", e ); + // reindexPage( page ); + } - // Now add back the new version. - try( final Directory luceneDir = new NIOFSDirectory( new File( m_luceneDirectory ).toPath() ); - final IndexWriter writer = getIndexWriter( luceneDir ) ) { - luceneIndexPage( page, text, writer ); - } catch( final IOException e ) { - LOG.error( "Unable to update page '{}' from Lucene index", page.getName(), e ); - // reindexPage( page ); - } catch( final Exception e ) { - LOG.error( "Unexpected Lucene exception - please check configuration!", e ); - // reindexPage( page ); + LOG.debug( "Done updating Lucene index for page '{}'.", page.getName() ); + } finally { + lock.unlock(); } - - LOG.debug( "Done updating Lucene index for page '{}'.", page.getName() ); } private Analyzer getLuceneAnalyzer() throws ProviderException { @@ -389,8 +409,11 @@ protected Document luceneIndexPage( final Page page, final String text, final In field = new Field( LUCENE_PAGE_KEYWORDS, page.getAttribute( "keywords" ).toString(), TextField.TYPE_STORED ); doc.add( field ); } - synchronized( writer ) { + lock.lock(); + try { writer.addDocument( doc ); + } finally { + lock.unlock(); } return doc; @@ -400,13 +423,18 @@ protected Document luceneIndexPage( final Page page, final String text, final In * {@inheritDoc} */ @Override - public synchronized void pageRemoved( final Page page ) { - try( final Directory luceneDir = new NIOFSDirectory( new File( m_luceneDirectory ).toPath() ); - final IndexWriter writer = getIndexWriter( luceneDir ) ) { - final Query query = new TermQuery( new Term( LUCENE_ID, page.getName() ) ); - writer.deleteDocuments( query ); - } catch( final Exception e ) { - LOG.error( "Unable to remove page '{}' from Lucene index", page.getName(), e ); + public void pageRemoved( final Page page ) { + lock.lock(); + try { + try( final Directory luceneDir = new NIOFSDirectory( new File( m_luceneDirectory ).toPath() ); + final IndexWriter writer = getIndexWriter( luceneDir ) ) { + final Query query = new TermQuery( new Term( LUCENE_ID, page.getName() ) ); + writer.deleteDocuments( query ); + } catch( final Exception e ) { + LOG.error( "Unable to remove page '{}' from Lucene index", page.getName(), e ); + } + } finally { + lock.unlock(); } } @@ -540,6 +568,16 @@ private static final class LuceneUpdater extends WikiBackgroundThread { private WatchDog m_watchdog; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. 
+     *
+     * @see java.util.concurrent.locks.ReentrantLock
+     */
+    private static final ReentrantLock lock = new ReentrantLock();
+
         private LuceneUpdater( final Engine engine, final LuceneSearchProvider provider, final int initialDelay, final int indexDelay ) {
             super( engine, indexDelay );
             m_provider = provider;
@@ -567,16 +605,19 @@ public void startupTask() throws Exception {
         @Override
         public void backgroundTask() {
             m_watchdog.enterState( "Emptying index queue", 60 );
-
-            synchronized( m_provider.m_updates ) {
-                while(!m_provider.m_updates.isEmpty()) {
+            lock.lock();
+            try {
+                while( !m_provider.m_updates.isEmpty() ) {
                     final Object[] pair = m_provider.m_updates.remove( 0 );
                     final Page page = ( Page )pair[ 0 ];
                     final String text = ( String )pair[ 1 ];
                     m_provider.updateLuceneIndex( page, text );
                 }
+            } finally {
+                lock.unlock();
             }
+
             m_watchdog.exitState();
         }
 
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java b/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java
index e85c86af46..d85ec435c0 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java
@@ -30,6 +30,7 @@ Licensed to the Apache Software Foundation (ASF) under one
 import javax.management.NotCompliantMBeanException;
 
 import java.util.Collection;
+import java.util.concurrent.locks.ReentrantLock;
 
 
 /**
@@ -46,9 +47,20 @@ public class SearchManagerBean extends SimpleAdminBean {
 
     private WikiBackgroundThread m_updater;
 
+    /**
+     * A lock used to ensure thread safety when accessing shared resources.
+     * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+     * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+     * waiting to acquire a lock.
+     *
+     * @see java.util.concurrent.locks.ReentrantLock
+     */
+    private final ReentrantLock lock;
+
     public SearchManagerBean( final Engine engine ) throws NotCompliantMBeanException {
         super();
         initialize( engine );
+        this.lock = new ReentrantLock();
     }
 
     @Override
@@ -74,49 +86,54 @@ public String getTitle()
      *
* This method prevents itself from being called twice. */ - public synchronized void reload() { - if( m_updater == null ) { - m_updater = new WikiBackgroundThread( m_engine, 0 ) { + public void reload() { + lock.lock(); + try { + if( m_updater == null ) { + m_updater = new WikiBackgroundThread( m_engine, 0 ) { - int m_count; - int m_max; + int m_count; + int m_max; - @Override - public void startupTask() throws Exception { - super.startupTask(); + @Override + public void startupTask() throws Exception { + super.startupTask(); - setName( "Reindexer started" ); - } + setName( "Reindexer started" ); + } + + @Override + public void backgroundTask() throws Exception { + final Collection< Page > allPages = m_engine.getManager( PageManager.class ).getAllPages(); - @Override - public void backgroundTask() throws Exception { - final Collection< Page > allPages = m_engine.getManager( PageManager.class ).getAllPages(); + final SearchManager mgr = m_engine.getManager( SearchManager.class ); + m_max = allPages.size(); - final SearchManager mgr = m_engine.getManager( SearchManager.class ); - m_max = allPages.size(); + final ProgressItem pi = new ProgressItem() { - final ProgressItem pi = new ProgressItem() { + @Override + public int getProgress() { + return 100 * m_count / m_max; + } + }; + m_engine.getManager( ProgressManager.class ).startProgress( pi, PROGRESS_ID ); - @Override - public int getProgress() { - return 100 * m_count / m_max; + for( final Page page : allPages ) { + mgr.reindexPage( page ); + m_count++; } - }; - m_engine.getManager( ProgressManager.class ).startProgress( pi, PROGRESS_ID ); - for( final Page page : allPages ) { - mgr.reindexPage( page ); - m_count++; + m_engine.getManager( ProgressManager.class ).stopProgress( PROGRESS_ID ); + shutdown(); + m_updater = null; } - m_engine.getManager( ProgressManager.class ).stopProgress( PROGRESS_ID ); - shutdown(); - m_updater = null; - } - - }; + }; - m_updater.start(); + m_updater.start(); + } + } finally { + lock.unlock(); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java index 3f42bebf6f..20be5cb527 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java @@ -24,6 +24,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.io.Serializable; import java.security.Principal; import java.util.*; +import java.util.concurrent.locks.ReentrantLock; /** * Abstract superclass that provides a complete implementation of most Step methods; subclasses need only implement {@link #execute(Context)} and @@ -58,6 +59,16 @@ public abstract class AbstractStep implements Step { private boolean m_started; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock = new ReentrantLock(); + /** * Protected constructor that creates a new Step with a specified message key. After construction, the method * {@link #setWorkflow(int, Map)} should be called. 
@@ -146,8 +157,13 @@ public final String getMessageKey() { * {@inheritDoc} */ @Override - public final synchronized Outcome getOutcome() { - return m_outcome; + public final Outcome getOutcome() { + lock.lock(); + try { + return m_outcome; + } finally { + lock.unlock(); + } } /** @@ -178,35 +194,46 @@ public final boolean isStarted() { * {@inheritDoc} */ @Override - public final synchronized void setOutcome(final Outcome outcome ) { - // Is this an allowed Outcome? - if( !m_successors.containsKey( outcome ) ) { - if( !Outcome.STEP_CONTINUE.equals( outcome ) && !Outcome.STEP_ABORT.equals( outcome ) ) { - throw new IllegalArgumentException( "Outcome " + outcome.getMessageKey() + " is not supported for this Step." ); + public final void setOutcome(final Outcome outcome ) { + lock.lock(); + try { + // Is this an allowed Outcome? + if( !m_successors.containsKey( outcome ) ) { + if( !Outcome.STEP_CONTINUE.equals( outcome ) && !Outcome.STEP_ABORT.equals( outcome ) ) { + throw new IllegalArgumentException( "Outcome " + outcome.getMessageKey() + " is not supported for this Step." ); + } } - } - // Is this a "completion" outcome? - if( outcome.isCompletion() ) { - if( m_completed ) { - throw new IllegalStateException( "Step has already been marked complete; cannot set again." ); + // Is this a "completion" outcome? + if( outcome.isCompletion() ) { + if( m_completed ) { + throw new IllegalStateException( "Step has already been marked complete; cannot set again." ); + } + m_completed = true; + m_end = new Date( System.currentTimeMillis() ); } - m_completed = true; - m_end = new Date( System.currentTimeMillis() ); + m_outcome = outcome; + } finally { + lock.unlock(); } - m_outcome = outcome; + } /** * {@inheritDoc} */ @Override - public final synchronized void start() throws WikiException { - if( m_started ) { - throw new IllegalStateException( "Step already started." ); + public final void start() throws WikiException { + lock.lock(); + try { + if( m_started ) { + throw new IllegalStateException( "Step already started." 
); + } + m_started = true; + m_start = new Date( System.currentTimeMillis() ); + } finally { + lock.unlock(); } - m_started = true; - m_start = new Date( System.currentTimeMillis() ); } /** @@ -226,9 +253,14 @@ public final Step getSuccessor(final Outcome outcome ) { * @param workflowContext the parent workflow context to set */ @Override - public final synchronized void setWorkflow(final int workflowId, final Map< String, Serializable > workflowContext ) { - this.workflowId = workflowId; - this.workflowContext = workflowContext; + public final void setWorkflow(final int workflowId, final Map< String, Serializable > workflowContext ) { + lock.lock(); + try { + this.workflowId = workflowId; + this.workflowContext = workflowContext; + } finally { + lock.unlock(); + } } public int getWorkflowId() { @@ -244,8 +276,13 @@ public Map< String, Serializable > getWorkflowContext() { * * @param message the error message */ - protected final synchronized void addError( final String message ) { - m_errors.add( message ); + protected final void addError( final String message ) { + lock.lock(); + try { + m_errors.add( message ); + } finally { + lock.unlock(); + } } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java index 7f833ecc59..c7f6393002 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java @@ -29,6 +29,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.concurrent.locks.ReentrantLock; /** @@ -62,6 +63,16 @@ public abstract class Decision extends AbstractStep { private final List m_facts; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock = new ReentrantLock(); + /** * Constructs a new Decision for a required "actor" Principal, having a default Outcome. * @@ -186,11 +197,16 @@ public boolean isReassignable() { * * @param actor the actor to reassign the Decision to */ - public final synchronized void reassign( final Principal actor ) { - if( isReassignable() ) { - m_actor = actor; - } else { - throw new IllegalArgumentException( "Decision cannot be reassigned." ); + public final void reassign( final Principal actor ) { + lock.lock(); + try { + if( isReassignable() ) { + m_actor = actor; + } else { + throw new IllegalArgumentException( "Decision cannot be reassigned." 
); + } + } finally { + lock.unlock(); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java index 83d18a3896..592c3eb533 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java @@ -30,6 +30,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Collection; import java.util.LinkedList; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.ReentrantLock; /** @@ -45,6 +46,16 @@ public class DecisionQueue implements Serializable { private final AtomicInteger next = new AtomicInteger( 1_000 ); + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock = new ReentrantLock(); + /** Constructs a new DecisionQueue. */ public DecisionQueue() { } @@ -54,9 +65,14 @@ public DecisionQueue() { * * @param decision the Decision to add */ - protected synchronized void add( final Decision decision ) { - m_queue.addLast( decision ); - decision.setId( next.getAndIncrement() ); + protected void add( final Decision decision ) { + lock.lock(); + try { + m_queue.addLast( decision ); + decision.setId( next.getAndIncrement() ); + } finally { + lock.unlock(); + } } /** @@ -74,8 +90,13 @@ protected Decision[] decisions() { * * @param decision the decision to remove */ - protected synchronized void remove( final Decision decision ) { - m_queue.remove( decision ); + protected void remove( final Decision decision ) { + lock.lock(); + try { + m_queue.remove( decision ); + } finally { + lock.unlock(); + } } /** @@ -133,14 +154,19 @@ public void decide( final Decision decision, final Outcome outcome, final Contex * @param owner the new owner * @throws WikiException never */ - public synchronized void reassign( final Decision decision, final Principal owner ) throws WikiException { - if( decision.isReassignable() ) { - decision.reassign( owner ); - - WikiEventEmitter.fireWorkflowEvent( decision, WorkflowEvent.DQ_REASSIGN ); - return; + public void reassign( final Decision decision, final Principal owner ) throws WikiException { + lock.lock(); + try { + if( decision.isReassignable() ) { + decision.reassign( owner ); + + WikiEventEmitter.fireWorkflowEvent( decision, WorkflowEvent.DQ_REASSIGN ); + return; + } + throw new IllegalStateException( "Reassignments not allowed for this decision." ); + } finally { + lock.unlock(); } - throw new IllegalStateException( "Reassignments not allowed for this decision." 
); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java index 93307699be..45a074eee4 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java @@ -39,6 +39,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.locks.ReentrantLock; /** @@ -62,6 +63,16 @@ public class DefaultWorkflowManager implements WorkflowManager, Serializable { private Engine m_engine; private int retainCompleted; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock = new ReentrantLock(); + /** * Constructs a new WorkflowManager, with an empty workflow cache. */ @@ -70,6 +81,7 @@ public DefaultWorkflowManager() { m_approvers = new ConcurrentHashMap<>(); m_queue = new DecisionQueue(); WikiEventEmitter.attach( this ); + // this.lock = new ReentrantLock(); } /** @@ -140,49 +152,59 @@ public void initialize( final Engine engine, final Properties props ) { * @return the date when the data was last written on disk or {@code 0} if there has been problems reading from disk. */ @SuppressWarnings( "unchecked" ) - synchronized long unserializeFromDisk( final File f ) { - long saved = 0L; - final StopWatch sw = new StopWatch(); - sw.start(); - try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) { - final long ver = in.readLong(); - if( ver != serialVersionUID ) { - LOG.warn( "File format has changed; Unable to recover workflows and decision queue from disk." ); - } else { - saved = in.readLong(); - m_workflows = ( Set< Workflow > )in.readObject(); - m_queue = ( DecisionQueue )in.readObject(); - m_completed = new CircularFifoQueue<>( retainCompleted ); - m_completed.addAll( ( Collection< Workflow > )in.readObject() ); - LOG.debug( "Read serialized data successfully in " + sw ); + long unserializeFromDisk( final File f ) { + lock.lock(); + try { + long saved = 0L; + final StopWatch sw = new StopWatch(); + sw.start(); + try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) { + final long ver = in.readLong(); + if( ver != serialVersionUID ) { + LOG.warn( "File format has changed; Unable to recover workflows and decision queue from disk." 
); + } else { + saved = in.readLong(); + m_workflows = ( Set< Workflow > )in.readObject(); + m_queue = ( DecisionQueue )in.readObject(); + m_completed = new CircularFifoQueue<>( retainCompleted ); + m_completed.addAll( ( Collection< Workflow > )in.readObject() ); + LOG.debug( "Read serialized data successfully in " + sw ); + } + } catch( final IOException | ClassNotFoundException e ) { + LOG.warn( "unable to recover from disk workflows and decision queue: " + e.getMessage() ); } - } catch( final IOException | ClassNotFoundException e ) { - LOG.warn( "unable to recover from disk workflows and decision queue: " + e.getMessage() ); - } - sw.stop(); + sw.stop(); - return saved; + return saved; + } finally { + lock.unlock(); + } } /** * Serializes workflows and decisionqueue to disk. The format is private, don't touch it. */ - synchronized void serializeToDisk( final File f ) { - try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) { - final StopWatch sw = new StopWatch(); - sw.start(); - - out.writeLong( serialVersionUID ); - out.writeLong( System.currentTimeMillis() ); // Timestamp - out.writeObject( m_workflows ); - out.writeObject( m_queue ); - out.writeObject( m_completed ); - - sw.stop(); - - LOG.debug( "serialization done - took " + sw ); - } catch( final IOException ioe ) { - LOG.error( "Unable to serialize!", ioe ); + void serializeToDisk( final File f ) { + lock.lock(); + try { + try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) { + final StopWatch sw = new StopWatch(); + sw.start(); + + out.writeLong( serialVersionUID ); + out.writeLong( System.currentTimeMillis() ); // Timestamp + out.writeObject( m_workflows ); + out.writeObject( m_queue ); + out.writeObject( m_completed ); + + sw.stop(); + + LOG.debug( "serialization done - took " + sw ); + } catch( final IOException ioe ) { + LOG.error( "Unable to serialize!", ioe ); + } + } finally { + lock.unlock(); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java index 87a68400be..9eed1d90fc 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java @@ -21,6 +21,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.io.Serializable; import java.security.Principal; import java.util.Map; +import java.util.concurrent.locks.ReentrantLock; /** @@ -38,6 +39,15 @@ public abstract class Task extends AbstractStep { private static final long serialVersionUID = 4630293957752430807L; private Step m_successor; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private final ReentrantLock lock = new ReentrantLock(); /** * Public constructor that creates a new Task with a specified message key. 
After construction, the protected method @@ -81,8 +91,13 @@ public final Principal getActor() { * * @param step the successor */ - public final synchronized void setSuccessor( final Step step ) { - m_successor = step; + public final void setSuccessor( final Step step ) { + lock.lock(); + try { + m_successor = step; + } finally { + lock.unlock(); + } } /** @@ -91,8 +106,13 @@ public final synchronized void setSuccessor( final Step step ) { * * @return the next step */ - public final synchronized Step getSuccessor() { - return m_successor; + public final Step getSuccessor() { + lock.lock(); + try { + return m_successor; + } finally { + lock.unlock(); + } } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java index 4a607e020b..55848038c3 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java @@ -23,11 +23,14 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.event.WikiEventEmitter; import org.apache.wiki.event.WorkflowEvent; +import java.io.IOException; +import java.io.ObjectInputStream; import java.io.Serializable; import java.security.Principal; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.ReentrantLock; /** @@ -205,6 +208,16 @@ public class Workflow implements Serializable { private Step m_currentStep; + /** + * A lock used to ensure thread safety when accessing shared resources. + * This lock provides more flexibility and capabilities than the intrinsic locking mechanism, + * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread + * waiting to acquire a lock. + * + * @see java.util.concurrent.locks.ReentrantLock + */ + private ReentrantLock lock = new ReentrantLock(); + /** * Constructs a new Workflow object with a supplied message key, owner Principal, and undefined unique identifier {@link #ID_NOT_SET}. * Once instantiated the Workflow is considered to be in the {@link #CREATED} state; a caller must explicitly invoke the @@ -233,25 +246,30 @@ public Workflow( final String messageKey, final Principal owner ) { * to completion, but it cannot be called twice. It finishes by calling the {@link #cleanup()} method to flush retained objects. * If the Workflow had been previously aborted, this method throws an IllegalStateException. */ - public final synchronized void abort( final Context context ) { - // Check corner cases: previous abort or completion - if( m_state == ABORTED ) { - throw new IllegalStateException( "The workflow has already been aborted." ); - } - if( m_state == COMPLETED ) { - throw new IllegalStateException( "The workflow has already completed." ); - } + public final void abort( final Context context ) { + lock.lock(); + try { + // Check corner cases: previous abort or completion + if( m_state == ABORTED ) { + throw new IllegalStateException( "The workflow has already been aborted." ); + } + if( m_state == COMPLETED ) { + throw new IllegalStateException( "The workflow has already completed." 
); + } - if( m_currentStep != null ) { - if( m_currentStep instanceof Decision ) { - WikiEventEmitter.fireWorkflowEvent( m_currentStep, WorkflowEvent.DQ_REMOVAL, context ); + if( m_currentStep != null ) { + if( m_currentStep instanceof Decision ) { + WikiEventEmitter.fireWorkflowEvent( m_currentStep, WorkflowEvent.DQ_REMOVAL, context ); + } + m_currentStep.setOutcome( Outcome.STEP_ABORT ); + m_history.addLast( m_currentStep ); } - m_currentStep.setOutcome( Outcome.STEP_ABORT ); - m_history.addLast( m_currentStep ); + m_state = ABORTED; + WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.ABORTED ); + cleanup(); + } finally { + lock.unlock(); } - m_state = ABORTED; - WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.ABORTED ); - cleanup(); } /** @@ -275,11 +293,16 @@ public final void addMessageArgument( final Serializable obj ) { * * @return the current actor */ - public final synchronized Principal getCurrentActor() { - if( m_currentStep == null ) { - return null; + public final Principal getCurrentActor() { + lock.lock(); + try { + if( m_currentStep == null ) { + return null; + } + return m_currentStep.getActor(); + } finally { + lock.unlock(); } - return m_currentStep.getActor(); } /** @@ -343,9 +366,14 @@ public final Date getEndTime() { * * @return the unique identifier */ - public final synchronized int getId() + public final int getId() { - return m_id; + lock.lock(); + try { + return m_id; + } finally { + lock.unlock(); + } } /** @@ -434,9 +462,14 @@ public final boolean isAborted() * * @return true if the workflow has been started but has no more steps to perform; false if not. */ - public final synchronized boolean isCompleted() { - // If current step is null, then we're done - return m_started && m_state == COMPLETED; + public final boolean isCompleted() { + lock.lock(); + try { + // If current step is null, then we're done + return m_started && m_state == COMPLETED; + } finally { + lock.unlock(); + } } /** @@ -468,20 +501,25 @@ public final Step getPreviousStep() * @param context current wiki context * @throws WikiException if the current task's {@link Task#execute( Context )} method throws an exception */ - public final synchronized void restart( final Context context ) throws WikiException { - if( m_state != WAITING ) { - throw new IllegalStateException( "Workflow is not paused; cannot restart." ); - } - WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.STARTED ); - m_state = RUNNING; - WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.RUNNING ); - - // Process current step + public final void restart( final Context context ) throws WikiException { + lock.lock(); try { - processCurrentStep( context ); - } catch( final WikiException e ) { - abort( context ); - throw e; + if( m_state != WAITING ) { + throw new IllegalStateException( "Workflow is not paused; cannot restart." 
); + } + WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.STARTED ); + m_state = RUNNING; + WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.RUNNING ); + + // Process current step + try { + processCurrentStep( context ); + } catch( final WikiException e ) { + abort( context ); + throw e; + } + } finally { + lock.unlock(); } } @@ -505,9 +543,14 @@ public final void setAttribute( final String attr, final Serializable obj ) { * * @param step the first step for the workflow */ - public final synchronized void setFirstStep( final Step step ) + public final void setFirstStep( final Step step ) { - m_firstStep = step; + lock.lock(); + try { + m_firstStep = step; + } finally { + lock.unlock(); + } } /** @@ -515,9 +558,14 @@ public final synchronized void setFirstStep( final Step step ) * * @param id the unique identifier */ - public final synchronized void setId( final int id ) + public final void setId( final int id ) { - this.m_id = id; + lock.lock(); + try { + this.m_id = id; + } finally { + lock.unlock(); + } } /** @@ -528,28 +576,33 @@ public final synchronized void setId( final int id ) * @param context current wiki context. * @throws WikiException if the current Step's {@link Step#start()} method throws an exception of any kind */ - public final synchronized void start( final Context context ) throws WikiException { - if( m_state == ABORTED ) { - throw new IllegalStateException( "Workflow cannot be started; it has already been aborted." ); - } - if( m_started ) { - throw new IllegalStateException( "Workflow has already started." ); - } - WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.STARTED ); - m_started = true; - m_state = RUNNING; - - WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.RUNNING ); - // Mark the first step as the current one & add to history - m_currentStep = m_firstStep; - m_history.add( m_currentStep ); - - // Process current step + public final void start( final Context context ) throws WikiException { + lock.lock(); try { - processCurrentStep( context ); - } catch( final WikiException e ) { - abort( context ); - throw e; + if( m_state == ABORTED ) { + throw new IllegalStateException( "Workflow cannot be started; it has already been aborted." ); + } + if( m_started ) { + throw new IllegalStateException( "Workflow has already started." ); + } + WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.STARTED ); + m_started = true; + m_state = RUNNING; + + WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.RUNNING ); + // Mark the first step as the current one & add to history + m_currentStep = m_firstStep; + m_history.add( m_currentStep ); + + // Process current step + try { + processCurrentStep( context ); + } catch( final WikiException e ) { + abort( context ); + throw e; + } + } finally { + lock.unlock(); } } @@ -557,12 +610,17 @@ public final synchronized void start( final Context context ) throws WikiExcepti * Sets the Workflow in the {@link #WAITING} state. If the Workflow is not running or has already been paused, this method throws an * IllegalStateException. Once paused, the Workflow can be un-paused by executing the {@link #restart(Context)} method. */ - public final synchronized void waitstate() { - if ( m_state != RUNNING ) { - throw new IllegalStateException( "Workflow is not running; cannot pause." ); + public final void waitstate() { + lock.lock(); + try { + if ( m_state != RUNNING ) { + throw new IllegalStateException( "Workflow is not running; cannot pause." 
); + } + m_state = WAITING; + WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.WAITING ); + } finally { + lock.unlock(); } - m_state = WAITING; - WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.WAITING ); } /** @@ -577,11 +635,16 @@ protected void cleanup() { * Protected helper method that changes the Workflow's state to {@link #COMPLETED} and sets the current Step to null. It * calls the {@link #cleanup()} method to flush retained objects. This method will no-op if it has previously been called. */ - protected final synchronized void complete() { - if( !isCompleted() ) { - m_state = COMPLETED; - WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.COMPLETED ); - cleanup(); + protected final void complete() { + lock.lock(); + try { + if( !isCompleted() ) { + m_state = COMPLETED; + WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.COMPLETED ); + cleanup(); + } + } finally { + lock.unlock(); } } @@ -638,5 +701,4 @@ protected final void processCurrentStep( final Context context ) throws WikiExce } } - } diff --git a/jspwiki-main/src/test/resources/wkflmgr.ser b/jspwiki-main/src/test/resources/wkflmgr.ser index 4a69d74d0cd3f9790448f1b61b021f1f21b44920..82947dccae57294cdaed38d6a6c859fe0bfefcf1 100644 GIT binary patch delta 982 zcmZ`&&1(}u6n~Rs(XOdh{Y*L_vELM2i1`9ts}XKS6M2V$DZ&4{yGH@BQBUy|?evA2X@fd!V!q zLFWqspl54nTXh|nVB6^Qo9Pb$q_SUuOekOxvr1unV=Kul-w||`r4)>^?6LAfRow=x z!mtSPHW}PPCzN>z*m~t&5u}@Voh-tTts5pKjyPUsdqZ1* zT@Us=G~Uh^L9z8JUJ+ioB7|+7G^t5!EH3WKs4g`eDvnD`Ca9R|musMiSIh0k3`dzVX1&nzUkR>`eF_%CR5eW2nS@kquO3n~^6s zef^cGJ)|O!tS77?RXso;o|YwRnN6$dw$;5L*KbIlXe-25(H5TsRuP57pRzTV;=0BK zs9Tut{=OgXdpDMdL+c)h*oDCoyG}eP`>LiA1pxogK^WUruf@6KL0hLQb*XobU5TdK z9bwBN)mA`QPr6d{Z$U*4B~cM-?Cf6n+yVD`3&2BR9?3WdumiH3<%~WF7seE3^?#+Z z@6n5+>k{Wti0%)9#s^ArmD#!2NPLY#aG_Ph&V)cdWw&CJ%1b^7r{rwDiX9#G_cxr6 za-)fxQj<&UORR9bcY;KaO{om(F!NAj{C`TY^TP{w^2AxzjJjc#v1p@BYA#*|ZF2u2Le4f@?jcd`f3#Z~FnwgK8Vztc7h{qb{Fh2<>;`HMZn^f$Xg{`X&vAEAIb{LGHMT-?!E5r1nnT+v-NkFWJ7uk6AOo6@d;=G)yH77Y2e<4P|2 zumxxEoUx=YBm$1$7wrH(H72v?Z;Tn-HI6$A7U;gc>7_(%S}^R}Jd!T=JpxvhR45%G z>1bPE`gRz(t`~+}goM>CsKi`sc5d?c67NYbQPg@80Lv<1cyIUvYvOWonmO1jK4hIo zr8~3Cg3>-E5}HrQuBoaSx!0Axi=Rr>!|VTmjSCkQ$Bd0^r#1~JVh4+6jdk#ZSv6Y0 z9%P%-dJACiCq6dMSF-vFzAe*071zXXX4UcYC?pwcWD^B&rm|PQLy>3*r#|MF?mjjp zrdZMcxf{qwCdVV9T_Y0*3DDlcXCtRC-&S}c!Tj69li4hk5(Qx@B_^S;oUS arg0 ) + public void putAll(final Map< ? , ? 
> arg0 ) { - // Shove all of the entries into the property string - for (final Entry value : arg0.entrySet()) { - @SuppressWarnings("unchecked") final Entry entry = (Entry) value; - writeProperty(entry.getKey(), entry.getValue()); - } + lock.lock(); + try { + // Shove all of the entries into the property string + for (final Entry value : arg0.entrySet()) { + @SuppressWarnings("unchecked") final Entry entry = (Entry) value; + writeProperty(entry.getKey(), entry.getValue()); + } - // Call the superclass method - super.putAll(arg0); + // Call the superclass method + super.putAll(arg0); + } finally { + lock.unlock(); + } } /** * {@inheritDoc} */ @Override - public synchronized Object remove(final Object key ) + public Object remove(final Object key ) { - // Remove from the property string - deleteProperty( key ); + lock.lock(); + try { + // Remove from the property string + deleteProperty( key ); - // Call the superclass method - return super.remove(key); + // Call the superclass method + return super.remove(key); + } finally { + lock.unlock(); + } } /** * {@inheritDoc} */ @Override - public synchronized String toString() + public String toString() { - return m_propertyString; + lock.lock(); + try { + return m_propertyString; + } finally { + lock.unlock(); + } + } private void deleteProperty(final Object arg0 ) From 3a83d94e69bddad50f47ca12d38203358779dba3 Mon Sep 17 00:00:00 2001 From: Arturo Bernal Date: Tue, 10 Oct 2023 09:04:23 +0200 Subject: [PATCH 02/10] Use custom Synchronizer class to manage ReentrantLock synchronization --- ChangeLog.md | 7 + .../java/org/apache/wiki/api/Release.java | 2 +- jspwiki-event/pom.xml | 6 + .../apache/wiki/event/WikiEventManager.java | 161 +++++------ .../search/kendra/KendraSearchProvider.java | 19 +- .../main/java/org/apache/wiki/WatchDog.java | 64 ++--- .../main/java/org/apache/wiki/WikiEngine.java | 49 ++-- .../auth/DefaultAuthenticationManager.java | 15 +- .../auth/DefaultAuthorizationManager.java | 15 +- .../apache/wiki/auth/DefaultUserManager.java | 15 +- .../org/apache/wiki/auth/SessionMonitor.java | 58 ++-- .../apache/wiki/auth/acl/AclEntryImpl.java | 31 +-- .../org/apache/wiki/auth/acl/AclImpl.java | 15 +- .../auth/authorize/DefaultGroupManager.java | 66 ++--- .../org/apache/wiki/auth/authorize/Group.java | 94 ++----- .../CookieAuthenticationLoginModule.java | 8 +- .../auth/permissions/PermissionFactory.java | 57 ++-- .../wiki/auth/user/XMLUserDatabase.java | 102 +++---- .../wiki/diff/ContextualDiffProvider.java | 8 +- .../apache/wiki/filters/PageEventFilter.java | 15 +- .../org/apache/wiki/filters/SpamFilter.java | 15 +- .../apache/wiki/plugin/BugReportHandler.java | 9 +- .../apache/wiki/plugin/PageViewPlugin.java | 252 +++++++++--------- .../providers/CachingAttachmentProvider.java | 15 +- .../wiki/providers/CachingProvider.java | 43 +-- .../providers/VersioningFileProvider.java | 41 +-- .../references/DefaultReferenceManager.java | 174 ++++++------ .../apache/wiki/rss/DefaultRSSGenerator.java | 15 +- .../wiki/search/LuceneSearchProvider.java | 29 +- .../ui/admin/beans/SearchManagerBean.java | 8 +- .../apache/wiki/workflow/AbstractStep.java | 36 +-- .../org/apache/wiki/workflow/Decision.java | 8 +- .../apache/wiki/workflow/DecisionQueue.java | 22 +- .../wiki/workflow/DefaultWorkflowManager.java | 16 +- .../java/org/apache/wiki/workflow/Task.java | 16 +- .../org/apache/wiki/workflow/Workflow.java | 71 ++--- .../apache/wiki/util/CommentedProperties.java | 56 +--- .../org/apache/wiki/util/Synchronizer.java | 150 +++++++++++ pom.xml | 7 + 39 files 
changed, 826 insertions(+), 964 deletions(-) create mode 100644 jspwiki-util/src/main/java/org/apache/wiki/util/Synchronizer.java diff --git a/ChangeLog.md b/ChangeLog.md index eed4cd10ef..f96febf25d 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -17,6 +17,13 @@ specific language governing permissions and limitations under the License. --> +**2023-12-06 Arturo Bernal (abernal AT apache DOT org)** + +* _2.12.2-git-12_ +* +* [JSPWIKI-1178](https://issues.apache.org/jira/browse/JSPWIKI-1178) - Deadlock with Java Virtual Threads + + **2023-12-04 Juan Pablo Santos (juanpablo AT apache DOT org)** * _2.12.2-git-11_ diff --git a/jspwiki-api/src/main/java/org/apache/wiki/api/Release.java b/jspwiki-api/src/main/java/org/apache/wiki/api/Release.java index b3699e53a1..1c6bef6af2 100644 --- a/jspwiki-api/src/main/java/org/apache/wiki/api/Release.java +++ b/jspwiki-api/src/main/java/org/apache/wiki/api/Release.java @@ -69,7 +69,7 @@ public final class Release { *
* If the build identifier is empty, it is not added. */ - public static final String BUILD = "11"; + public static final String BUILD = "12"; /** * This is the generic version string you should use when printing out the version. It is of diff --git a/jspwiki-event/pom.xml b/jspwiki-event/pom.xml index 2066adbfa0..e819a97b2c 100644 --- a/jspwiki-event/pom.xml +++ b/jspwiki-event/pom.xml @@ -62,5 +62,11 @@ junit-jupiter-engine test + + + ${project.groupId} + jspwiki-util + ${project.version} + diff --git a/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java b/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java index 0aedc2659a..87b6f05eff 100644 --- a/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java +++ b/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java @@ -21,6 +21,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.wiki.util.Synchronizer; import java.lang.ref.WeakReference; import java.util.ArrayList; @@ -33,6 +34,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Set; import java.util.TreeSet; import java.util.Vector; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantLock; /** @@ -150,7 +152,18 @@ public final class WikiEventManager { * * @see java.util.concurrent.locks.ReentrantLock */ - private static final ReentrantLock lock = new ReentrantLock(); + private static final ReentrantLock instanceLock = new ReentrantLock(); + private static final ReentrantLock removeWikiEventListenerLock = new ReentrantLock(); + private static final ReentrantLock delegatesLockLock = new ReentrantLock(); + private static final ReentrantLock preloadCacheLock = new ReentrantLock(); + private static final ReentrantLock delegateForLock = new ReentrantLock(); + private static final ReentrantLock wikiEventListenersLock = new ReentrantLock(); + private static final ReentrantLock wikiEventListenerLock = new ReentrantLock(); + private static final ReentrantLock removeWikiEventListenerLock2 = new ReentrantLock(); + private static final ReentrantLock isListeningLock = new ReentrantLock(); + private static final ReentrantLock fireEventLock = new ReentrantLock(); + + /** Constructor for a WikiEventManager. */ @@ -167,15 +180,12 @@ private WikiEventManager() { */ public static WikiEventManager getInstance() { if (c_instance == null) { - lock.lock(); - try { + Synchronizer.synchronize(instanceLock, () -> { if (c_instance == null) { c_instance = new WikiEventManager(); // start up any post-instantiation services here } - } finally { - lock.unlock(); - } + }); } return c_instance; } @@ -255,12 +265,11 @@ public static Set getWikiEventListeners( final Object client * @return true if the listener was found and removed. 
*/ public static boolean removeWikiEventListener( final WikiEventListener listener ) { - boolean removed = false; + final AtomicBoolean removed = new AtomicBoolean(false); // get the Map.entry object for the entire Map, then check match on entry (listener) final WikiEventManager mgr = getInstance(); final Map< Object, WikiEventDelegate > sources = Collections.synchronizedMap( mgr.getDelegates() ); - lock.lock(); - try { + Synchronizer.synchronize(removeWikiEventListenerLock, () -> { // get an iterator over the Map.Enty objects in the map for( final Map.Entry< Object, WikiEventDelegate > entry : sources.entrySet() ) { // the entry value is the delegate @@ -268,28 +277,16 @@ public static boolean removeWikiEventListener( final WikiEventListener listener // now see if we can remove the listener from the delegate (delegate may be null because this is a weak reference) if( delegate != null && delegate.removeWikiEventListener( listener ) ) { - removed = true; // was removed + removed.set(true); // was removed } } - } finally { - lock.unlock(); - } - return removed; + }); + return removed.get(); } private void removeDelegates() { - lock.lock(); - try { - m_delegates.clear(); - } finally { - lock.unlock(); - } - lock.lock(); - try { - m_preloadCache.clear(); - } finally { - lock.unlock(); - } + Synchronizer.synchronize(delegatesLockLock, m_delegates::clear); + Synchronizer.synchronize(preloadCacheLock, m_preloadCache::clear); } public static void shutdown() { @@ -340,38 +337,35 @@ private Map< Object, WikiEventDelegate > getDelegates() { * @param client the client Object, or alternately a Class reference * @return the WikiEventDelegate. */ - private WikiEventDelegate getDelegateFor( final Object client ) { - lock.lock(); - try { - if( client == null || client instanceof Class ) { // then preload the cache - final WikiEventDelegate delegate = new WikiEventDelegate( client ); - m_preloadCache.add( delegate ); - m_delegates.put( client, delegate ); + private WikiEventDelegate getDelegateFor(final Object client) { + return Synchronizer.synchronize(delegateForLock, () -> { + if (client == null || client instanceof Class) { // then preload the cache + final WikiEventDelegate delegate = new WikiEventDelegate(client); + m_preloadCache.add(delegate); + m_delegates.put(client, delegate); return delegate; - } else if( !m_preloadCache.isEmpty() ) { + } else if (!m_preloadCache.isEmpty()) { // then see if any of the cached delegates match the class of the incoming client - for( int i = m_preloadCache.size()-1 ; i >= 0 ; i-- ) { // start with most-recently added - final WikiEventDelegate delegate = m_preloadCache.elementAt( i ); - if( delegate.getClientClass() == null || delegate.getClientClass().equals( client.getClass() ) ) { + for (int i = m_preloadCache.size() - 1; i >= 0; i--) { // start with most-recently added + final WikiEventDelegate delegate = m_preloadCache.elementAt(i); + if (delegate.getClientClass() == null || delegate.getClientClass().equals(client.getClass())) { // we have a hit, so use it, but only on a client we haven't seen before - if( !m_delegates.containsKey( client ) ) { - m_preloadCache.remove( delegate ); - m_delegates.put( client, delegate ); + if (!m_delegates.containsKey(client)) { + m_preloadCache.remove(delegate); + m_delegates.put(client, delegate); return delegate; } } } } // otherwise treat normally... 
-            WikiEventDelegate delegate = m_delegates.get( client );
-            if( delegate == null ) {
-                delegate = new WikiEventDelegate( client );
-                m_delegates.put( client, delegate );
+            WikiEventDelegate delegate = m_delegates.get(client);
+            if (delegate == null) {
+                delegate = new WikiEventDelegate(client);
+                m_delegates.put(client, delegate);
             }
             return delegate;
-        } finally {
-            lock.unlock();
-        }
+        });
     }
 
 
@@ -415,8 +409,7 @@ private static final class WikiEventDelegate {
          * @throws java.lang.UnsupportedOperationException if any attempt is made to modify the Set
          */
         public Set< WikiEventListener > getWikiEventListeners() {
-            lock.lock();
-            try {
+            return Synchronizer.synchronize(wikiEventListenersLock, () -> {
                 final TreeSet< WikiEventListener > set = new TreeSet<>( new WikiEventListenerComparator() );
                 for( final WeakReference< WikiEventListener > wikiEventListenerWeakReference : m_listenerList ) {
                     final WikiEventListener l = wikiEventListenerWeakReference.get();
@@ -426,9 +419,7 @@ public Set< WikiEventListener > getWikiEventListeners() {
                 }
 
                 return Collections.unmodifiableSet( set );
-            } finally {
-                lock.unlock();
-            }
+            });
         }
 
         /**
@@ -438,18 +429,15 @@ public Set< WikiEventListener > getWikiEventListeners() {
          * @return true if the listener was added (i.e., it was not already in the list and was added)
          */
         public boolean addWikiEventListener( final WikiEventListener listener ) {
-            lock.lock();
-            try {
+            return Synchronizer.synchronize(wikiEventListenerLock, () -> {
                 final boolean listenerAlreadyContained = m_listenerList.stream()
                                                                        .map( WeakReference::get )
                                                                        .anyMatch( ref -> ref == listener );
                 if( !listenerAlreadyContained ) {
                     return m_listenerList.add( new WeakReference<>( listener ) );
                 }
-            } finally {
-                lock.unlock();
-            }
-            return false;
+                return false;
+            });
         }
 
         /**
@@ -459,67 +447,54 @@ public boolean addWikiEventListener( final WikiEventListener listener ) {
          * @return true if the listener was removed (i.e., it was actually in the list and was removed)
          */
         public boolean removeWikiEventListener( final WikiEventListener listener ) {
-            lock.lock();
-            try {
-                for( final Iterator< WeakReference< WikiEventListener > > i = m_listenerList.iterator(); i.hasNext(); ) {
+            return Synchronizer.synchronize(removeWikiEventListenerLock2, () -> {
+                for (final Iterator< WeakReference< WikiEventListener > > i = m_listenerList.iterator(); i.hasNext(); ) {
                     final WikiEventListener l = i.next().get();
-                    if( l == listener ) {
+                    if (l == listener) {
                         i.remove();
                         return true;
                     }
                 }
-            } finally {
-                lock.unlock();
-            }
-
-            return false;
+                return false;
+            });
         }
 
         /**
          * Returns true if there are one or more listeners registered with this instance.
         */
        public boolean isListening() {
-            lock.lock();
-            try {
-                return !m_listenerList.isEmpty();
-            } finally {
-                lock.unlock();
-            }
+            return Synchronizer.synchronize(isListeningLock, () -> !m_listenerList.isEmpty());
        }
 
        /**
        * Notify all listeners having a registered interest in change events of the supplied WikiEvent.
*/ - public void fireEvent( final WikiEvent event ) { - boolean needsCleanup = false; + public void fireEvent(final WikiEvent event) { + final AtomicBoolean needsCleanup = new AtomicBoolean(false); try { - lock.lock(); - try { - for( final WeakReference< WikiEventListener > wikiEventListenerWeakReference : m_listenerList ) { + Synchronizer.synchronize(fireEventLock, () -> { + for (final WeakReference wikiEventListenerWeakReference : m_listenerList) { final WikiEventListener listener = wikiEventListenerWeakReference.get(); - if( listener != null ) { - listener.actionPerformed( event ); + if (listener != null) { + listener.actionPerformed(event); } else { - needsCleanup = true; + needsCleanup.set(true); } } - // Remove all such listeners which have expired - if( needsCleanup ) { - for( int i = 0; i < m_listenerList.size(); i++ ) { - final WeakReference< WikiEventListener > w = m_listenerList.get( i ); - if( w.get() == null ) { + // Remove all such listeners which have expired + if (needsCleanup.get()) { + for (int i = 0; i < m_listenerList.size(); i++) { + final WeakReference w = m_listenerList.get(i); + if (w.get() == null) { m_listenerList.remove(i--); } } } - - } finally { - lock.unlock(); - } - } catch( final ConcurrentModificationException e ) { - // We don't die, we just don't do notifications in that case. - LOG.info( "Concurrent modification of event list; please report this.", e ); + }); + } catch (final ConcurrentModificationException e) { + // We don't die, we just don't do notifications in that case. + LOG.info("Concurrent modification of event list; please report this.", e); } } } diff --git a/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java b/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java index 8bc11fe50b..bb4c643f60 100644 --- a/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java +++ b/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java @@ -46,6 +46,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.auth.permissions.PagePermission; import org.apache.wiki.pages.PageManager; import org.apache.wiki.search.SearchProvider; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import java.io.IOException; @@ -157,7 +158,7 @@ public void pageRemoved( final Page page ) { final BatchDeleteDocumentRequest request = new BatchDeleteDocumentRequest().withIndexId( indexId ) .withDocumentIdList( pageName ); final BatchDeleteDocumentResult result = getKendra().batchDeleteDocument( request ); - if (result.getFailedDocuments().isEmpty()) { + if ( result.getFailedDocuments().size() == 0 ) { LOG.debug( format( "Page '%s' was removed from index", pageName ) ); } else { LOG.error( format( "Failed to remove Page '%s' from index", pageName ) ); @@ -346,23 +347,21 @@ private void doFullReindex() throws IOException { * index pages that have been modified */ private void doPartialReindex() { - if ( updates.isEmpty() ) { + if (updates.isEmpty()) { return; } - LOG.debug( "Indexing updated pages. Please wait ..." ); + LOG.debug("Indexing updated pages. 
Please wait ..."); final String executionId = startExecution(); - lock.lock(); - try { + + Synchronizer.synchronize(lock, () -> { try { while (!updates.isEmpty()) { - indexOnePage( updates.remove( 0 ), executionId ); + indexOnePage(updates.remove(0), executionId); } } finally { stopExecution(); } - } finally { - lock.unlock(); - } + }); } /** @@ -399,7 +398,7 @@ private void indexOnePage( final Page page, final String executionId ) { final BatchPutDocumentRequest request = new BatchPutDocumentRequest().withIndexId( indexId ) .withDocuments( document ); final BatchPutDocumentResult result = getKendra().batchPutDocument( request ); - if (result.getFailedDocuments().isEmpty()) { + if ( result.getFailedDocuments().size() == 0 ) { LOG.info( format( "Successfully indexed Page '%s' as %s", page.getName(), document.getContentType() ) ); } else { for ( final BatchPutDocumentResponseFailedDocument failedDocument : result.getFailedDocuments() ) { diff --git a/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java b/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java index 820ece30a4..f33fdd6fb4 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java @@ -21,6 +21,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.wiki.api.core.Engine; +import org.apache.wiki.util.Synchronizer; import java.lang.ref.WeakReference; import java.util.Iterator; @@ -66,7 +67,13 @@ public final class WatchDog { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; + private final ReentrantLock watchDogLock = new ReentrantLock(); + private final ReentrantLock enableLock = new ReentrantLock(); + private final ReentrantLock disableLock = new ReentrantLock(); + private final ReentrantLock enterStateLock = new ReentrantLock(); + private final ReentrantLock exitStateLock = new ReentrantLock(); + private final ReentrantLock checkLock = new ReentrantLock(); + private final ReentrantLock toStringLock = new ReentrantLock(); /** * Returns the current watchdog for the current thread. This is the preferred method of getting you a Watchdog, since it @@ -103,17 +110,12 @@ public WatchDog( final Engine engine, final Watchable watch ) { m_engine = engine; m_watchable = watch; - lock = new ReentrantLock(); - - lock.lock(); - try { - if( c_watcherThread == null ) { - c_watcherThread = new WatchDogThread( engine ); + Synchronizer.synchronize(watchDogLock, () -> { + if (c_watcherThread == null) { + c_watcherThread = new WatchDogThread(engine); c_watcherThread.start(); } - } finally { - lock.unlock(); - } + }); } /** @@ -152,32 +154,26 @@ private static void scrub() { * Can be used to enable the WatchDog. Will cause a new Thread to be created, if none was existing previously. */ public void enable() { - lock.lock(); - try { + Synchronizer.synchronize(enableLock, () -> { if( !m_enabled ) { m_enabled = true; c_watcherThread = new WatchDogThread( m_engine ); c_watcherThread.start(); } - } finally { - lock.unlock(); - } + }); } /** * Is used to disable a WatchDog. The watchdog thread is shut down and resources released. 
*/ public void disable() { - lock.lock(); - try { + Synchronizer.synchronize(disableLock, () -> { if( m_enabled ) { m_enabled = false; c_watcherThread.shutdown(); c_watcherThread = null; } - } finally { - lock.unlock(); - } + }); } /** @@ -208,13 +204,10 @@ public void enterState( final String state ) { */ public void enterState( final String state, final int expectedCompletionTime ) { LOG.debug( "{}: Entering state {}, expected completion in {} s", m_watchable.getName(), state, expectedCompletionTime ); - lock.lock(); - try { + Synchronizer.synchronize(enterStateLock, () -> { final State st = new State( state, expectedCompletionTime ); m_stateStack.push( st ); - } finally { - lock.unlock(); - } + }); } /** @@ -233,8 +226,7 @@ public void exitState() { */ public void exitState( final String state ) { if( !m_stateStack.empty() ) { - lock.lock(); - try { + Synchronizer.synchronize(exitStateLock, () -> { final State st = m_stateStack.peek(); if( state == null || st.getState().equals( state ) ) { m_stateStack.pop(); @@ -244,9 +236,7 @@ public void exitState( final String state ) { // FIXME: should actually go and fix things for that LOG.error( "exitState() called before enterState()" ); } - } finally { - lock.unlock(); - } + }); } else { LOG.warn( "Stack for " + m_watchable.getName() + " is empty!" ); } @@ -272,8 +262,7 @@ public boolean isWatchableAlive() { private void check() { LOG.debug( "Checking watchdog '{}'", m_watchable.getName() ); - lock.lock(); - try { + Synchronizer.synchronize(checkLock, () -> { if( !m_stateStack.empty() ) { final State st = m_stateStack.peek(); final long now = System.currentTimeMillis(); @@ -289,9 +278,7 @@ private void check() { } else { LOG.warn( "Stack for " + m_watchable.getName() + " is empty!" ); } - } finally { - lock.unlock(); - } + }); } /** @@ -332,8 +319,7 @@ private void dumpStackTraceForWatchable() { */ @Override public String toString() { - lock.lock(); - try { + return Synchronizer.synchronize(toStringLock, () -> { String state = "Idle"; if( !m_stateStack.empty() ) { @@ -341,9 +327,7 @@ public String toString() { state = st.getState(); } return "WatchDog state=" + state; - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java b/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java index 9895796b16..1c8bd1fdc0 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java @@ -57,6 +57,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.url.URLConstructor; import org.apache.wiki.util.ClassUtil; import org.apache.wiki.util.PropertyReader; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import org.apache.wiki.variables.VariableManager; import org.apache.wiki.workflow.WorkflowManager; @@ -81,6 +82,7 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Properties; import java.util.TimeZone; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; @@ -158,12 +160,7 @@ public class WikiEngine implements Engine { * @throws InternalWikiException in case something fails. This is a RuntimeException, so be prepared for it. 
*/ public static WikiEngine getInstance( final ServletConfig config ) throws InternalWikiException { - lock.lock(); - try { - return getInstance( config.getServletContext(), null ); - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> getInstance(config.getServletContext(), null)); } /** @@ -177,12 +174,7 @@ public static WikiEngine getInstance( final ServletConfig config ) throws Intern * @return One well-behaving WikiEngine instance. */ public static WikiEngine getInstance( final ServletConfig config, final Properties props ) { - lock.lock(); - try { - return getInstance( config.getServletContext(), props ); - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> getInstance( config.getServletContext(), props )); } /** @@ -194,22 +186,21 @@ public static WikiEngine getInstance( final ServletConfig config, final Properti * @throws InternalWikiException If the WikiEngine instantiation fails. */ public static WikiEngine getInstance(final ServletContext context, Properties props) throws InternalWikiException { - lock.lock(); - WikiEngine engine; - try { - engine = (WikiEngine) context.getAttribute(ATTR_WIKIENGINE); + final AtomicReference propsRef = new AtomicReference<>(props); + return Synchronizer.synchronize(lock, () -> { + WikiEngine engine = (WikiEngine) context.getAttribute(ATTR_WIKIENGINE); if (engine == null) { final String appid = Integer.toString(context.hashCode()); context.log(" Assigning new engine to " + appid); try { - if (props == null) { - props = PropertyReader.loadWebAppProps(context); + if (propsRef.get() == null) { + propsRef.set(PropertyReader.loadWebAppProps(context)); } engine = new WikiEngine(context, appid); try { // Note: May be null, if JSPWiki has been deployed in a WAR file. - engine.start(props); + engine.start(propsRef.get()); LOG.info("Root path for this Wiki is: '{}'", engine.getRootPath()); } catch (final Exception e) { final String msg = Release.APPNAME + ": Unable to load and setup properties from jspwiki.properties. 
" + e.getMessage(); @@ -224,10 +215,8 @@ public static WikiEngine getInstance(final ServletContext context, Properties pr throw new InternalWikiException("No wiki engine, check logs.", e); } } - } finally { - lock.unlock(); - } - return engine; + return engine; + }); } /** @@ -971,12 +960,9 @@ public InternationalizationManager getInternationalizationManager() { /** {@inheritDoc} */ @Override public final void addWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { WikiEventManager.addWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** @@ -984,12 +970,9 @@ public final void addWikiEventListener( final WikiEventListener listener ) { */ @Override public final void removeWikiEventListener(final WikiEventListener listener) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { WikiEventManager.removeWikiEventListener(this, listener); - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java index 79d5682644..4fe3392def 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java @@ -37,6 +37,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.event.WikiEventManager; import org.apache.wiki.event.WikiSecurityEvent; import org.apache.wiki.util.ClassUtil; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import org.apache.wiki.util.TimedCounterList; @@ -369,12 +370,9 @@ public Set< Principal > doJAASLogin( final Class< ? 
extends LoginModule > clazz, */ @Override public void addWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { WikiEventManager.addWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** @@ -382,12 +380,9 @@ public void addWikiEventListener( final WikiEventListener listener ) { */ @Override public void removeWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { WikiEventManager.removeWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java index f6747cbb18..79a72ce8a0 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java @@ -44,6 +44,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.pages.PageManager; import org.apache.wiki.preferences.Preferences; import org.apache.wiki.util.ClassUtil; +import org.apache.wiki.util.Synchronizer; import org.freshcookies.security.policy.LocalPolicy; import javax.servlet.http.HttpServletResponse; @@ -386,23 +387,17 @@ public Principal resolvePrincipal( final String name ) { /** {@inheritDoc} */ @Override public void addWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { WikiEventManager.addWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** {@inheritDoc} */ @Override public void removeWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { WikiEventManager.removeWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java index 32dde21a35..74c8ded51c 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java @@ -47,6 +47,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.tasks.TasksManager; import org.apache.wiki.ui.InputValidator; import org.apache.wiki.util.ClassUtil; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import org.apache.wiki.workflow.Decision; import org.apache.wiki.workflow.DecisionRequiredException; @@ -423,12 +424,9 @@ public Principal[] listWikiNames() throws WikiSecurityException { * @param listener the event listener */ @Override public void addWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { WikiEventManager.addWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** @@ -437,12 +435,9 @@ public Principal[] listWikiNames() throws WikiSecurityException { * @param listener the event listener */ @Override public void removeWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { WikiEventManager.removeWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** diff --git 
a/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java index 638e246ae1..0b5c45709a 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java @@ -27,6 +27,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.event.WikiEventListener; import org.apache.wiki.event.WikiEventManager; import org.apache.wiki.event.WikiSecurityEvent; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.comparators.PrincipalComparator; import javax.servlet.http.HttpServletRequest; @@ -70,7 +71,12 @@ public class SessionMonitor implements HttpSessionListener { * * @see ReentrantLock */ - private final ReentrantLock lock; + private final ReentrantLock createGuestSessionForLock; + private final ReentrantLock removeLock; + private final ReentrantLock userPrincipalsLock; + private final ReentrantLock addWikiEventListenerLock; + private final ReentrantLock removeWikiEventListenerLock; + /** * Returns the instance of the SessionMonitor for this wiki. Only one SessionMonitor exists per Engine. @@ -93,7 +99,11 @@ public static SessionMonitor getInstance( final Engine engine ) { /** Construct the SessionListener */ public SessionMonitor() { - lock = new ReentrantLock(); + createGuestSessionForLock = new ReentrantLock(); + removeLock = new ReentrantLock(); + userPrincipalsLock = new ReentrantLock(); + addWikiEventListenerLock = new ReentrantLock(); + removeWikiEventListenerLock = new ReentrantLock(); } private SessionMonitor( final Engine engine ) { @@ -185,12 +195,9 @@ public final Session find( final String sessionId ) { private Session createGuestSessionFor( final String sessionId ) { LOG.debug( "Session for session ID={}... not found. Creating guestSession()", sessionId ); final Session wikiSession = Wiki.session().guest( m_engine ); - lock.lock(); - try { - m_sessions.put( sessionId, wikiSession ); - } finally { - lock.unlock(); - } + Synchronizer.synchronize(createGuestSessionForLock, () -> { + m_sessions.put(sessionId, wikiSession); + }); return wikiSession; } @@ -215,12 +222,9 @@ public final void remove( final HttpSession session ) { if( session == null ) { throw new IllegalArgumentException( "Session cannot be null." 
); } - lock.lock(); - try { + Synchronizer.synchronize(removeLock, () -> { m_sessions.remove( session.getId() ); - } finally { - lock.unlock(); - } + }); } /** @@ -242,15 +246,11 @@ public final int sessions() * @return the array of user principals */ public final Principal[] userPrincipals() { - final Collection principals; - lock.lock(); - try { - principals = m_sessions.values().stream().map(Session::getUserPrincipal).collect(Collectors.toList()); - } finally { - lock.unlock(); - } - final Principal[] p = principals.toArray( new Principal[0] ); - Arrays.sort( p, m_comparator ); + final Collection principals = Synchronizer.synchronize(userPrincipalsLock, () -> + m_sessions.values().stream().map(Session::getUserPrincipal).collect(Collectors.toList())); + + final Principal[] p = principals.toArray(new Principal[0]); + Arrays.sort(p, m_comparator); return p; } @@ -261,12 +261,9 @@ public final Principal[] userPrincipals() { * @since 2.4.75 */ public final void addWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(addWikiEventListenerLock, () -> { WikiEventManager.addWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** @@ -276,12 +273,9 @@ public final void addWikiEventListener( final WikiEventListener listener ) { * @since 2.4.75 */ public final void removeWikiEventListener(final WikiEventListener listener) { - lock.lock(); - try { + Synchronizer.synchronize(removeWikiEventListenerLock, () -> { WikiEventManager.removeWikiEventListener(this, listener); - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java index befac2c77e..3a2d6d6d0b 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java @@ -20,6 +20,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.auth.permissions.PagePermission; import org.apache.wiki.event.WikiEventManager; +import org.apache.wiki.util.Synchronizer; import java.io.Serializable; import java.security.Permission; @@ -68,17 +69,13 @@ public AclEntryImpl() { */ @Override public boolean addPermission(final Permission permission) { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { if (permission instanceof PagePermission && findPermission(permission) == null) { m_permissions.add(permission); return true; } - return false; - } finally { - lock.unlock(); - } + }); } /** @@ -100,12 +97,7 @@ public boolean checkPermission(final Permission permission ) { */ @Override public Principal getPrincipal() { - lock.lock(); - try { - return m_principal; - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_principal); } /** @@ -126,18 +118,14 @@ public Enumeration< Permission > permissions() { */ @Override public boolean removePermission(final Permission permission ) { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { final Permission p = findPermission(permission); if (p != null) { m_permissions.remove(p); return true; } - return false; - } finally { - lock.unlock(); - } + }); } /** @@ -150,16 +138,13 @@ public boolean removePermission(final Permission permission ) { */ @Override public boolean setPrincipal(final Principal user) { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { if (m_principal != null || user == null) { return 
false; } m_principal = user; return true; - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java index 3eb2ddf729..54af42c608 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java @@ -19,6 +19,7 @@ Licensed to the Apache Software Foundation (ASF) under one package org.apache.wiki.auth.acl; import org.apache.wiki.api.core.AclEntry; +import org.apache.wiki.util.Synchronizer; import java.io.Serializable; import java.security.Permission; @@ -99,8 +100,7 @@ private boolean hasEntry( final AclEntry entry ) { /** {@inheritDoc} */ @Override public boolean addEntry( final AclEntry entry ) { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { if( entry.getPrincipal() == null ) { throw new IllegalArgumentException( "Entry principal cannot be null" ); } @@ -112,20 +112,13 @@ public boolean addEntry( final AclEntry entry ) { m_entries.add( entry ); return true; - } finally { - lock.unlock(); - } + }); } /** {@inheritDoc} */ @Override public boolean removeEntry( final AclEntry entry ) { - lock.lock(); - try { - return m_entries.remove( entry ); - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_entries.remove( entry )); } /** {@inheritDoc} */ diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java index eefc53bec0..005e1248a3 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java @@ -40,6 +40,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.event.WikiSecurityEvent; import org.apache.wiki.ui.InputValidator; import org.apache.wiki.util.ClassUtil; +import org.apache.wiki.util.Synchronizer; import java.security.Principal; import java.util.Arrays; @@ -84,10 +85,22 @@ public class DefaultGroupManager implements GroupManager, Authorizer, WikiEventL * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; + private final ReentrantLock initializeLock; + private final ReentrantLock removeGroupLock; + private final ReentrantLock setGroupLock; + private final ReentrantLock setGroupLockPut; + private final ReentrantLock setGroupLockCatch; + private final ReentrantLock addWikiEventListenerLock; + private final ReentrantLock removeWikiEventListenerLock; public DefaultGroupManager() { - lock = new ReentrantLock(); + initializeLock = new ReentrantLock(); + removeGroupLock = new ReentrantLock(); + setGroupLock = new ReentrantLock(); + setGroupLockPut = new ReentrantLock(); + setGroupLockCatch = new ReentrantLock(); + addWikiEventListenerLock = new ReentrantLock(); + removeWikiEventListenerLock = new ReentrantLock(); } /** {@inheritDoc} */ @@ -167,16 +180,13 @@ public void initialize( final Engine engine, final Properties props ) throws Wik // Load all groups from the database into the cache final Group[] groups = m_groupDatabase.groups(); - lock.lock(); - try { + Synchronizer.synchronize(initializeLock, () -> { for( final Group group : groups ) { // Add new group to cache; fire GROUP_ADD event m_groups.put( group.getPrincipal(), group ); fireEvent( WikiSecurityEvent.GROUP_ADD, group ); } - } finally { - lock.unlock(); - } + 
}); // Make the GroupManager listen for WikiEvents (WikiSecurityEvents for changed user profiles) engine.getManager( UserManager.class ).addWikiEventListener( this ); @@ -271,12 +281,9 @@ public void removeGroup( final String index ) throws WikiSecurityException { // Delete the group // TODO: need rollback procedure - lock.lock(); - try { + Synchronizer.synchronize(removeGroupLock, () -> { m_groups.remove( group.getPrincipal() ); - } finally { - lock.unlock(); - } + }); m_groupDatabase.delete( group ); fireEvent( WikiSecurityEvent.GROUP_REMOVE, group ); } @@ -290,12 +297,9 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu final Group oldGroup = m_groups.get( group.getPrincipal() ); if( oldGroup != null ) { fireEvent( WikiSecurityEvent.GROUP_REMOVE, oldGroup ); - lock.lock(); - try { + Synchronizer.synchronize(setGroupLock, () -> { m_groups.remove( oldGroup.getPrincipal() ); - } finally { - lock.unlock(); - } + }); } // Copy existing modifier info & timestamps @@ -307,12 +311,9 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu } // Add new group to cache; announce GROUP_ADD event - lock.lock(); - try { + Synchronizer.synchronize(setGroupLockPut, () -> { m_groups.put( group.getPrincipal(), group ); - } finally { - lock.unlock(); - } + }); fireEvent( WikiSecurityEvent.GROUP_ADD, group ); // Save the group to back-end database; if it fails, roll back to previous state. Note that the back-end @@ -327,12 +328,9 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu // Restore previous version, re-throw... fireEvent( WikiSecurityEvent.GROUP_REMOVE, group ); fireEvent( WikiSecurityEvent.GROUP_ADD, oldGroup ); - lock.lock(); - try { + Synchronizer.synchronize(setGroupLockCatch, () -> { m_groups.put( oldGroup.getPrincipal(), oldGroup ); - } finally { - lock.unlock(); - } + }); throw new WikiSecurityException( e.getMessage() + " (rolled back to previous version).", e ); } // Re-throw security exception @@ -398,23 +396,17 @@ protected String[] extractMembers( final String memberLine ) { /** {@inheritDoc} */ @Override public void addWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(addWikiEventListenerLock, () -> { WikiEventManager.addWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** {@inheritDoc} */ @Override public void removeWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(removeWikiEventListenerLock, () -> { WikiEventManager.removeWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** {@inheritDoc} */ diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java index 31b67dbec1..327af4841b 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java @@ -19,10 +19,13 @@ Licensed to the Apache Software Foundation (ASF) under one package org.apache.wiki.auth.authorize; import org.apache.wiki.auth.GroupPrincipal; +import org.apache.wiki.event.WikiEventManager; +import org.apache.wiki.util.Synchronizer; import java.security.Principal; import java.util.Date; import java.util.Vector; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; /** @@ -111,29 +114,21 @@ protected Group( final String name, final 
String wiki ) { * @return true if the operation was successful */ public boolean add( final Principal user ) { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { if( isMember( user ) ) { return false; } m_members.add( user ); return true; - } finally { - lock.unlock(); - } + }); } /** * Clears all Principals from the group list. */ public void clear() { - lock.lock(); - try { - m_members.clear(); - } finally { - lock.unlock(); - } + Synchronizer.synchronize(lock, m_members::clear); } /** @@ -179,12 +174,7 @@ public int hashCode() { * @return the creation date */ public Date getCreated() { - lock.lock(); - try { - return m_created; - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_created); } /** @@ -193,12 +183,7 @@ public Date getCreated() { * @return the creator */ public final String getCreator() { - lock.lock(); - try { - return m_creator; - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_creator); } /** @@ -207,12 +192,7 @@ public final String getCreator() { * @return the date and time of last modification */ public Date getLastModified() { - lock.lock(); - try { - return m_modified; - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_modified); } /** @@ -221,12 +201,7 @@ public Date getLastModified() { * @return the modifier */ public final String getModifier() { - lock.lock(); - try { - return m_modifier; - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_modifier); } /** @@ -282,20 +257,17 @@ public Principal[] members() { * @param user the principal to remove * @return true if the operation was successful */ - public boolean remove( Principal user ) { - lock.lock(); - try { - user = findMember( user.getName() ); - if( user == null ) + public boolean remove(Principal user) { + final AtomicReference userRef = new AtomicReference<>(user); + return Synchronizer.synchronize(lock, () -> { + Principal updatedUser = findMember(userRef.get().getName()); + userRef.set(updatedUser); + if (updatedUser == null) { return false; - - m_members.remove( user ); - + } + m_members.remove(updatedUser); return true; - } finally { - lock.unlock(); - } - + }); } /** @@ -304,12 +276,9 @@ public boolean remove( Principal user ) { * @param date the creation date */ public void setCreated( final Date date ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { m_created = date; - } finally { - lock.unlock(); - } + }); } /** @@ -317,12 +286,9 @@ public void setCreated( final Date date ) { * @param creator the creator */ public final void setCreator( final String creator ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { this.m_creator = creator; - } finally { - lock.unlock(); - } + }); } /** @@ -331,12 +297,9 @@ public final void setCreator( final String creator ) { * @param date the last-modified date */ public void setLastModified( final Date date ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { m_modified = date; - } finally { - lock.unlock(); - } + }); } /** @@ -345,12 +308,9 @@ public void setLastModified( final Date date ) { * @param modifier the modifier */ public final void setModifier( final String modifier ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { this.m_modifier = modifier; - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java 
b/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java index 28299c2a1a..28a9634d3b 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java @@ -24,6 +24,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.auth.WikiPrincipal; import org.apache.wiki.util.FileUtil; import org.apache.wiki.util.HttpUtil; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import javax.security.auth.callback.Callback; @@ -280,8 +281,7 @@ private static Cookie getLoginCookie( final String value ) { * @param cookieDir cookie directory */ private static void scrub( final int days, final File cookieDir ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { LOG.debug( "Scrubbing cookieDir..." ); final File[] files = cookieDir.listFiles(); final long obsoleteDateLimit = System.currentTimeMillis() - ( ( long )days + 1 ) * 24 * 60 * 60 * 1000L; @@ -300,9 +300,7 @@ private static void scrub( final int days, final File cookieDir ) { } LOG.debug( "Removed {} obsolete cookie logins", deleteCount ); - } finally { - lock.unlock(); - } + }); } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java index 22d13be93f..37c6ec726f 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java @@ -19,8 +19,10 @@ Licensed to the Apache Software Foundation (ASF) under one package org.apache.wiki.auth.permissions; import org.apache.wiki.api.core.Page; +import org.apache.wiki.util.Synchronizer; import java.util.WeakHashMap; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; @@ -86,45 +88,28 @@ public static PagePermission getPagePermission( final String page, final String * @param actions A list of actions. * @return A PagePermission object. */ - private static PagePermission getPagePermission( final String wiki, String page, final String actions ) - { - PagePermission perm; - // - // Since this is pretty speed-critical, we try to avoid the StringBuffer creation - // overhead by XORring the hashcodes. However, if page name length > 32 characters, - // this might result in two same hashCodes. - // FIXME: Make this work for page-name lengths > 32 characters (use the alt implementation - // if page.length() > 32?) - // Alternative implementation below, but it does create an extra StringBuffer. - //String key = wiki+":"+page+":"+actions; - + private static PagePermission getPagePermission(final String wiki, String page, final String actions) { final Integer key = wiki.hashCode() ^ page.hashCode() ^ actions.hashCode(); - - // - // It's fine if two threads update the cache, since the objects mean the same - // thing anyway. And this avoids nasty blocking effects. 
- // - lock.lock(); - try { - perm = c_cache.get( key ); - } finally { - lock.unlock(); - } - - if( perm == null ) - { - if( !wiki.isEmpty() ) page = wiki+":"+page; - perm = new PagePermission( page, actions ); + AtomicReference permRef = new AtomicReference<>(); + + Synchronizer.synchronize(lock, () -> { + PagePermission perm = c_cache.get(key); + permRef.set(perm); + }); + + if (permRef.get() == null) { + if (!wiki.isEmpty()) page = wiki + ":" + page; + PagePermission newPerm = new PagePermission(page, actions); - lock.lock(); - try { - c_cache.put( key, perm ); - } finally { - lock.unlock(); - } + Synchronizer.synchronize(lock, () -> { + c_cache.put(key, newPerm); + }); + + return newPerm; } - - return perm; + + return permRef.get(); } + } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java index c7251d6594..b0cc11182f 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java @@ -25,6 +25,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.auth.WikiPrincipal; import org.apache.wiki.auth.WikiSecurityException; import org.apache.wiki.util.Serializer; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import org.w3c.dom.Document; import org.w3c.dom.Element; @@ -52,6 +53,8 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Properties; import java.util.SortedSet; import java.util.TreeSet; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -107,31 +110,44 @@ public XMLUserDatabase() { } /** {@inheritDoc} */ - @Override - public void deleteByLoginName( final String loginName ) throws WikiSecurityException { - lock.lock(); - try { - if( c_dom == null ) { - throw new WikiSecurityException( "FATAL: database does not exist" ); - } + public void deleteByLoginName(final String loginName) throws WikiSecurityException { + final AtomicBoolean userDeleted = new AtomicBoolean(false); + final AtomicReference exceptionRef = new AtomicReference<>(); - final NodeList users = c_dom.getDocumentElement().getElementsByTagName( USER_TAG ); - for( int i = 0; i < users.getLength(); i++ ) { - final Element user = ( Element )users.item( i ); - if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) { - c_dom.getDocumentElement().removeChild( user ); + Synchronizer.synchronize(lock, () -> { + try { + if (c_dom == null) { + throw new WikiSecurityException("FATAL: database does not exist"); + } - // Commit to disk - saveDOM(); - return; + final NodeList users = c_dom.getDocumentElement().getElementsByTagName(USER_TAG); + for (int i = 0; i < users.getLength(); i++) { + final Element user = (Element) users.item(i); + if (user.getAttribute(LOGIN_NAME).equals(loginName)) { + c_dom.getDocumentElement().removeChild(user); + + // Commit to disk + saveDOM(); + userDeleted.set(true); + return; + } } + } catch (WikiSecurityException e) { + exceptionRef.set(e); } - throw new NoSuchPrincipalException( "Not in database: " + loginName ); - } finally { - lock.unlock(); + }); + + if (exceptionRef.get() != null) { + throw exceptionRef.get(); + } + + if (!userDeleted.get()) { + throw new NoSuchPrincipalException("Not in database: " + loginName); } } + + 
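The deleteByLoginName() rewrite above also shows how this patch works around lambda limitations: a lambda cannot reassign ordinary locals or throw checked exceptions through Runnable, so the outcome and any failure are parked in java.util.concurrent.atomic holders and inspected once Synchronizer.synchronize() returns. A compact, self-contained sketch of that pattern follows; the class, method and exception names are hypothetical and not taken from this patch.

import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;

// Hypothetical example: remove an entry under a lock and surface both the boolean outcome
// and any failure to the caller, even though the work runs inside a lambda.
final class RemoveUnderLockExample {

    static final class StoreException extends Exception {
        StoreException( final String msg ) { super( msg ); }
    }

    private final ReentrantLock lock = new ReentrantLock();

    boolean remove( final Map< String, String > store, final String key ) throws StoreException {
        final AtomicBoolean removed = new AtomicBoolean( false );
        final AtomicReference< StoreException > failure = new AtomicReference<>();
        Synchronizer.synchronize( lock, () -> {
            if( store == null ) {
                failure.set( new StoreException( "store not available" ) );
                return;
            }
            removed.set( store.remove( key ) != null );
        } );
        if( failure.get() != null ) {
            throw failure.get();   // re-thrown only after the lock has been released
        }
        return removed.get();
    }
}

The same shape recurs below in PageViewPlugin.execute(), where a PluginException captured inside the lambda is re-thrown after the critical section completes.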
/** {@inheritDoc} */ @Override public UserProfile findByEmail( final String index ) throws NoSuchPrincipalException { @@ -343,55 +359,51 @@ private void checkForRefresh() { */ @Override public void rename( final String loginName, final String newName) throws DuplicateUserException, WikiSecurityException { - lock.lock(); - try { - if( c_dom == null ) { - LOG.fatal( "Could not rename profile '" + loginName + "'; database does not exist" ); - throw new IllegalStateException( "FATAL: database does not exist" ); + Synchronizer.synchronize(lock, () -> { + if (c_dom == null) { + LOG.fatal("Could not rename profile '" + loginName + "'; database does not exist"); + throw new IllegalStateException("FATAL: database does not exist"); } checkForRefresh(); // Get the existing user; if not found, throws NoSuchPrincipalException - final UserProfile profile = findByLoginName( loginName ); + final UserProfile profile = findByLoginName(loginName); // Get user with the proposed name; if found, it's a collision try { - final UserProfile otherProfile = findByLoginName( newName ); - if( otherProfile != null ) { - throw new DuplicateUserException( "security.error.cannot.rename", newName ); + final UserProfile otherProfile = findByLoginName(newName); + if (otherProfile != null) { + throw new DuplicateUserException("security.error.cannot.rename", newName); } - } catch( final NoSuchPrincipalException e ) { + } catch (final NoSuchPrincipalException e) { // Good! That means it's safe to save using the new name } // Find the user with the old login id attribute, and change it - final NodeList users = c_dom.getElementsByTagName( USER_TAG ); - for( int i = 0; i < users.getLength(); i++ ) { - final Element user = ( Element )users.item( i ); - if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) { - final DateFormat c_format = new SimpleDateFormat( DATE_FORMAT ); - final Date modDate = new Date( System.currentTimeMillis() ); - setAttribute( user, LOGIN_NAME, newName ); - setAttribute( user, LAST_MODIFIED, c_format.format( modDate ) ); - profile.setLoginName( newName ); - profile.setLastModified( modDate ); + final NodeList users = c_dom.getElementsByTagName(USER_TAG); + for (int i = 0; i < users.getLength(); i++) { + final Element user = (Element) users.item(i); + if (user.getAttribute(LOGIN_NAME).equals(loginName)) { + final DateFormat c_format = new SimpleDateFormat(DATE_FORMAT); + final Date modDate = new Date(System.currentTimeMillis()); + setAttribute(user, LOGIN_NAME, newName); + setAttribute(user, LAST_MODIFIED, c_format.format(modDate)); + profile.setLoginName(newName); + profile.setLastModified(modDate); break; } } // Commit to disk saveDOM(); - } finally { - lock.unlock(); - } + }); } /** {@inheritDoc} */ @Override public void save( final UserProfile profile ) throws WikiSecurityException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { if ( c_dom == null ) { LOG.fatal( "Could not save profile " + profile + " database does not exist" ); throw new IllegalStateException( "FATAL: database does not exist" ); @@ -462,9 +474,7 @@ public void save( final UserProfile profile ) throws WikiSecurityException { // Commit to disk saveDOM(); - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java index bf4c71aa5c..0d70680e5e 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java +++ 
b/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java @@ -24,6 +24,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.api.core.Context; import org.apache.wiki.api.core.Engine; import org.apache.wiki.api.exceptions.NoRequiredPropertyException; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import org.suigeneris.jrcs.diff.Diff; import org.suigeneris.jrcs.diff.DifferentiationFailedException; @@ -151,8 +152,7 @@ public void initialize( final Engine engine, final Properties properties) throws */ @Override public String makeDiffHtml( final Context ctx, final String wikiOld, final String wikiNew ) { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { // // Sequencing handles lineterminator to
and every-other consequtive space to a   // @@ -181,9 +181,7 @@ public String makeDiffHtml( final Context ctx, final String wikiOld, final Strin cm.shutdown(); sb.append( DIFF_END ); return sb.toString(); - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java index b58c3f249b..bdbed514e6 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java @@ -26,6 +26,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.event.WikiEventListener; import org.apache.wiki.event.WikiEventManager; import org.apache.wiki.event.WikiPageEvent; +import org.apache.wiki.util.Synchronizer; import java.util.Properties; import java.util.concurrent.locks.ReentrantLock; @@ -128,12 +129,9 @@ public void postSave( final Context wikiContext, final String content ) { * @param listener the event listener */ public final void addWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { WikiEventManager.addWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** @@ -142,12 +140,9 @@ public final void addWikiEventListener( final WikiEventListener listener ) { * @param listener the event listener */ public final void removeWikiEventListener( final WikiEventListener listener ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { WikiEventManager.removeWikiEventListener( this, listener ); - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java index 981a291b6a..4c51af8a44 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java @@ -46,6 +46,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.ui.EditorManager; import org.apache.wiki.util.FileUtil; import org.apache.wiki.util.HttpUtil; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import org.suigeneris.jrcs.diff.Diff; import org.suigeneris.jrcs.diff.DifferentiationFailedException; @@ -452,8 +453,7 @@ private Collection< Pattern > parseBlacklist( final String list ) { */ private void checkSinglePageChange(final Context context, final Change change ) throws RedirectException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { final HttpServletRequest req = context.getHttpRequest(); if( req != null ) { @@ -534,9 +534,7 @@ private void checkSinglePageChange(final Context context, final Change change ) m_lastModifications.add( new Host( addr, change ) ); } - } finally { - lock.unlock(); - } + }); } @@ -651,8 +649,7 @@ private void checkUTF8( final Context context, final Change change ) throws Redi /** Goes through the ban list and cleans away any host which has expired from it. 
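Each of the filter refactors above trades a synchronized region that can block (page look-ups, diffing, ban-list bookkeeping) for a ReentrantLock-guarded lambda. The motivation is specific to JDK 21 virtual threads: blocking inside a synchronized block pins the virtual thread to its carrier thread, while blocking under a ReentrantLock lets the carrier be reused, so lock contention cannot starve the scheduler. The following standalone demo, which is not part of the patch, shows the lock-based side of that comparison.

import java.util.concurrent.Executors;
import java.util.concurrent.locks.ReentrantLock;

// Standalone illustration: parking or sleeping while holding a ReentrantLock releases the
// carrier of a JDK 21 virtual thread, whereas the same code inside a synchronized block
// would pin the carrier for the duration of the blocking call.
public class LockVsSynchronizedDemo {

    private static final ReentrantLock LOCK = new ReentrantLock();

    public static void main( final String[] args ) {
        try( final var executor = Executors.newVirtualThreadPerTaskExecutor() ) {
            for( int i = 0; i < 1_000; i++ ) {
                executor.submit( () -> {
                    LOCK.lock();                    // contended waiters are parked, carriers stay free
                    try {
                        Thread.sleep( 10 );         // stand-in for blocking work (I/O, disk, network)
                    } catch( final InterruptedException e ) {
                        Thread.currentThread().interrupt();
                    } finally {
                        LOCK.unlock();
                    }
                } );
            }
        }   // close() waits for the submitted tasks to finish
    }
}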
*/ private void cleanBanList() { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { final long now = System.currentTimeMillis(); for( final Iterator< Host > i = m_temporaryBanList.iterator(); i.hasNext(); ) { final Host host = i.next(); @@ -662,9 +659,7 @@ private void cleanBanList() { i.remove(); } } - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java b/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java index ad59bef784..090917ae38 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java @@ -31,6 +31,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.pages.PageManager; import org.apache.wiki.parser.MarkupParser; import org.apache.wiki.preferences.Preferences; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import java.io.PrintWriter; @@ -182,8 +183,7 @@ public String execute( final Context context, final Map< String, String > params * and tries again. */ private String findNextPage( final Context context, final String title, final String baseName ) { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { final String basicPageName = ( ( baseName != null ) ? baseName : "Bug" ) + MarkupParser.cleanLink( title ); final Engine engine = context.getEngine(); @@ -192,11 +192,8 @@ private String findNextPage( final Context context, final String title, final St while( engine.getManager( PageManager.class ).wikiPageExists( pageName ) ) { pageName = basicPageName + lastbug++; } - return pageName; - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java b/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java index 6a2faed91d..39d8095c18 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java @@ -43,6 +43,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.event.WikiPageRenameEvent; import org.apache.wiki.references.ReferenceManager; import org.apache.wiki.render.RenderingManager; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import java.io.File; @@ -59,6 +60,9 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Map.Entry; import java.util.Properties; import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; @@ -152,15 +156,12 @@ public PageViewPlugin() { @Override public void initialize( final Engine engine ) { LOG.info( "initializing PageViewPlugin" ); - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { if( c_singleton == null ) { c_singleton = new PageViewManager(); } c_singleton.initialize( engine ); - } finally { - lock.unlock(); - } + }); } /** @@ -221,8 +222,7 @@ public final class PageViewManager implements WikiEventListener { * @param engine The wiki engine. 
*/ public void initialize( final Engine engine ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { LOG.info( "initializing PageView Manager" ); m_workDir = engine.getWorkDir(); engine.addWikiEventListener( this ); @@ -241,17 +241,14 @@ public void initialize( final Engine engine ) { } m_initialized = true; - } finally { - lock.unlock(); - } + }); } /** * Handle the shutdown event via the page counter thread. */ private void handleShutdown() { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { LOG.info( "handleShutdown: The counter store thread was shut down." ); cleanup(); @@ -271,9 +268,7 @@ private void handleShutdown() { m_initialized = false; m_pageCountSaveThread = null; - } finally { - lock.unlock(); - } + }); } /** @@ -317,14 +312,15 @@ public void actionPerformed( final WikiEvent event ) { public String execute( final Context context, final Map< String, String > params ) throws PluginException { final Engine engine = context.getEngine(); final Page page = context.getPage(); - String result = STR_EMPTY; + final AtomicReference atomicResult = new AtomicReference<>(STR_EMPTY); + final AtomicReference pluginExceptionRef = new AtomicReference<>(); if( page != null ) { // get parameters final String pagename = page.getName(); String count = params.get( PARAM_COUNT ); final String show = params.get( PARAM_SHOW ); - int entries = TextUtil.parseIntParameter( params.get( PARAM_MAX_ENTRIES ), Integer.MAX_VALUE ); + final AtomicInteger atomicEntries = new AtomicInteger(TextUtil.parseIntParameter(params.get(PARAM_MAX_ENTRIES), Integer.MAX_VALUE)); final int max = TextUtil.parseIntParameter( params.get( PARAM_MAX_COUNT ), Integer.MAX_VALUE ); final int min = TextUtil.parseIntParameter( params.get( PARAM_MIN_COUNT ), Integer.MIN_VALUE ); final String sort = params.get( PARAM_SORT ); @@ -333,22 +329,22 @@ public String execute( final Context context, final Map< String, String > params final Pattern[] include = compileGlobs( PARAM_INCLUDE, params.get( PARAM_INCLUDE ) ); final Pattern[] refer = compileGlobs( PARAM_REFER, params.get( PARAM_REFER ) ); final PatternMatcher matcher = (null != exclude || null != include || null != refer) ? new Perl5Matcher() : null; - boolean increment = false; + final AtomicBoolean atomicIncrement = new AtomicBoolean(false); // increment counter? if( STR_YES.equals( count ) ) { - increment = true; + atomicIncrement.set(true); } else { count = null; } // default increment counter? if( ( show == null || STR_NONE.equals( show ) ) && count == null ) { - increment = true; + atomicIncrement.set(true); } // filter on referring pages? 
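PageViewPlugin.execute() illustrates the other recurring adjustment in this patch: plain mutable locals such as increment, entries and result become AtomicBoolean, AtomicInteger and AtomicReference so that the counting and rendering work can move inside the lock's lambda. A tiny hypothetical example of the counter case, not taken from this patch:

import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;

// Hypothetical mini-example: a loop budget that the old code decremented directly
// ("entries--") becomes an AtomicInteger once the loop body is captured by the lambda.
final class LambdaCounterExample {

    private final ReentrantLock lock = new ReentrantLock();

    int renderAtMost( final List< String > pages, final int maxEntries ) {
        final AtomicInteger budget = new AtomicInteger( maxEntries );
        Synchronizer.synchronize( lock, () -> {
            for( final String page : pages ) {
                if( budget.get() <= 0 ) {
                    break;
                }
                budget.decrementAndGet();   // stands in for rendering one list entry
            }
        } );
        return maxEntries - budget.get();   // how many entries were actually rendered
    }
}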
- Collection< String > referrers = null; + final AtomicReference> atomicReferrers = new AtomicReference<>(null); if( refer != null ) { final ReferenceManager refManager = engine.getManager( ReferenceManager.class ); @@ -361,126 +357,134 @@ public String execute( final Context context, final Map< String, String > params if( use ) { final Collection< String > refs = engine.getManager( ReferenceManager.class ).findReferrers( name ); if( refs != null && !refs.isEmpty() ) { - if( referrers == null ) { - referrers = new HashSet<>(); + if (atomicReferrers.get() == null) { + atomicReferrers.set(new HashSet<>()); } - referrers.addAll( refs ); + atomicReferrers.get().addAll(refs); } } } } - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { + try { - Counter counter = m_counters.get( pagename ); + Counter counter = m_counters.get( pagename ); - // only count in view mode, keep storage values in sync - if( increment && ContextEnum.PAGE_VIEW.getRequestContext().equalsIgnoreCase( context.getRequestContext() ) ) { - if( counter == null ) { - counter = new Counter(); - m_counters.put( pagename, counter ); - } - counter.increment(); - m_storage.setProperty( pagename, counter.toString() ); - m_dirty = true; - } - - if( show == null || STR_NONE.equals( show ) ) { - // nothing to show - - } else if( PARAM_COUNT.equals( show ) ) { - // show page count - if( counter == null ) { - counter = new Counter(); - m_counters.put( pagename, counter ); + // only count in view mode, keep storage values in sync + if( atomicIncrement.get() && ContextEnum.PAGE_VIEW.getRequestContext().equalsIgnoreCase( context.getRequestContext() ) ) { + if( counter == null ) { + counter = new Counter(); + m_counters.put( pagename, counter ); + } + counter.increment(); m_storage.setProperty( pagename, counter.toString() ); m_dirty = true; } - result = counter.toString(); - - } else if( body != null && !body.isEmpty() && STR_LIST.equals( show ) ) { - // show list of counts - String header = STR_EMPTY; - String line = body; - String footer = STR_EMPTY; - int start = body.indexOf( STR_SEPARATOR ); - - // split body into header, line, footer on ---- separator - if( 0 < start ) { - header = body.substring( 0, start ); - start = skipWhitespace( start + STR_SEPARATOR.length(), body ); - int end = body.indexOf( STR_SEPARATOR, start ); - if( start >= end ) { - line = body.substring( start ); - } else { - line = body.substring( start, end ); - end = skipWhitespace( end + STR_SEPARATOR.length(), body ); - footer = body.substring( end ); + + if( show == null || STR_NONE.equals( show ) ) { + // nothing to show + + } else if( PARAM_COUNT.equals( show ) ) { + // show page count + if( counter == null ) { + counter = new Counter(); + m_counters.put( pagename, counter ); + m_storage.setProperty( pagename, counter.toString() ); + m_dirty = true; + } + atomicResult.set(counter.toString()); + + } else if( body != null && 0 < body.length() && STR_LIST.equals( show ) ) { + // show list of counts + String header = STR_EMPTY; + String line = body; + String footer = STR_EMPTY; + int start = body.indexOf( STR_SEPARATOR ); + + // split body into header, line, footer on ---- separator + if( 0 < start ) { + header = body.substring( 0, start ); + start = skipWhitespace( start + STR_SEPARATOR.length(), body ); + int end = body.indexOf( STR_SEPARATOR, start ); + if( start >= end ) { + line = body.substring( start ); + } else { + line = body.substring( start, end ); + end = skipWhitespace( end + STR_SEPARATOR.length(), body ); + footer = body.substring( end ); + 
} } - } - // sort on name or count? - Map< String, Counter > sorted = m_counters; - if( PARAM_COUNT.equals( sort ) ) { - sorted = new TreeMap<>( m_compareCountDescending ); - sorted.putAll( m_counters ); - } + // sort on name or count? + Map< String, Counter > sorted = m_counters; + if( PARAM_COUNT.equals( sort ) ) { + sorted = new TreeMap<>( m_compareCountDescending ); + sorted.putAll( m_counters ); + } - // build a messagebuffer with the list in wiki markup - final StringBuffer buf = new StringBuffer( header ); - final MessageFormat fmt = new MessageFormat( line ); - final Object[] args = new Object[] { pagename, STR_EMPTY, STR_EMPTY }; - final Iterator< Entry< String, Counter > > iter = sorted.entrySet().iterator(); + // build a messagebuffer with the list in wiki markup + final StringBuffer buf = new StringBuffer( header ); + final MessageFormat fmt = new MessageFormat( line ); + final Object[] args = new Object[] { pagename, STR_EMPTY, STR_EMPTY }; + final Iterator< Entry< String, Counter > > iter = sorted.entrySet().iterator(); - while( 0 < entries && iter.hasNext() ) { - final Entry< String, Counter > entry = iter.next(); - final String name = entry.getKey(); + while( 0 < atomicEntries.get() && iter.hasNext() ) { + final Entry< String, Counter > entry = iter.next(); + final String name = entry.getKey(); - // check minimum/maximum count - final int value = entry.getValue().getValue(); - boolean use = min <= value && value <= max; + // check minimum/maximum count + final int value = entry.getValue().getValue(); + boolean use = min <= value && value <= max; - // did we specify a refer-to page? - if( use && referrers != null ) { - use = referrers.contains( name ); - } + // did we specify a refer-to page? + if( use && atomicReferrers.get() != null ) { + use = atomicReferrers.get().contains( name ); + } - // did we specify what pages to include? - if( use && include != null ) { - use = false; + // did we specify what pages to include? + if( use && include != null ) { + use = false; - for( int n = 0; !use && n < include.length; n++ ) { - use = matcher.matches( name, include[ n ] ); + for( int n = 0; !use && n < include.length; n++ ) { + use = matcher.matches( name, include[ n ] ); + } } - } - // did we specify what pages to exclude? - if( use && null != exclude ) { - for( int n = 0; use && n < exclude.length; n++ ) { - use = !matcher.matches( name, exclude[ n ] ); + // did we specify what pages to exclude? 
+ if( use && null != exclude ) { + for( int n = 0; use && n < exclude.length; n++ ) { + use = !matcher.matches( name, exclude[ n ] ); + } } - } - if( use ) { - args[ 1 ] = engine.getManager( RenderingManager.class ).beautifyTitle( name ); - args[ 2 ] = entry.getValue(); + if( use ) { + args[ 1 ] = engine.getManager( RenderingManager.class ).beautifyTitle( name ); + args[ 2 ] = entry.getValue(); - fmt.format( args, buf, null ); + fmt.format( args, buf, null ); - entries--; + atomicEntries.decrementAndGet(); + } } - } - buf.append( footer ); + buf.append( footer ); - // let the engine render the list - result = engine.getManager( RenderingManager.class ).textToHTML( context, buf.toString() ); + // let the engine render the list + atomicResult.set(engine.getManager(RenderingManager.class).textToHTML(context, buf.toString())); + } + } catch (Exception e) { + if (e instanceof PluginException) { + pluginExceptionRef.set(new PluginException(e.getMessage())); + } } - } finally { - lock.unlock(); - } + }); } - return result; + + if (pluginExceptionRef.get() != null) { + throw pluginExceptionRef.get(); + } + + return atomicResult.get(); } /** @@ -493,7 +497,7 @@ public String execute( final Context context, final Map< String, String > params */ private Pattern[] compileGlobs( final String name, final String value ) throws PluginException { Pattern[] result = null; - if( value != null && !value.isEmpty() && !STR_GLOBSTAR.equals( value ) ) { + if( value != null && 0 < value.length() && !STR_GLOBSTAR.equals( value ) ) { try { final PatternCompiler pc = new GlobCompiler(); final String[] ptrns = StringUtils.split( value, STR_COMMA ); @@ -541,8 +545,7 @@ int getCount( final Object key ) private void loadCounters() { if( m_counters != null && m_storage != null ) { LOG.info( "Loading counters." ); - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { try( final InputStream fis = Files.newInputStream( new File( m_workDir, COUNTER_PAGE ).toPath() ) ) { m_storage.load( fis ); } catch( final IOException ioe ) { @@ -555,9 +558,7 @@ private void loadCounters() { } LOG.info( "Loaded " + m_counters.size() + " counter values." ); - } finally { - lock.unlock(); - } + }); } } @@ -567,8 +568,7 @@ private void loadCounters() { void storeCounters() { if( m_counters != null && m_storage != null && m_dirty ) { LOG.info( "Storing " + m_counters.size() + " counter values." 
); - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { // Write out the collection of counters try( final OutputStream fos = Files.newOutputStream( new File( m_workDir, COUNTER_PAGE ).toPath() ) ) { m_storage.store( fos, "\n# The number of times each page has been viewed.\n# Do not modify.\n" ); @@ -578,9 +578,7 @@ void storeCounters() { } catch( final IOException ioe ) { LOG.error( "Couldn't store counters values: " + ioe.getMessage() ); } - } finally { - lock.unlock(); - } + }); } } @@ -592,13 +590,7 @@ void storeCounters() { */ private boolean isRunning( final Thread thrd ) { - lock.lock(); - try { - return m_initialized && thrd == m_pageCountSaveThread; - } finally { - lock.unlock(); - } - + return Synchronizer.synchronize(lock, () -> m_initialized && thrd == m_pageCountSaveThread); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java index 8d5bcf415d..9b94ce2495 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java @@ -33,6 +33,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.cache.CacheInfo; import org.apache.wiki.cache.CachingManager; import org.apache.wiki.util.ClassUtil; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import java.io.IOException; @@ -156,8 +157,7 @@ public List< Attachment > listAllChanged( final Date timestamp ) throws Provider all = provider.listAllChanged( timestamp ); // Make sure that all attachments are in the cache. - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { for( final Attachment att : all ) { cachingManager.put( CachingManager.CACHE_ATTACHMENTS, att.getName(), att ); } @@ -165,9 +165,7 @@ public List< Attachment > listAllChanged( final Date timestamp ) throws Provider allRequested = true; attachments.set( all.size() ); } - } finally { - lock.unlock(); - } + }); } else { final List< String > keys = cachingManager.keys( CachingManager.CACHE_ATTACHMENTS ); all = new ArrayList<>(); @@ -259,8 +257,7 @@ public void deleteAttachment( final Attachment att ) throws ProviderException { */ @Override public String getProviderInfo() { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { final CacheInfo attCacheInfo = cachingManager.info( CachingManager.CACHE_ATTACHMENTS ); final CacheInfo attColCacheInfo = cachingManager.info( CachingManager.CACHE_ATTACHMENTS_COLLECTION ); return "Real provider: " + provider.getClass().getName() + @@ -268,9 +265,7 @@ public String getProviderInfo() { ". Attachment cache misses: " + attCacheInfo.getMisses() + ". Attachment collection cache hits: " + attColCacheInfo.getHits() + ". 
Attachment collection cache misses: " + attColCacheInfo.getMisses(); - } finally { - lock.unlock(); - } + }); } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java index 745e58b734..bc66c43aa8 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java @@ -35,6 +35,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.parser.MarkupParser; import org.apache.wiki.render.RenderingManager; import org.apache.wiki.util.ClassUtil; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import java.io.IOException; @@ -235,8 +236,7 @@ private String getTextFromCache( final String pageName ) throws ProviderExceptio */ @Override public void putPageText( final Page page, final String text ) throws ProviderException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { provider.putPageText( page, text ); page.setLastModified( new Date() ); @@ -246,9 +246,7 @@ public void putPageText( final Page page, final String text ) throws ProviderExc cachingManager.remove( CachingManager.CACHE_PAGES_HISTORY, page.getName() ); getPageInfoFromCache( page.getName() ); - } finally { - lock.unlock(); - } + }); pages.incrementAndGet(); } @@ -261,15 +259,12 @@ public Collection< Page > getAllPages() throws ProviderException { if ( !allRequested ) { all = provider.getAllPages(); // Make sure that all pages are in the cache. - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { for( final Page p : all ) { cachingManager.put( CachingManager.CACHE_PAGES, p.getName(), p ); } allRequested = true; - } finally { - lock.unlock(); - } + }); pages.set( all.size() ); } else { final List< String > keys = cachingManager.keys( CachingManager.CACHE_PAGES ); @@ -371,8 +366,7 @@ public List< Page > getVersionHistory( final String pageName) throws ProviderExc */ @Override public String getProviderInfo() { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { final CacheInfo pageCacheInfo = cachingManager.info( CachingManager.CACHE_PAGES ); final CacheInfo pageHistoryCacheInfo = cachingManager.info( CachingManager.CACHE_PAGES_HISTORY ); return "Real provider: " + provider.getClass().getName()+ @@ -380,9 +374,7 @@ public String getProviderInfo() { ". Page cache misses: " + pageCacheInfo.getMisses() + ". History cache hits: " + pageHistoryCacheInfo.getHits() + ". History cache misses: " + pageHistoryCacheInfo.getMisses(); - } finally { - lock.unlock(); - } + }); } /** @@ -391,8 +383,7 @@ public String getProviderInfo() { @Override public void deleteVersion( final String pageName, final int version ) throws ProviderException { // Luckily, this is such a rare operation it is okay to synchronize against the whole thing. - lock.lock(); - try { + Synchronizer.synchronize(lock, (Synchronizer.ThrowingRunnable) () -> { final Page cached = getPageInfoFromCache( pageName ); final int latestcached = ( cached != null ) ? 
cached.getVersion() : Integer.MIN_VALUE; @@ -404,9 +395,7 @@ public void deleteVersion( final String pageName, final int version ) throws Pro provider.deleteVersion( pageName, version ); cachingManager.remove( CachingManager.CACHE_PAGES_HISTORY, pageName ); - } finally { - lock.unlock(); - } + }); if( version == PageProvider.LATEST_VERSION ) { pages.decrementAndGet(); } @@ -418,15 +407,12 @@ public void deleteVersion( final String pageName, final int version ) throws Pro @Override public void deletePage( final String pageName ) throws ProviderException { // See note in deleteVersion(). - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { cachingManager.put( CachingManager.CACHE_PAGES, pageName, null ); cachingManager.put( CachingManager.CACHE_PAGES_TEXT, pageName, null ); cachingManager.put( CachingManager.CACHE_PAGES_HISTORY, pageName, null ); provider.deletePage( pageName ); - } finally { - lock.unlock(); - } + }); pages.decrementAndGet(); } @@ -436,8 +422,7 @@ public void deletePage( final String pageName ) throws ProviderException { @Override public void movePage( final String from, final String to ) throws ProviderException { provider.movePage( from, to ); - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { // Clear any cached version of the old page and new page cachingManager.remove( CachingManager.CACHE_PAGES, from ); cachingManager.remove( CachingManager.CACHE_PAGES_TEXT, from ); @@ -446,9 +431,7 @@ public void movePage( final String from, final String to ) throws ProviderExcept cachingManager.remove( CachingManager.CACHE_PAGES, to ); cachingManager.remove( CachingManager.CACHE_PAGES_TEXT, to ); cachingManager.remove( CachingManager.CACHE_PAGES_HISTORY, to ); - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java index d6908853ca..9f814df565 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java @@ -29,6 +29,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.api.providers.WikiProvider; import org.apache.wiki.api.spi.Wiki; import org.apache.wiki.util.FileUtil; +import org.apache.wiki.util.Synchronizer; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; @@ -42,6 +43,8 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Date; import java.util.List; import java.util.Properties; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; /** @@ -283,26 +286,29 @@ private int realVersion( final String page, final int requestedVersion ) throws * {@inheritDoc} */ @Override - public String getPageText( final String page, int version ) throws ProviderException { - lock.lock(); - try { - final File dir = findOldPageDir( page ); + public String getPageText(final String page, int version) throws ProviderException { + final AtomicReference result = new AtomicReference<>(); + final int finalVersion = version; // Make it effectively final for use in lambda - version = realVersion( page, version ); - if( version == -1 ) { + Synchronizer.synchronize(lock, () -> { + final File dir = findOldPageDir(page); + int realVersion = realVersion(page, finalVersion); + + if (realVersion == -1) { // We can let the 
FileSystemProvider take care of these requests. - return super.getPageText( page, PageProvider.LATEST_VERSION ); + result.set(super.getPageText(page, PageProvider.LATEST_VERSION)); + return; } - final File pageFile = new File( dir, ""+version+FILE_EXT ); - if( !pageFile.exists() ) { - throw new NoSuchVersionException("Version "+version+"does not exist."); + final File pageFile = new File(dir, "" + realVersion + FILE_EXT); + if (!pageFile.exists()) { + throw new NoSuchVersionException("Version " + realVersion + " does not exist."); } - return readFile( pageFile ); - } finally { - lock.unlock(); - } + result.set(readFile(pageFile)); + }); + + return result.get(); } @@ -346,8 +352,7 @@ private String readFile( final File pagedata ) throws ProviderException { */ @Override public void putPageText( final Page page, final String text ) throws ProviderException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { // This is a bit complicated. We'll first need to copy the old file to be the newest file. final int latest = findLatestVersion( page.getName() ); final File pageDir = findOldPageDir( page.getName() ); @@ -417,9 +422,7 @@ public void putPageText( final Page page, final String text ) throws ProviderExc LOG.error( "Saving failed", e ); throw new ProviderException("Could not save page text: "+e.getMessage()); } - } finally { - lock.unlock(); - } + }); } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java b/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java index 7a08d55765..bb6a8e7cc8 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java @@ -38,6 +38,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.event.WikiPageEvent; import org.apache.wiki.pages.PageManager; import org.apache.wiki.render.RenderingManager; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import java.io.*; @@ -47,6 +48,8 @@ Licensed to the Apache Software Foundation (ASF) under one import java.security.NoSuchAlgorithmException; import java.util.*; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; @@ -246,63 +249,61 @@ public void initialize( final Collection< Page > pages ) throws ProviderExceptio */ @SuppressWarnings("unchecked") private long unserializeFromDisk() throws IOException, ClassNotFoundException { - lock.lock(); - try { - final long saved; - - final File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE ); - try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) { - final StopWatch sw = new StopWatch(); - sw.start(); + final AtomicLong saved = new AtomicLong(0); + Synchronizer.synchronize(lock, () -> { + try { + final File f = new File(m_engine.getWorkDir(), SERIALIZATION_FILE); + try (final ObjectInputStream in = new ObjectInputStream(new BufferedInputStream(Files.newInputStream(f.toPath())))) { + final StopWatch sw = new StopWatch(); + sw.start(); - final long ver = in.readLong(); + final long ver = in.readLong(); - if( ver != serialVersionUID ) { - throw new IOException("File format has changed; I need to recalculate references."); - } + if (ver != serialVersionUID) { + 
throw new IOException("File format has changed; I need to recalculate references."); + } - saved = in.readLong(); - m_refersTo = ( Map< String, Collection< String > > ) in.readObject(); - m_referredBy = ( Map< String, Set< String > > ) in.readObject(); + saved.set(in.readLong()); + m_refersTo = (Map>) in.readObject(); + m_referredBy = (Map>) in.readObject(); - m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy ); - m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo ); + m_unmutableReferredBy = Collections.unmodifiableMap(m_referredBy); + m_unmutableRefersTo = Collections.unmodifiableMap(m_refersTo); - sw.stop(); - LOG.debug( "Read serialized data successfully in {}", sw ); + sw.stop(); + LOG.debug("Read serialized data successfully in {}", sw); + } + } catch (IOException | ClassNotFoundException e) { + throw new RuntimeException(e); } + }); - return saved; - } finally { - lock.unlock(); - } + return saved.get(); } + /** * Serializes hashmaps to disk. The format is private, don't touch it. */ private void serializeToDisk() { - lock.lock(); - try { - final File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE ); - try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) { + Synchronizer.synchronize(lock, () -> { + final File f = new File(m_engine.getWorkDir(), SERIALIZATION_FILE); + try (final ObjectOutputStream out = new ObjectOutputStream(new BufferedOutputStream(Files.newOutputStream(f.toPath())))) { final StopWatch sw = new StopWatch(); sw.start(); - out.writeLong( serialVersionUID ); - out.writeLong( System.currentTimeMillis() ); // Timestamp - out.writeObject( m_refersTo ); - out.writeObject( m_referredBy ); + out.writeLong(serialVersionUID); + out.writeLong(System.currentTimeMillis()); // Timestamp + out.writeObject(m_refersTo); + out.writeObject(m_referredBy); sw.stop(); - LOG.debug( "serialization done - took {}", sw ); - } catch( final IOException ioe ) { - LOG.error( "Unable to serialize!", ioe ); + LOG.debug("serialization done - took {}", sw); + } catch (final IOException ioe) { + LOG.error("Unable to serialize!", ioe); } - } finally { - lock.unlock(); - } + }); } private String getHashFileName( final String pageName ) { @@ -323,64 +324,73 @@ private String getHashFileName( final String pageName ) { /** * Reads the serialized data from the disk back to memory. Returns the date when the data was last written on disk */ - private long unserializeAttrsFromDisk( final Page p ) throws IOException, ClassNotFoundException { - lock.lock(); - try { - long saved = 0L; - - // Find attribute cache, and check if it exists - final String hashName = getHashFileName( p.getName() ); - if( hashName != null ) { - File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR ); - f = new File( f, hashName ); - if( !f.exists() ) { - return 0L; - } - - try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) { - final StopWatch sw = new StopWatch(); - sw.start(); - LOG.debug( "Deserializing attributes for {}", p.getName() ); + private long unserializeAttrsFromDisk(final Page p) throws IOException, ClassNotFoundException { + final AtomicLong saved = new AtomicLong(0L); + final AtomicReference ioExceptionRef = new AtomicReference<>(); + final AtomicReference classNotFoundExceptionRef = new AtomicReference<>(); - final long ver = in.readLong(); - if( ver != serialVersionUID ) { - LOG.debug( "File format has changed; cannot deserialize." 
); - return 0L; + Synchronizer.synchronize(lock, () -> { + try { + long localSaved = 0L; + + // Find attribute cache, and check if it exists + final String hashName = getHashFileName(p.getName()); + if (hashName != null) { + File f = new File(m_engine.getWorkDir(), SERIALIZATION_DIR); + f = new File(f, hashName); + if (!f.exists()) { + return; } - saved = in.readLong(); - final String name = in.readUTF(); - if( !name.equals( p.getName() ) ) { - LOG.debug( "File name does not match ({}), skipping...", name ); - return 0L; // Not here - } + try (final ObjectInputStream in = new ObjectInputStream(new BufferedInputStream(Files.newInputStream(f.toPath())))) { + // ... existing code here + final long ver = in.readLong(); + if (ver != serialVersionUID) { + // Log or handle as needed + return; + } - final long entries = in.readLong(); - for( int i = 0; i < entries; i++ ) { - final String key = in.readUTF(); - final Object value = in.readObject(); - p.setAttribute( key, value ); - LOG.debug( " attr: {}={}", key, value ); - } + localSaved = in.readLong(); + final String name = in.readUTF(); + if (!name.equals(p.getName())) { + // Log or handle as needed + return; + } - sw.stop(); - LOG.debug( "Read serialized data for {} successfully in {}", name, sw ); - p.setHasMetadata(); + final long entries = in.readLong(); + for (int i = 0; i < entries; i++) { + final String key = in.readUTF(); + final Object value = in.readObject(); + p.setAttribute(key, value); + // Log or handle as needed + } + } } + + saved.set(localSaved); + } catch (IOException e) { + ioExceptionRef.set(e); + } catch (ClassNotFoundException e) { + classNotFoundExceptionRef.set(e); } + }); - return saved; - } finally { - lock.unlock(); + if (ioExceptionRef.get() != null) { + throw ioExceptionRef.get(); } + + if (classNotFoundExceptionRef.get() != null) { + throw classNotFoundExceptionRef.get(); + } + + return saved.get(); } /** * Serializes hashmaps to disk. The format is private, don't touch it. 
*/ private void serializeAttrsToDisk( final Page p ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { final StopWatch sw = new StopWatch(); sw.start(); @@ -424,9 +434,7 @@ private void serializeAttrsToDisk( final Page p ) { LOG.debug( "serialization for {} done - took {}", p.getName(), sw ); } } - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java b/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java index 2e3122137d..34834ea69e 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java @@ -34,6 +34,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.pages.PageManager; import org.apache.wiki.pages.PageTimeComparator; import org.apache.wiki.render.RenderingManager; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import org.apache.wiki.variables.VariableManager; @@ -213,23 +214,15 @@ public String generateFeed( final Context wikiContext, final List< Page > change /** {@inheritDoc} */ @Override public boolean isEnabled() { - lock.lock(); - try { - return m_enabled; - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_enabled); } /** {@inheritDoc} */ @Override public void setEnabled( final boolean enabled ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { m_enabled = enabled; - } finally { - lock.unlock(); - } + }); } /** {@inheritDoc} */ diff --git a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java index 758284f006..6839f27fe7 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java @@ -67,6 +67,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.pages.PageManager; import org.apache.wiki.util.ClassUtil; import org.apache.wiki.util.FileUtil; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import java.io.File; @@ -319,8 +320,7 @@ protected String getAttachmentContent( final Attachment att ) { * @param text The page text to index. 
*/ protected void updateLuceneIndex( final Page page, final String text ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { LOG.debug( "Updating Lucene index for page '{}'...", page.getName() ); pageRemoved( page ); @@ -337,9 +337,7 @@ protected void updateLuceneIndex( final Page page, final String text ) { } LOG.debug( "Done updating Lucene index for page '{}'.", page.getName() ); - } finally { - lock.unlock(); - } + }); } private Analyzer getLuceneAnalyzer() throws ProviderException { @@ -409,12 +407,9 @@ protected Document luceneIndexPage( final Page page, final String text, final In field = new Field( LUCENE_PAGE_KEYWORDS, page.getAttribute( "keywords" ).toString(), TextField.TYPE_STORED ); doc.add( field ); } - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { writer.addDocument( doc ); - } finally { - lock.unlock(); - } + }); return doc; } @@ -424,8 +419,7 @@ protected Document luceneIndexPage( final Page page, final String text, final In */ @Override public void pageRemoved( final Page page ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { try( final Directory luceneDir = new NIOFSDirectory( new File( m_luceneDirectory ).toPath() ); final IndexWriter writer = getIndexWriter( luceneDir ) ) { final Query query = new TermQuery( new Term( LUCENE_ID, page.getName() ) ); @@ -433,9 +427,7 @@ public void pageRemoved( final Page page ) { } catch( final Exception e ) { LOG.error( "Unable to remove page '{}' from Lucene index", page.getName(), e ); } - } finally { - lock.unlock(); - } + }); } IndexWriter getIndexWriter( final Directory luceneDir ) throws IOException, ProviderException { @@ -605,17 +597,14 @@ public void startupTask() throws Exception { @Override public void backgroundTask() { m_watchdog.enterState( "Emptying index queue", 60 ); - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { while( m_provider.m_updates.isEmpty() ) { final Object[] pair = m_provider.m_updates.remove( 0 ); final Page page = ( Page )pair[ 0 ]; final String text = ( String )pair[ 1 ]; m_provider.updateLuceneIndex( page, text ); } - } finally { - lock.unlock(); - } + }); m_watchdog.exitState(); diff --git a/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java b/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java index d85ec435c0..aa0b803f68 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java @@ -27,6 +27,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.ui.admin.SimpleAdminBean; import org.apache.wiki.ui.progress.ProgressItem; import org.apache.wiki.ui.progress.ProgressManager; +import org.apache.wiki.util.Synchronizer; import javax.management.NotCompliantMBeanException; import java.util.Collection; @@ -87,8 +88,7 @@ public String getTitle() * This method prevents itself from being called twice. 
*/ public void reload() { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { if( m_updater == null ) { m_updater = new WikiBackgroundThread( m_engine, 0 ) { @@ -132,9 +132,7 @@ public int getProgress() { m_updater.start(); } - } finally { - lock.unlock(); - } + }); } @Override diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java index 20be5cb527..a617760035 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java @@ -20,6 +20,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.api.core.Context; import org.apache.wiki.api.exceptions.WikiException; +import org.apache.wiki.util.Synchronizer; import java.io.Serializable; import java.security.Principal; @@ -158,12 +159,7 @@ public final String getMessageKey() { */ @Override public final Outcome getOutcome() { - lock.lock(); - try { - return m_outcome; - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_outcome); } /** @@ -195,8 +191,7 @@ public final boolean isStarted() { */ @Override public final void setOutcome(final Outcome outcome ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { // Is this an allowed Outcome? if( !m_successors.containsKey( outcome ) ) { if( !Outcome.STEP_CONTINUE.equals( outcome ) && !Outcome.STEP_ABORT.equals( outcome ) ) { @@ -213,9 +208,7 @@ public final void setOutcome(final Outcome outcome ) { m_end = new Date( System.currentTimeMillis() ); } m_outcome = outcome; - } finally { - lock.unlock(); - } + }); } @@ -224,16 +217,13 @@ public final void setOutcome(final Outcome outcome ) { */ @Override public final void start() throws WikiException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { if( m_started ) { throw new IllegalStateException( "Step already started." 
); } m_started = true; m_start = new Date( System.currentTimeMillis() ); - } finally { - lock.unlock(); - } + }); } /** @@ -254,13 +244,10 @@ public final Step getSuccessor(final Outcome outcome ) { */ @Override public final void setWorkflow(final int workflowId, final Map< String, Serializable > workflowContext ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { this.workflowId = workflowId; this.workflowContext = workflowContext; - } finally { - lock.unlock(); - } + }); } public int getWorkflowId() { @@ -277,12 +264,9 @@ public Map< String, Serializable > getWorkflowContext() { * @param message the error message */ protected final void addError( final String message ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { m_errors.add( message ); - } finally { - lock.unlock(); - } + }); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java index c7f6393002..4d346e1ce8 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java @@ -22,6 +22,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.api.exceptions.WikiException; import org.apache.wiki.event.WikiEventEmitter; import org.apache.wiki.event.WorkflowEvent; +import org.apache.wiki.util.Synchronizer; import java.io.Serializable; import java.security.Principal; @@ -198,16 +199,13 @@ public boolean isReassignable() { * @param actor the actor to reassign the Decision to */ public final void reassign( final Principal actor ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { if( isReassignable() ) { m_actor = actor; } else { throw new IllegalArgumentException( "Decision cannot be reassigned." 
); } - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java index 592c3eb533..2847797b91 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java @@ -23,6 +23,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.api.exceptions.WikiException; import org.apache.wiki.event.WikiEventEmitter; import org.apache.wiki.event.WorkflowEvent; +import org.apache.wiki.util.Synchronizer; import java.io.Serializable; import java.security.Principal; @@ -66,13 +67,10 @@ public DecisionQueue() { * @param decision the Decision to add */ protected void add( final Decision decision ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { m_queue.addLast( decision ); decision.setId( next.getAndIncrement() ); - } finally { - lock.unlock(); - } + }); } /** @@ -91,12 +89,9 @@ protected Decision[] decisions() { * @param decision the decision to remove */ protected void remove( final Decision decision ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { m_queue.remove( decision ); - } finally { - lock.unlock(); - } + }); } /** @@ -155,8 +150,7 @@ public void decide( final Decision decision, final Outcome outcome, final Contex * @throws WikiException never */ public void reassign( final Decision decision, final Principal owner ) throws WikiException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { if( decision.isReassignable() ) { decision.reassign( owner ); @@ -164,9 +158,7 @@ public void reassign( final Decision decision, final Principal owner ) throws Wi return; } throw new IllegalStateException( "Reassignments not allowed for this decision." ); - } finally { - lock.unlock(); - } + }); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java index 45a074eee4..49b666feba 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java @@ -31,6 +31,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.event.WikiEvent; import org.apache.wiki.event.WikiEventEmitter; import org.apache.wiki.event.WorkflowEvent; +import org.apache.wiki.util.Synchronizer; import org.apache.wiki.util.TextUtil; import java.io.*; @@ -81,7 +82,6 @@ public DefaultWorkflowManager() { m_approvers = new ConcurrentHashMap<>(); m_queue = new DecisionQueue(); WikiEventEmitter.attach( this ); - // this.lock = new ReentrantLock(); } /** @@ -153,8 +153,7 @@ public void initialize( final Engine engine, final Properties props ) { */ @SuppressWarnings( "unchecked" ) long unserializeFromDisk( final File f ) { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { long saved = 0L; final StopWatch sw = new StopWatch(); sw.start(); @@ -176,17 +175,14 @@ long unserializeFromDisk( final File f ) { sw.stop(); return saved; - } finally { - lock.unlock(); - } + }); } /** * Serializes workflows and decisionqueue to disk. The format is private, don't touch it. 
*/ void serializeToDisk( final File f ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) { final StopWatch sw = new StopWatch(); sw.start(); @@ -203,9 +199,7 @@ void serializeToDisk( final File f ) { } catch( final IOException ioe ) { LOG.error( "Unable to serialize!", ioe ); } - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java index 9eed1d90fc..9552fac4f0 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java @@ -18,6 +18,8 @@ Licensed to the Apache Software Foundation (ASF) under one */ package org.apache.wiki.workflow; +import org.apache.wiki.util.Synchronizer; + import java.io.Serializable; import java.security.Principal; import java.util.Map; @@ -92,12 +94,9 @@ public final Principal getActor() { * @param step the successor */ public final void setSuccessor( final Step step ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { m_successor = step; - } finally { - lock.unlock(); - } + }); } /** @@ -107,12 +106,7 @@ public final void setSuccessor( final Step step ) { * @return the next step */ public final Step getSuccessor() { - lock.lock(); - try { - return m_successor; - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_successor); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java index 55848038c3..daa627fb09 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java @@ -22,6 +22,7 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.api.exceptions.WikiException; import org.apache.wiki.event.WikiEventEmitter; import org.apache.wiki.event.WorkflowEvent; +import org.apache.wiki.util.Synchronizer; import java.io.IOException; import java.io.ObjectInputStream; @@ -247,8 +248,7 @@ public Workflow( final String messageKey, final Principal owner ) { * If the Workflow had been previously aborted, this method throws an IllegalStateException. */ public final void abort( final Context context ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { // Check corner cases: previous abort or completion if( m_state == ABORTED ) { throw new IllegalStateException( "The workflow has already been aborted." 
); @@ -267,9 +267,7 @@ public final void abort( final Context context ) { m_state = ABORTED; WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.ABORTED ); cleanup(); - } finally { - lock.unlock(); - } + }); } /** @@ -294,15 +292,12 @@ public final void addMessageArgument( final Serializable obj ) { * @return the current actor */ public final Principal getCurrentActor() { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { if( m_currentStep == null ) { return null; } return m_currentStep.getActor(); - } finally { - lock.unlock(); - } + }); } /** @@ -368,12 +363,7 @@ public final Date getEndTime() { */ public final int getId() { - lock.lock(); - try { - return m_id; - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_id); } /** @@ -463,13 +453,10 @@ public final boolean isAborted() * @return true if the workflow has been started but has no more steps to perform; false if not. */ public final boolean isCompleted() { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { // If current step is null, then we're done return m_started && m_state == COMPLETED; - } finally { - lock.unlock(); - } + }); } /** @@ -502,8 +489,7 @@ public final Step getPreviousStep() * @throws WikiException if the current task's {@link Task#execute( Context )} method throws an exception */ public final void restart( final Context context ) throws WikiException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { if( m_state != WAITING ) { throw new IllegalStateException( "Workflow is not paused; cannot restart." ); } @@ -518,9 +504,7 @@ public final void restart( final Context context ) throws WikiException { abort( context ); throw e; } - } finally { - lock.unlock(); - } + }); } /** @@ -545,12 +529,9 @@ public final void setAttribute( final String attr, final Serializable obj ) { */ public final void setFirstStep( final Step step ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { m_firstStep = step; - } finally { - lock.unlock(); - } + }); } /** @@ -560,12 +541,9 @@ public final void setFirstStep( final Step step ) */ public final void setId( final int id ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { this.m_id = id; - } finally { - lock.unlock(); - } + }); } /** @@ -577,8 +555,7 @@ public final void setId( final int id ) * @throws WikiException if the current Step's {@link Step#start()} method throws an exception of any kind */ public final void start( final Context context ) throws WikiException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { if( m_state == ABORTED ) { throw new IllegalStateException( "Workflow cannot be started; it has already been aborted." ); } @@ -601,9 +578,7 @@ public final void start( final Context context ) throws WikiException { abort( context ); throw e; } - } finally { - lock.unlock(); - } + }); } /** @@ -611,16 +586,13 @@ public final void start( final Context context ) throws WikiException { * IllegalStateException. Once paused, the Workflow can be un-paused by executing the {@link #restart(Context)} method. */ public final void waitstate() { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { if ( m_state != RUNNING ) { throw new IllegalStateException( "Workflow is not running; cannot pause." 
); } m_state = WAITING; WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.WAITING ); - } finally { - lock.unlock(); - } + }); } /** @@ -636,16 +608,13 @@ protected void cleanup() { * calls the {@link #cleanup()} method to flush retained objects. This method will no-op if it has previously been called. */ protected final void complete() { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { if( !isCompleted() ) { m_state = COMPLETED; WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.COMPLETED ); cleanup(); } - } finally { - lock.unlock(); - } + }); } /** diff --git a/jspwiki-util/src/main/java/org/apache/wiki/util/CommentedProperties.java b/jspwiki-util/src/main/java/org/apache/wiki/util/CommentedProperties.java index 5e207e5412..565665bf92 100644 --- a/jspwiki-util/src/main/java/org/apache/wiki/util/CommentedProperties.java +++ b/jspwiki-util/src/main/java/org/apache/wiki/util/CommentedProperties.java @@ -78,16 +78,13 @@ public CommentedProperties(final Properties defaultValues ) @Override public void load(final InputStream inStream ) throws IOException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { // Load the file itself into a string m_propertyString = FileUtil.readContents( inStream, StandardCharsets.ISO_8859_1.name() ); // Now load it into the properties object as normal super.load( new ByteArrayInputStream( m_propertyString.getBytes(StandardCharsets.ISO_8859_1) ) ); - } finally { - lock.unlock(); - } + }); } /** @@ -99,15 +96,12 @@ public void load(final InputStream inStream ) throws IOException @Override public void load(final Reader in ) throws IOException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { m_propertyString = FileUtil.readContents( in ); // Now load it into the properties object as normal super.load( new ByteArrayInputStream( m_propertyString.getBytes(StandardCharsets.ISO_8859_1) ) ); - } finally { - lock.unlock(); - } + }); } /** @@ -116,12 +110,7 @@ public void load(final Reader in ) throws IOException @Override public Object setProperty(final String key, final String value ) { - lock.lock(); - try { - return put(key, value); - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> put(key, value)); } /** @@ -130,14 +119,11 @@ public Object setProperty(final String key, final String value ) @Override public void store(final OutputStream out, final String comments ) throws IOException { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { final byte[] bytes = m_propertyString.getBytes( StandardCharsets.ISO_8859_1 ); FileUtil.copyContents( new ByteArrayInputStream( bytes ), out ); out.flush(); - } finally { - lock.unlock(); - } + }); } /** @@ -146,16 +132,13 @@ public void store(final OutputStream out, final String comments ) throws IOExcep @Override public Object put(final Object arg0, final Object arg1 ) { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { // Write the property to the stored string writeProperty( arg0, arg1 ); // Return the result of from the superclass properties object return super.put(arg0, arg1); - } finally { - lock.unlock(); - } + }); } /** @@ -164,8 +147,7 @@ public Object put(final Object arg0, final Object arg1 ) @Override public void putAll(final Map< ? , ? 
> arg0 ) { - lock.lock(); - try { + Synchronizer.synchronize(lock, () -> { // Shove all of the entries into the property string for (final Entry value : arg0.entrySet()) { @SuppressWarnings("unchecked") final Entry entry = (Entry) value; @@ -174,9 +156,7 @@ public void putAll(final Map< ? , ? > arg0 ) // Call the superclass method super.putAll(arg0); - } finally { - lock.unlock(); - } + }); } /** @@ -185,16 +165,13 @@ public void putAll(final Map< ? , ? > arg0 ) @Override public Object remove(final Object key ) { - lock.lock(); - try { + return Synchronizer.synchronize(lock, () -> { // Remove from the property string deleteProperty( key ); // Call the superclass method return super.remove(key); - } finally { - lock.unlock(); - } + }); } /** @@ -203,12 +180,7 @@ public Object remove(final Object key ) @Override public String toString() { - lock.lock(); - try { - return m_propertyString; - } finally { - lock.unlock(); - } + return Synchronizer.synchronize(lock, () -> m_propertyString); } diff --git a/jspwiki-util/src/main/java/org/apache/wiki/util/Synchronizer.java b/jspwiki-util/src/main/java/org/apache/wiki/util/Synchronizer.java new file mode 100644 index 0000000000..3e409c9240 --- /dev/null +++ b/jspwiki-util/src/main/java/org/apache/wiki/util/Synchronizer.java @@ -0,0 +1,150 @@ +/* + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + */ + +package org.apache.wiki.util; + +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Supplier; + +/** + *

+ * <h2>Synchronizer Utility Class</h2>
+ *
+ * <p>This utility class is designed to provide a simplified interface for
+ * executing code blocks in a synchronized manner using {@link ReentrantLock}.
+ * It aims to improve code readability and maintainability by abstracting
+ * common locking idioms.</p>
+ *
+ * <p>Usage Example:</p>
+ * <pre>
+ * {@code
+ * ReentrantLock lock = new ReentrantLock();
+ * String result = Synchronizer.synchronize(lock, () -> {
+ *     // Your synchronized code here
+ *     return "some result";
+ * });
+ * }
+ * </pre>
+ *
+ * @since 2.12.2
+ */
+public class Synchronizer {
+
+    /**
+     * Executes a given {@link Supplier} within a synchronized block managed by
+     * a {@link ReentrantLock}.
+     *
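+     * <p>A typical value-returning call site looks like the following minimal sketch (the
+     * {@code cache} field and the key are illustrative only, not part of this patch):</p>
+     * <pre>
+     * {@code
+     * String value = Synchronizer.synchronize(lock, () -> cache.get("pageName"));
+     * }
+     * </pre>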

+     * <p>This method acquires the lock, executes the supplier's code, and then
+     * releases the lock. It is designed to replace the traditional lock idiom:</p>
+     *
+     * <pre>
+     * {@code
+     * lock.lock();
+     * try {
+     *     doSomething();
+     * } finally {
+     *     lock.unlock();
+     * }
+     * }
+     * </pre>
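+     *
+     * <p>With this utility the same block collapses to a single call; a minimal sketch of the
+     * equivalent call site (same {@code lock} and {@code doSomething()} as above):</p>
+     * <pre>
+     * {@code
+     * Synchronizer.synchronize(lock, () -> {
+     *     doSomething();
+     * });
+     * }
+     * </pre>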
+     *
+     * <p>Parameters:</p>
+     * <ul>
+     *   <li>{@code lock} - The ReentrantLock to be used for synchronization.</li>
+     *   <li>{@code supplier} - The supplier to be executed within the synchronized block.</li>
+     * </ul>
+     *
+     * <p>Returns:</p>
+     * <p>The result produced by the supplier.</p>
+     *
+     * <p>Throws:</p>
+     * <p>This method propagates any exceptions thrown by the supplier's code.</p>
+     *
+     * @param <T> The type of result produced by the supplier.
+     * @param lock The ReentrantLock to be used for synchronization.
+     * @param supplier The supplier to be executed within the synchronized block.
+     * @return The result produced by the supplier.
+     */
+    public static <T> T synchronize(final ReentrantLock lock, final Supplier<T> supplier) {
+        lock.lock();
+        try {
+            return supplier.get();
+        } finally {
+            lock.unlock();
+        }
+    }
+
+    /**
+     *

+     * <p>Functional interface for runnable tasks that can throw exceptions.</p>
+     *
+     * @param <E> the type of exception that may be thrown
+     */
+    @FunctionalInterface
+    public interface ThrowingRunnable<E extends Exception> {
+        /**
+         * Executes the operation.
+         *
+         * @throws E if an exception occurs during the operation
+         */
+        void run() throws E;
+    }
+
+    /**
+     *
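+     * <p>This is the usual "sneaky throw" idiom: the cast to {@code E} is erased at run time, so a
+     * checked exception can be rethrown without the compiler requiring a {@code throws} clause at the
+     * call site.</p>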

+     * <p>Throws the given exception as an unchecked exception.</p>
+     *
+     * @param <E> the type of exception to throw
+     * @param exception the exception to throw
+     * @throws E the thrown exception
+     */
+    @SuppressWarnings("unchecked")
+    private static <E extends Exception> void throwAsUnchecked(final Exception exception) throws E {
+        throw (E) exception;
+    }
+
+    /**
+     *

+     * <p>Executes a given {@link ThrowingRunnable} within a synchronized block managed by
+     * a {@link ReentrantLock}.</p>
+     *
+     * <p>Parameters:</p>
+     * <ul>
+     *   <li>{@code lock} - The ReentrantLock to be used for synchronization.</li>
+     *   <li>{@code throwingRunnable} - The ThrowingRunnable whose code needs to be executed within
+     *   the synchronized block.</li>
+     * </ul>
+     *
+     * <p>Throws:</p>
+     * <p>This method propagates any exceptions thrown by the ThrowingRunnable's code.</p>
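+     *
+     * <p>A usage sketch: a block that may throw a checked exception is rethrown unchecked. The cast
+     * mirrors how callers elsewhere in this patch select this overload; the {@code IOException} body
+     * and {@code workDir} name are illustrative only.</p>
+     * <pre>
+     * {@code
+     * Synchronizer.synchronize(lock, (Synchronizer.ThrowingRunnable<IOException>) () -> {
+     *     Files.createDirectories(workDir);   // may throw IOException
+     * });
+     * }
+     * </pre>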

+ * + * @param the type of exception that may be thrown + * @param lock the ReentrantLock to use for synchronization + * @param throwingRunnable the ThrowingRunnable to execute + */ + public static void synchronize(final ReentrantLock lock, final ThrowingRunnable throwingRunnable) { + lock.lock(); + try { + throwingRunnable.run(); + } catch (final Exception e) { + throwAsUnchecked(e); + } finally { + lock.unlock(); + } + } + +} + diff --git a/pom.xml b/pom.xml index 485a5debf1..7f21b34994 100644 --- a/pom.xml +++ b/pom.xml @@ -458,6 +458,13 @@ javax.servlet-api ${javax-servlet-api.version} + + + org.apache.jspwiki + jspwiki-util + ${project.version} + + From 1fa8bd70c1590372d8138c5846ea2cec8738d12d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Pablo=20Santos=20Rodr=C3=ADguez?= Date: Sat, 6 Apr 2024 14:06:30 +0200 Subject: [PATCH 03/10] Move ReentrantLock instantiation to class level variables + some format edits --- .../search/kendra/KendraSearchProvider.java | 47 ++++++++--------- .../auth/DefaultAuthenticationManager.java | 7 +-- .../auth/DefaultAuthorizationManager.java | 9 +--- .../apache/wiki/auth/DefaultUserManager.java | 13 ++--- .../org/apache/wiki/auth/SessionMonitor.java | 37 +++++--------- .../apache/wiki/auth/acl/AclEntryImpl.java | 10 +--- .../org/apache/wiki/auth/acl/AclImpl.java | 9 +--- .../org/apache/wiki/auth/authorize/Group.java | 4 +- .../wiki/auth/user/XMLUserDatabase.java | 50 ++++++++----------- .../wiki/diff/ContextualDiffProvider.java | 22 ++------ .../apache/wiki/filters/PageEventFilter.java | 6 +-- .../org/apache/wiki/filters/SpamFilter.java | 11 ++-- .../apache/wiki/plugin/BugReportHandler.java | 6 +-- .../apache/wiki/plugin/PageViewPlugin.java | 28 +++++------ .../providers/CachingAttachmentProvider.java | 6 +-- .../wiki/providers/CachingProvider.java | 8 +-- .../providers/VersioningFileProvider.java | 11 ++-- .../references/DefaultReferenceManager.java | 11 +--- .../apache/wiki/rss/DefaultRSSGenerator.java | 4 +- .../ui/admin/beans/SearchManagerBean.java | 3 +- .../org/apache/wiki/workflow/Workflow.java | 13 +++-- .../apache/wiki/util/CommentedProperties.java | 13 ++--- .../org/apache/wiki/util/Synchronizer.java | 10 ++-- 23 files changed, 112 insertions(+), 226 deletions(-) diff --git a/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java b/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java index bb4c643f60..334988f779 100644 --- a/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java +++ b/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java @@ -97,11 +97,7 @@ public class KendraSearchProvider implements SearchProvider { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public KendraSearchProvider() { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** * {@inheritDoc} @@ -121,7 +117,7 @@ public void initialize( final Engine engine, final Properties properties ) throw // Start the Kendra update thread, which waits first for a little while // before starting to go through the "pages that need updating". 
- if ( initialDelay >= 0 ) { + if( initialDelay >= 0 ) { final KendraUpdater updater = new KendraUpdater( engine, this, initialDelay, indexDelay ); updater.start(); } @@ -129,14 +125,14 @@ public void initialize( final Engine engine, final Properties properties ) throw private Map< String, Object > getContentTypes() { final Gson gson = new GsonBuilder().create(); - try ( final InputStream in = KendraSearchProvider.class.getResourceAsStream( "content_types.json" ) ) { + try( final InputStream in = KendraSearchProvider.class.getResourceAsStream( "content_types.json" ) ) { if ( in != null ) { final Type collectionType = new TypeToken< HashMap< String, Object > >() { }.getType(); return gson.fromJson( new InputStreamReader( in ), collectionType ); } - } catch ( final IOException e ) { - LOG.error( format( "Unable to load default propertyfile 'content_types.json': %s", e.getMessage() ), e ); + } catch( final IOException e ) { + LOG.error( "Unable to load default propertyfile 'content_types.json': {}", e.getMessage(), e ); } return null; } @@ -155,13 +151,12 @@ public String getProviderInfo() { @Override public void pageRemoved( final Page page ) { final String pageName = page.getName(); - final BatchDeleteDocumentRequest request = new BatchDeleteDocumentRequest().withIndexId( indexId ) - .withDocumentIdList( pageName ); + final BatchDeleteDocumentRequest request = new BatchDeleteDocumentRequest().withIndexId( indexId ).withDocumentIdList( pageName ); final BatchDeleteDocumentResult result = getKendra().batchDeleteDocument( request ); - if ( result.getFailedDocuments().size() == 0 ) { - LOG.debug( format( "Page '%s' was removed from index", pageName ) ); + if( result.getFailedDocuments().isEmpty() ) { + LOG.debug( "Page '{}' was removed from index", pageName ); } else { - LOG.error( format( "Failed to remove Page '%s' from index", pageName ) ); + LOG.error( "Failed to remove Page '{}' from index", pageName ); } } @@ -170,9 +165,9 @@ public void pageRemoved( final Page page ) { */ @Override public void reindexPage( final Page page ) { - if ( page != null ) { + if( page != null ) { updates.add( page ); - LOG.debug( format( "Scheduling page '%s' for indexing ...", page.getName() ) ); + LOG.debug( "Scheduling page '{}' for indexing ...", page.getName() ); } } @@ -206,16 +201,15 @@ public Collection< SearchResult > findPages( final String query, final Context w new String[]{ documentExcerpt } ); searchResults.add( searchResult ); } else { - LOG.error( format( "Page '%s' is not accessible", documentId ) ); + LOG.error( "Page '{}' is not accessible", documentId ); } } else { - LOG.error( - format( "Kendra found a result page '%s' that could not be loaded, removing from index", documentId ) ); + LOG.error( "Kendra found a result page '{}' that could not be loaded, removing from index", documentId ); pageRemoved( Wiki.contents().page( this.engine, documentId ) ); } break; default: - LOG.error( format( "Unknown query result type: %s", item.getType() ) ); + LOG.error( "Unknown query result type: {}", item.getType() ); } } return searchResults; @@ -326,7 +320,7 @@ private void doFullReindex() throws IOException { if ( pages.isEmpty() ) { return; } - LOG.debug( format( "Indexing all %d pages. Please wait ...", pages.size() ) ); + LOG.debug( "Indexing all {} pages. 
Please wait ...", pages.size() ); final String executionId = startExecution(); for ( final Page page : pages ) { // Since I do not want to handle the size limit @@ -395,18 +389,17 @@ private void indexOnePage( final Page page, final String executionId ) { final String pageName = page.getName(); try { final Document document = newDocument( page, executionId ); - final BatchPutDocumentRequest request = new BatchPutDocumentRequest().withIndexId( indexId ) - .withDocuments( document ); + final BatchPutDocumentRequest request = new BatchPutDocumentRequest().withIndexId( indexId ).withDocuments( document ); final BatchPutDocumentResult result = getKendra().batchPutDocument( request ); - if ( result.getFailedDocuments().size() == 0 ) { - LOG.info( format( "Successfully indexed Page '%s' as %s", page.getName(), document.getContentType() ) ); + if ( result.getFailedDocuments().isEmpty() ) { + LOG.info( "Successfully indexed Page '{}' as {}", page.getName(), document.getContentType() ); } else { for ( final BatchPutDocumentResponseFailedDocument failedDocument : result.getFailedDocuments() ) { - LOG.error( format( "Failed to index Page '%s': %s", failedDocument.getId(), failedDocument.getErrorMessage() ) ); + LOG.error( "Failed to index Page '{}': {}", failedDocument.getId(), failedDocument.getErrorMessage() ); } } } catch ( final IOException e ) { - LOG.error( format( "Failed to index Page '%s': %s", pageName, e.getMessage() ) ); + LOG.error( "Failed to index Page '{}': {}", pageName, e.getMessage() ); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java index 4fe3392def..f393e8b35a 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java @@ -113,12 +113,7 @@ public class DefaultAuthenticationManager implements AuthenticationManager { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public DefaultAuthenticationManager () { - lock = new ReentrantLock(); - } - + private final ReentrantLock lock = new ReentrantLock(); /** * {@inheritDoc} diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java index 79a72ce8a0..ca19d678a8 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java @@ -98,14 +98,7 @@ public class DefaultAuthorizationManager implements AuthorizationManager { * * @see java.util.concurrent.locks.ReentrantLock */ - private ReentrantLock lock; - - /** - * Constructs a new DefaultAuthorizationManager instance. 
- */ - public DefaultAuthorizationManager() { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** {@inheritDoc} */ @Override diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java index 74c8ded51c..4c1808563f 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java @@ -106,12 +106,7 @@ public class DefaultUserManager implements UserManager { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public DefaultUserManager() { - lock = new ReentrantLock(); - } - + private final ReentrantLock lock = new ReentrantLock(); /** {@inheritDoc} */ @Override @@ -423,7 +418,8 @@ public Principal[] listWikiNames() throws WikiSecurityException { * This is a convenience method. * @param listener the event listener */ - @Override public void addWikiEventListener( final WikiEventListener listener ) { + @Override + public void addWikiEventListener( final WikiEventListener listener ) { Synchronizer.synchronize(lock, () -> { WikiEventManager.addWikiEventListener( this, listener ); }); @@ -434,7 +430,8 @@ public Principal[] listWikiNames() throws WikiSecurityException { * This is a convenience method. * @param listener the event listener */ - @Override public void removeWikiEventListener( final WikiEventListener listener ) { + @Override + public void removeWikiEventListener( final WikiEventListener listener ) { Synchronizer.synchronize(lock, () -> { WikiEventManager.removeWikiEventListener( this, listener ); }); diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java index 0b5c45709a..33aef281bd 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java @@ -71,11 +71,8 @@ public class SessionMonitor implements HttpSessionListener { * * @see ReentrantLock */ - private final ReentrantLock createGuestSessionForLock; - private final ReentrantLock removeLock; - private final ReentrantLock userPrincipalsLock; - private final ReentrantLock addWikiEventListenerLock; - private final ReentrantLock removeWikiEventListenerLock; + private final ReentrantLock mSessionsLock = new ReentrantLock(); + private final ReentrantLock listenerLock = new ReentrantLock(); /** @@ -98,18 +95,8 @@ public static SessionMonitor getInstance( final Engine engine ) { } /** Construct the SessionListener */ - public SessionMonitor() { - createGuestSessionForLock = new ReentrantLock(); - removeLock = new ReentrantLock(); - userPrincipalsLock = new ReentrantLock(); - addWikiEventListenerLock = new ReentrantLock(); - removeWikiEventListenerLock = new ReentrantLock(); - } - private SessionMonitor( final Engine engine ) { - this(); m_engine = engine; - } /** @@ -195,9 +182,9 @@ public final Session find( final String sessionId ) { private Session createGuestSessionFor( final String sessionId ) { LOG.debug( "Session for session ID={}... not found. 
Creating guestSession()", sessionId ); final Session wikiSession = Wiki.session().guest( m_engine ); - Synchronizer.synchronize(createGuestSessionForLock, () -> { + Synchronizer.synchronize( mSessionsLock, () -> { m_sessions.put(sessionId, wikiSession); - }); + } ); return wikiSession; } @@ -222,9 +209,9 @@ public final void remove( final HttpSession session ) { if( session == null ) { throw new IllegalArgumentException( "Session cannot be null." ); } - Synchronizer.synchronize(removeLock, () -> { + Synchronizer.synchronize( mSessionsLock, () -> { m_sessions.remove( session.getId() ); - }); + } ); } /** @@ -246,11 +233,11 @@ public final int sessions() * @return the array of user principals */ public final Principal[] userPrincipals() { - final Collection principals = Synchronizer.synchronize(userPrincipalsLock, () -> - m_sessions.values().stream().map(Session::getUserPrincipal).collect(Collectors.toList())); + final Collection< Principal > principals = Synchronizer.synchronize( mSessionsLock, () -> + m_sessions.values().stream().map( Session::getUserPrincipal ).collect( Collectors.toList() ) ); - final Principal[] p = principals.toArray(new Principal[0]); - Arrays.sort(p, m_comparator); + final Principal[] p = principals.toArray( new Principal[ 0 ] ); + Arrays.sort( p, m_comparator ); return p; } @@ -261,7 +248,7 @@ public final Principal[] userPrincipals() { * @since 2.4.75 */ public final void addWikiEventListener( final WikiEventListener listener ) { - Synchronizer.synchronize(addWikiEventListenerLock, () -> { + Synchronizer.synchronize( listenerLock, () -> { WikiEventManager.addWikiEventListener( this, listener ); }); } @@ -273,7 +260,7 @@ public final void addWikiEventListener( final WikiEventListener listener ) { * @since 2.4.75 */ public final void removeWikiEventListener(final WikiEventListener listener) { - Synchronizer.synchronize(removeWikiEventListenerLock, () -> { + Synchronizer.synchronize( listenerLock, () -> { WikiEventManager.removeWikiEventListener(this, listener); }); } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java index 3a2d6d6d0b..2c570267da 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java @@ -19,7 +19,6 @@ Licensed to the Apache Software Foundation (ASF) under one package org.apache.wiki.auth.acl; import org.apache.wiki.auth.permissions.PagePermission; -import org.apache.wiki.event.WikiEventManager; import org.apache.wiki.util.Synchronizer; import java.io.Serializable; @@ -50,14 +49,7 @@ public class AclEntryImpl implements AclEntry, Serializable { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - /** - * Constructs a new AclEntryImpl instance. - */ - public AclEntryImpl() { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** * Adds the specified permission to this ACL entry. 
The permission must be of type diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java index 54af42c608..e974535b2a 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java @@ -46,18 +46,11 @@ public class AclImpl implements Acl, Serializable { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; + private final ReentrantLock lock = new ReentrantLock(); private static final long serialVersionUID = 1L; private final Vector< AclEntry > m_entries = new Vector<>(); - /** - * Constructs a new AclImpl instance. - */ - public AclImpl() { - lock = new ReentrantLock(); - } - /** {@inheritDoc} */ @Override public Principal[] findPrincipals( final Permission permission ) { diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java index 327af4841b..05c172be49 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java @@ -19,7 +19,6 @@ Licensed to the Apache Software Foundation (ASF) under one package org.apache.wiki.auth.authorize; import org.apache.wiki.auth.GroupPrincipal; -import org.apache.wiki.event.WikiEventManager; import org.apache.wiki.util.Synchronizer; import java.security.Principal; @@ -89,7 +88,7 @@ public class Group { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; + private final ReentrantLock lock = new ReentrantLock(); /** * Protected constructor to prevent direct instantiation except by other @@ -104,7 +103,6 @@ protected Group( final String name, final String wiki ) { m_name = name; m_wiki = wiki; m_principal = new GroupPrincipal( name ); - lock = new ReentrantLock(); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java index b0cc11182f..0eebbbeeb3 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java @@ -103,46 +103,38 @@ public class XMLUserDatabase extends AbstractUserDatabase { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public XMLUserDatabase() { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** {@inheritDoc} */ - public void deleteByLoginName(final String loginName) throws WikiSecurityException { - final AtomicBoolean userDeleted = new AtomicBoolean(false); - final AtomicReference exceptionRef = new AtomicReference<>(); - - Synchronizer.synchronize(lock, () -> { - try { - if (c_dom == null) { - throw new WikiSecurityException("FATAL: database does not exist"); + public void deleteByLoginName( final String loginName ) throws WikiSecurityException { + final AtomicBoolean userDeleted = new AtomicBoolean( false ); + final AtomicReference< WikiSecurityException > exceptionRef = new AtomicReference<>(); + Synchronizer.synchronize( lock, () -> { + try{ + if( c_dom == null ) { + throw new WikiSecurityException( "FATAL: database does not exist" ); } - - final NodeList users = c_dom.getDocumentElement().getElementsByTagName(USER_TAG); - for (int i = 0; i < users.getLength(); i++) { - final Element user = (Element) 
users.item(i); - if (user.getAttribute(LOGIN_NAME).equals(loginName)) { - c_dom.getDocumentElement().removeChild(user); + final NodeList users = c_dom.getDocumentElement().getElementsByTagName( USER_TAG ); + for( int i = 0; i < users.getLength(); i++ ) { + final Element user = ( Element ) users.item( i ); + if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) { + c_dom.getDocumentElement().removeChild( user ); // Commit to disk saveDOM(); - userDeleted.set(true); + userDeleted.set( true ); return; } } - } catch (WikiSecurityException e) { - exceptionRef.set(e); + } catch( WikiSecurityException e ) { + exceptionRef.set( e ); } - }); - - if (exceptionRef.get() != null) { + } ); + if( exceptionRef.get() != null ) { throw exceptionRef.get(); } - - if (!userDeleted.get()) { - throw new NoSuchPrincipalException("Not in database: " + loginName); + if( !userDeleted.get() ) { + throw new NoSuchPrincipalException( "Not in database: " + loginName ); } } @@ -446,7 +438,7 @@ public void save( final UserProfile profile ) throws WikiSecurityException { // Hash and save the new password if it's different from old one final String newPassword = profile.getPassword(); - if( newPassword != null && !newPassword.equals( "" ) ) { + if( newPassword != null && !newPassword.isEmpty() ) { final String oldPassword = user.getAttribute( PASSWORD ); if( !oldPassword.equals( newPassword ) ) { setAttribute( user, PASSWORD, getHash( newPassword ) ); diff --git a/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java index 0d70680e5e..c79c94e6bd 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java @@ -48,9 +48,7 @@ Licensed to the Apache Software Foundation (ASF) under one /** - * A seriously better diff provider, which highlights changes word-by-word using CSS. - * - * Suggested by John Volkar. + * A seriously better diff provider, which highlights changes word-by-word using CSS. Suggested by John Volkar. */ public class ContextualDiffProvider implements DiffProvider { @@ -102,16 +100,7 @@ public class ContextualDiffProvider implements DiffProvider { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - - /** - * Constructs this provider. - */ - public ContextualDiffProvider() - { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** * @see org.apache.wiki.api.providers.WikiProvider#getProviderInfo() @@ -188,9 +177,8 @@ public String makeDiffHtml( final Context ctx, final String wikiOld, final Strin * Take the string and create an array from it, split it first on newlines, making * sure to preserve the newlines in the elements, split each resulting element on * spaces, preserving the spaces. - * - * All this preseving of newlines and spaces is so the wikitext when diffed will have fidelity - * to it's original form. As a side affect we see edits of purely whilespace. + * All this preserving of newlines and spaces is so the wikitext when diffed will have fidelity + * to its original form. As a side effect we see edits of purely whitespace. 
*/ private String[] sequence( final String wikiText ) { final String[] linesArray = Diff.stringToArray( wikiText ); @@ -199,7 +187,7 @@ private String[] sequence( final String wikiText ) { String lastToken = null; String token; - // StringTokenizer might be discouraged but it still is perfect here... + // StringTokenizer might be discouraged, but it is still perfect here... for( final StringTokenizer st = new StringTokenizer( line, " ", true ); st.hasMoreTokens(); ) { token = st.nextToken(); diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java index bdbed514e6..5ffe768bcd 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java @@ -60,11 +60,7 @@ public class PageEventFilter extends BasePageFilter { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public PageEventFilter() { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** * Called whenever a new PageFilter is instantiated and reset. diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java index 4c51af8a44..434834ca52 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java @@ -237,9 +237,9 @@ public class SpamFilter extends BasePageFilter { private static String c_hashName; private static long c_lastUpdate; - /** The HASH_DELAY value is a maximum amount of time that an user can keep + /** The HASH_DELAY value is a maximum amount of time that a user can keep * a session open, because after the value has expired, we will invent a new - * hash field name. By default this is {@value} hours, which should be ample + * hash field name. By default, this is {@value} hours, which should be ample * time for someone. 
*/ private static final long HASH_DELAY = 24; @@ -252,12 +252,7 @@ public class SpamFilter extends BasePageFilter { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public SpamFilter() { - lock = new ReentrantLock(); - } - + private final ReentrantLock lock = new ReentrantLock(); /** * {@inheritDoc} diff --git a/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java b/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java index 090917ae38..03f9c8b59b 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java @@ -83,11 +83,7 @@ public class BugReportHandler implements Plugin { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public BugReportHandler() { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** * {@inheritDoc} diff --git a/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java b/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java index 39d8095c18..dad216e510 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java @@ -142,11 +142,7 @@ public class PageViewPlugin extends AbstractReferralPlugin implements Plugin, In * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public PageViewPlugin() { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** * Initialize the PageViewPlugin and its singleton. @@ -156,12 +152,12 @@ public PageViewPlugin() { @Override public void initialize( final Engine engine ) { LOG.info( "initializing PageViewPlugin" ); - Synchronizer.synchronize(lock, () -> { + Synchronizer.synchronize( lock, () -> { if( c_singleton == null ) { c_singleton = new PageViewManager(); } c_singleton.initialize( engine ); - }); + } ); } /** @@ -222,7 +218,7 @@ public final class PageViewManager implements WikiEventListener { * @param engine The wiki engine. */ public void initialize( final Engine engine ) { - Synchronizer.synchronize(lock, () -> { + Synchronizer.synchronize( lock, () -> { LOG.info( "initializing PageView Manager" ); m_workDir = engine.getWorkDir(); engine.addWikiEventListener( this ); @@ -241,7 +237,7 @@ public void initialize( final Engine engine ) { } m_initialized = true; - }); + } ); } /** @@ -366,7 +362,7 @@ public String execute( final Context context, final Map< String, String > params } } - Synchronizer.synchronize(lock, () -> { + Synchronizer.synchronize( lock, () -> { try { Counter counter = m_counters.get( pagename ); @@ -477,7 +473,7 @@ public String execute( final Context context, final Map< String, String > params pluginExceptionRef.set(new PluginException(e.getMessage())); } } - }); + } ); } if (pluginExceptionRef.get() != null) { @@ -545,7 +541,7 @@ int getCount( final Object key ) private void loadCounters() { if( m_counters != null && m_storage != null ) { LOG.info( "Loading counters." ); - Synchronizer.synchronize(lock, () -> { + Synchronizer.synchronize( lock, () -> { try( final InputStream fis = Files.newInputStream( new File( m_workDir, COUNTER_PAGE ).toPath() ) ) { m_storage.load( fis ); } catch( final IOException ioe ) { @@ -558,7 +554,7 @@ private void loadCounters() { } LOG.info( "Loaded " + m_counters.size() + " counter values." 
); - }); + } ); } } @@ -568,7 +564,7 @@ private void loadCounters() { void storeCounters() { if( m_counters != null && m_storage != null && m_dirty ) { LOG.info( "Storing " + m_counters.size() + " counter values." ); - Synchronizer.synchronize(lock, () -> { + Synchronizer.synchronize( lock, () -> { // Write out the collection of counters try( final OutputStream fos = Files.newOutputStream( new File( m_workDir, COUNTER_PAGE ).toPath() ) ) { m_storage.store( fos, "\n# The number of times each page has been viewed.\n# Do not modify.\n" ); @@ -578,7 +574,7 @@ void storeCounters() { } catch( final IOException ioe ) { LOG.error( "Couldn't store counters values: " + ioe.getMessage() ); } - }); + } ); } } @@ -590,7 +586,7 @@ void storeCounters() { */ private boolean isRunning( final Thread thrd ) { - return Synchronizer.synchronize(lock, () -> m_initialized && thrd == m_pageCountSaveThread); + return Synchronizer.synchronize( lock, () -> m_initialized && thrd == m_pageCountSaveThread ); } } diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java index 9b94ce2495..cd45469dc8 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java @@ -72,11 +72,7 @@ public class CachingAttachmentProvider implements AttachmentProvider { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public CachingAttachmentProvider() { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** * {@inheritDoc} diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java index bc66c43aa8..3aef37e446 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java @@ -80,18 +80,14 @@ public class CachingProvider implements PageProvider { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public CachingProvider() { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** * {@inheritDoc} */ @Override public void initialize( final Engine engine, final Properties properties ) throws NoRequiredPropertyException, IOException { - LOG.debug( "Initing CachingProvider" ); + LOG.debug( "Initializing CachingProvider" ); // engine is used for getting the search engine this.engine = engine; diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java index 9f814df565..e925e0c5f2 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java @@ -43,7 +43,6 @@ Licensed to the Apache Software Foundation (ASF) under one import java.util.Date; import java.util.List; import java.util.Properties; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; @@ -64,7 +63,7 @@ Licensed to the Apache Software Foundation (ASF) under one * * In this case, "Main" has three versions, and "Foobar" just one version. *

- * The properties file contains the necessary metainformation (such as author) + * The properties file contains the necessary meta information (such as author) * information of the page. DO NOT MESS WITH IT! * *

@@ -93,11 +92,7 @@ public class VersioningFileProvider extends AbstractFileProvider { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; - - public VersioningFileProvider() { - lock = new ReentrantLock(); - } + private final ReentrantLock lock = new ReentrantLock(); /** * {@inheritDoc} @@ -105,7 +100,7 @@ public VersioningFileProvider() { @Override public void initialize( final Engine engine, final Properties properties ) throws NoRequiredPropertyException, IOException { super.initialize( engine, properties ); - // some additional sanity checks : + // some additional sanity checks: final File oldpages = new File( getPageDirectory(), PAGEDIR ); if( !oldpages.exists() ) { if( !oldpages.mkdirs() ) { diff --git a/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java b/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java index bb6a8e7cc8..84c582dd35 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java @@ -144,7 +144,7 @@ public class DefaultReferenceManager extends BasePageFilter implements Reference * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; + private final ReentrantLock lock = new ReentrantLock(); /** * Builds a new ReferenceManager. @@ -157,12 +157,9 @@ public DefaultReferenceManager( final Engine engine ) { m_engine = engine; m_matchEnglishPlurals = TextUtil.getBooleanProperty( engine.getWikiProperties(), Engine.PROP_MATCHPLURALS, false ); - // // Create two maps that contain unmutable versions of the two basic maps. - // m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy ); m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo ); - lock = new ReentrantLock(); } /** @@ -610,13 +607,10 @@ protected Map< String, Set< String > > getReferredBy() { /** * Cleans the 'referred by' list, removing references by 'referrer' to any other page. Called after 'referrer' is removed. - * * Two ways to go about this. One is to look up all pages previously referred by referrer and remove referrer * from their lists, and let the update put them back in (except possibly removed ones). - * * The other is to get the old referred-to list, compare to the new, and tell the ones missing in the latter to remove referrer from * their list. - * * We'll just try the first for now. Need to come back and optimize this a bit. */ private void cleanReferredBy( final String referrer, @@ -849,10 +843,9 @@ public Collection< String > findRefersTo( final String pageName ) { * This 'deepHashCode' can be used to determine if there were any modifications made to the underlying to and by maps of the * ReferenceManager. The maps of the ReferenceManager are not synchronized, so someone could add/remove entries in them while the * hashCode is being computed. - * * This method traps and retries if a concurrent modification occurs. 
* - * @return Sum of the hashCodes for the to and by maps of the ReferenceManager + * @return Sum of the hashCodes for the 'to' and 'by' maps of the ReferenceManager * @since 2.3.24 */ // diff --git a/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java b/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java index 34834ea69e..2f1d65c7ec 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java @@ -49,7 +49,6 @@ Licensed to the Apache Software Foundation (ASF) under one /** * Default implementation for {@link RSSGenerator}. - * * {@inheritDoc} */ // FIXME: Limit diff and page content size. @@ -74,7 +73,7 @@ public class DefaultRSSGenerator implements RSSGenerator { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; + private final ReentrantLock lock = new ReentrantLock(); /** * Builds the RSS generator for a given Engine. @@ -87,7 +86,6 @@ public DefaultRSSGenerator( final Engine engine, final Properties properties ) { m_channelDescription = properties.getProperty( PROP_CHANNEL_DESCRIPTION, m_channelDescription ); m_channelLanguage = properties.getProperty( PROP_CHANNEL_LANGUAGE, m_channelLanguage ); m_rssFile = TextUtil.getStringProperty( properties, DefaultRSSGenerator.PROP_RSSFILE, "rss.rdf" ); - this.lock = new ReentrantLock(); } /** diff --git a/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java b/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java index aa0b803f68..59f1186d8b 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java @@ -56,12 +56,11 @@ public class SearchManagerBean extends SimpleAdminBean { * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; + private final ReentrantLock lock = new ReentrantLock(); public SearchManagerBean( final Engine engine ) throws NotCompliantMBeanException { super(); initialize( engine ); - this.lock = new ReentrantLock(); } @Override diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java index daa627fb09..0d5739b283 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java @@ -24,11 +24,14 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.wiki.event.WorkflowEvent; import org.apache.wiki.util.Synchronizer; -import java.io.IOException; -import java.io.ObjectInputStream; import java.io.Serializable; import java.security.Principal; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; @@ -217,7 +220,7 @@ public class Workflow implements Serializable { * * @see java.util.concurrent.locks.ReentrantLock */ - private ReentrantLock lock = new ReentrantLock(); + private final ReentrantLock lock = new ReentrantLock(); /** * Constructs a new Workflow object with a supplied message key, owner Principal, and undefined unique identifier {@link #ID_NOT_SET}. 
@@ -429,7 +432,7 @@ public final Date getStartTime() * Returns a Step history for this Workflow as a List, chronologically, from the first Step to the currently executing one. The first * step is the first item in the array. If the Workflow has not started, this method returns a zero-length array. * - * @return an array of Steps representing those that have executed, or are currently executing + * @return a List of Steps representing those that have executed, or are currently executing. */ public final List< Step > getHistory() { diff --git a/jspwiki-util/src/main/java/org/apache/wiki/util/CommentedProperties.java b/jspwiki-util/src/main/java/org/apache/wiki/util/CommentedProperties.java index 565665bf92..4d01387977 100644 --- a/jspwiki-util/src/main/java/org/apache/wiki/util/CommentedProperties.java +++ b/jspwiki-util/src/main/java/org/apache/wiki/util/CommentedProperties.java @@ -35,8 +35,8 @@ Licensed to the Apache Software Foundation (ASF) under one * comments present in the file are preserved. * @since 2.4.22 */ -public class CommentedProperties extends Properties -{ +public class CommentedProperties extends Properties { + private static final long serialVersionUID = 8057284636436329669L; private String m_propertyString; @@ -49,7 +49,7 @@ public class CommentedProperties extends Properties * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock lock; + private final ReentrantLock lock = new ReentrantLock(); /** * @see java.util.Properties#Properties() @@ -57,19 +57,16 @@ public class CommentedProperties extends Properties public CommentedProperties() { super(); - lock = new ReentrantLock(); } /** * Creates new properties. * - * @param defaultValues A list of default values, which are used if in subsequent gets - * a key is not found. + * @param defaultValues A list of default values, which are used if in subsequent gets a key is not found. */ - public CommentedProperties(final Properties defaultValues ) + public CommentedProperties( final Properties defaultValues ) { super( defaultValues ); - lock = new ReentrantLock(); } /** diff --git a/jspwiki-util/src/main/java/org/apache/wiki/util/Synchronizer.java b/jspwiki-util/src/main/java/org/apache/wiki/util/Synchronizer.java index 3e409c9240..9f5d57ce87 100644 --- a/jspwiki-util/src/main/java/org/apache/wiki/util/Synchronizer.java +++ b/jspwiki-util/src/main/java/org/apache/wiki/util/Synchronizer.java @@ -16,7 +16,6 @@ Licensed to the Apache Software Foundation (ASF) under one specific language governing permissions and limitations under the License. */ - package org.apache.wiki.util; import java.util.concurrent.locks.ReentrantLock; @@ -60,7 +59,6 @@ public class Synchronizer { * } finally { * lock.unlock(); * } - * * * *

Parameters:

@@ -113,7 +111,7 @@ public interface ThrowingRunnable {
      * @throws E the thrown exception
      */
     @SuppressWarnings("unchecked")
-    private static <E extends Throwable> void throwAsUnchecked(final Exception exception) throws E {
+    private static < E extends Throwable > void throwAsUnchecked( final Exception exception ) throws E {
         throw (E) exception;
     }
 
@@ -135,12 +133,12 @@ private static void throwAsUnchecked(final Exception excep
      * @param lock the ReentrantLock to use for synchronization
      * @param throwingRunnable the ThrowingRunnable to execute
      */
-    public static void synchronize(final ReentrantLock lock, final ThrowingRunnable throwingRunnable) {
+    public static < E extends Exception > void synchronize( final ReentrantLock lock, final ThrowingRunnable throwingRunnable ) {
         lock.lock();
         try {
             throwingRunnable.run();
-        } catch (final Exception e) {
-            throwAsUnchecked(e);
+        } catch( final Exception e ) {
+            throwAsUnchecked( e );
         } finally {
             lock.unlock();
         }

From fb83d61d5c29b57ebdd847539992956244d4d659 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juan=20Pablo=20Santos=20Rodr=C3=ADguez?=
Date: Sat, 6 Apr 2024 14:07:32 +0200
Subject: [PATCH 04/10] Requested changes on PR and comments on ML

---
 .../apache/wiki/event/WikiEventManager.java | 165 ++++++++----------
 1 file changed, 72 insertions(+), 93 deletions(-)

diff --git a/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java b/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java
index 87b6f05eff..0bdb543eeb 100644
--- a/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java
+++ b/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java
@@ -24,16 +24,16 @@ Licensed to the Apache Software Foundation (ASF) under one
 import org.apache.wiki.util.Synchronizer;
 
 import java.lang.ref.WeakReference;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.ConcurrentModificationException;
-import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.Vector;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -136,7 +136,7 @@ public final class WikiEventManager {
     private static WikiEventListener c_monitor;
 
     /* The Map of client object to WikiEventDelegate. */
-    private final Map< Object, WikiEventDelegate > m_delegates = new HashMap<>();
+    private final Map< Object, WikiEventDelegate > m_delegates = new ConcurrentHashMap<>();
 
     /* The Vector containing any preloaded WikiEventDelegates. */
     private final Vector< WikiEventDelegate > m_preloadCache = new Vector<>();
@@ -144,9 +144,9 @@ public final class WikiEventManager {
 
     /* Singleton instance of the WikiEventManager. */
     private static WikiEventManager c_instance;
 
-    /**
-     * A lock used to ensure thread safety when accessing shared resources.
-     * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+    /*
+     * Locks used to ensure thread safety when accessing shared resources.
+     * These locks provide more flexibility and capabilities than the intrinsic locking mechanism,
      * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
      * waiting to acquire a lock.
* @@ -154,17 +154,6 @@ public final class WikiEventManager { */ private static final ReentrantLock instanceLock = new ReentrantLock(); private static final ReentrantLock removeWikiEventListenerLock = new ReentrantLock(); - private static final ReentrantLock delegatesLockLock = new ReentrantLock(); - private static final ReentrantLock preloadCacheLock = new ReentrantLock(); - private static final ReentrantLock delegateForLock = new ReentrantLock(); - private static final ReentrantLock wikiEventListenersLock = new ReentrantLock(); - private static final ReentrantLock wikiEventListenerLock = new ReentrantLock(); - private static final ReentrantLock removeWikiEventListenerLock2 = new ReentrantLock(); - private static final ReentrantLock isListeningLock = new ReentrantLock(); - private static final ReentrantLock fireEventLock = new ReentrantLock(); - - - /** Constructor for a WikiEventManager. */ private WikiEventManager() { @@ -285,8 +274,8 @@ public static boolean removeWikiEventListener( final WikiEventListener listener } private void removeDelegates() { - Synchronizer.synchronize(delegatesLockLock, m_delegates::clear); - Synchronizer.synchronize(preloadCacheLock, m_preloadCache::clear); + m_delegates.clear(); + m_preloadCache.clear(); } public static void shutdown() { @@ -338,34 +327,32 @@ private Map< Object, WikiEventDelegate > getDelegates() { * @return the WikiEventDelegate. */ private WikiEventDelegate getDelegateFor(final Object client) { - return Synchronizer.synchronize(delegateForLock, () -> { - if (client == null || client instanceof Class) { // then preload the cache - final WikiEventDelegate delegate = new WikiEventDelegate(client); - m_preloadCache.add(delegate); - m_delegates.put(client, delegate); - return delegate; - } else if (!m_preloadCache.isEmpty()) { - // then see if any of the cached delegates match the class of the incoming client - for (int i = m_preloadCache.size() - 1; i >= 0; i--) { // start with most-recently added - final WikiEventDelegate delegate = m_preloadCache.elementAt(i); - if (delegate.getClientClass() == null || delegate.getClientClass().equals(client.getClass())) { - // we have a hit, so use it, but only on a client we haven't seen before - if (!m_delegates.containsKey(client)) { - m_preloadCache.remove(delegate); - m_delegates.put(client, delegate); - return delegate; - } + if (client == null || client instanceof Class) { // then preload the cache + final WikiEventDelegate delegate = new WikiEventDelegate(client); + m_preloadCache.add(delegate); + m_delegates.put(client, delegate); + return delegate; + } else if (!m_preloadCache.isEmpty()) { + // then see if any of the cached delegates match the class of the incoming client + for (int i = m_preloadCache.size() - 1; i >= 0; i--) { // start with most-recently added + final WikiEventDelegate delegate = m_preloadCache.elementAt(i); + if (delegate.getClientClass() == null || delegate.getClientClass().equals(client.getClass())) { + // we have a hit, so use it, but only on a client we haven't seen before + if (!m_delegates.containsKey(client)) { + m_preloadCache.remove(delegate); + m_delegates.put(client, delegate); + return delegate; } } } - // otherwise treat normally... - WikiEventDelegate delegate = m_delegates.get(client); - if (delegate == null) { - delegate = new WikiEventDelegate(client); - m_delegates.put(client, delegate); - } - return delegate; - }); + } + // otherwise treat normally... 
+ WikiEventDelegate delegate = m_delegates.get(client); + if (delegate == null) { + delegate = new WikiEventDelegate(client); + m_delegates.put(client, delegate); + } + return delegate; } @@ -381,7 +368,7 @@ private WikiEventDelegate getDelegateFor(final Object client) { private static final class WikiEventDelegate { /* A list of event listeners for this instance. */ - private final ArrayList< WeakReference< WikiEventListener > > m_listenerList = new ArrayList<>(); + private final CopyOnWriteArrayList< WeakReference< WikiEventListener > > m_listenerList = new CopyOnWriteArrayList<>(); private Class< ? > m_class; /** @@ -409,17 +396,15 @@ private static final class WikiEventDelegate { * @throws java.lang.UnsupportedOperationException if any attempt is made to modify the Set */ public Set< WikiEventListener > getWikiEventListeners() { - return Synchronizer.synchronize(wikiEventListenersLock, () -> { - final TreeSet< WikiEventListener > set = new TreeSet<>( new WikiEventListenerComparator() ); - for( final WeakReference< WikiEventListener > wikiEventListenerWeakReference : m_listenerList ) { - final WikiEventListener l = wikiEventListenerWeakReference.get(); - if( l != null ) { - set.add( l ); - } + final TreeSet< WikiEventListener > set = new TreeSet<>( new WikiEventListenerComparator() ); + for( final WeakReference< WikiEventListener > wikiEventListenerWeakReference : m_listenerList ) { + final WikiEventListener l = wikiEventListenerWeakReference.get(); + if( l != null ) { + set.add( l ); } + } - return Collections.unmodifiableSet( set ); - }); + return Collections.unmodifiableSet( set ); } /** @@ -429,15 +414,13 @@ public Set< WikiEventListener > getWikiEventListeners() { * @return true if the listener was added (i.e., it was not already in the list and was added) */ public boolean addWikiEventListener( final WikiEventListener listener ) { - return Synchronizer.synchronize(wikiEventListenerLock, () -> { - final boolean listenerAlreadyContained = m_listenerList.stream() - .map( WeakReference::get ) - .anyMatch( ref -> ref == listener ); - if( !listenerAlreadyContained ) { - return m_listenerList.add( new WeakReference<>( listener ) ); - } - return false; - }); + final boolean listenerAlreadyContained = m_listenerList.stream() + .map( WeakReference::get ) + .anyMatch( ref -> ref == listener ); + if( !listenerAlreadyContained ) { + return m_listenerList.add( new WeakReference<>( listener ) ); + } + return false; } /** @@ -447,54 +430,50 @@ public boolean addWikiEventListener( final WikiEventListener listener ) { * @return true if the listener was removed (i.e., it was actually in the list and was removed) */ public boolean removeWikiEventListener( final WikiEventListener listener ) { - return Synchronizer.synchronize(removeWikiEventListenerLock2, () -> { - for (final Iterator> i = m_listenerList.iterator(); i.hasNext(); ) { - final WikiEventListener l = i.next().get(); - if (l == listener) { - i.remove(); - return true; - } + for (final Iterator> i = m_listenerList.iterator(); i.hasNext(); ) { + final WikiEventListener l = i.next().get(); + if (l == listener) { + i.remove(); + return true; } - return false; - }); + } + return false; } /** * Returns true if there are one or more listeners registered with this instance. */ public boolean isListening() { - return Synchronizer.synchronize(isListeningLock, () -> !m_listenerList.isEmpty()); + return !m_listenerList.isEmpty(); } /** * Notify all listeners having a registered interest in change events of the supplied WikiEvent. 
*/ - public void fireEvent(final WikiEvent event) { - final AtomicBoolean needsCleanup = new AtomicBoolean(false); + public void fireEvent( final WikiEvent event ) { + boolean needsCleanup = false; try { - Synchronizer.synchronize(fireEventLock, () -> { - for (final WeakReference wikiEventListenerWeakReference : m_listenerList) { - final WikiEventListener listener = wikiEventListenerWeakReference.get(); - if (listener != null) { - listener.actionPerformed(event); - } else { - needsCleanup.set(true); - } + for( final WeakReference< WikiEventListener > wikiEventListenerWeakReference : m_listenerList ) { + final WikiEventListener listener = wikiEventListenerWeakReference.get(); + if( listener != null ) { + listener.actionPerformed( event ); + } else { + needsCleanup = true; } + } - // Remove all such listeners which have expired - if (needsCleanup.get()) { - for (int i = 0; i < m_listenerList.size(); i++) { - final WeakReference w = m_listenerList.get(i); - if (w.get() == null) { - m_listenerList.remove(i--); - } + // Remove all such listeners which have expired + if( needsCleanup ) { + for( int i = 0; i < m_listenerList.size(); i++ ) { + final WeakReference< WikiEventListener > w = m_listenerList.get( i ); + if( w.get() == null ) { + m_listenerList.remove( i-- ); } } - }); - } catch (final ConcurrentModificationException e) { + } + } catch ( final ConcurrentModificationException e ) { // We don't die, we just don't do notifications in that case. - LOG.info("Concurrent modification of event list; please report this.", e); + LOG.info( "Concurrent modification of event list; please report this.", e ); } } } From 5a34ccd72cd018cd466bc9a3a3447df08fa46c14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Pablo=20Santos=20Rodr=C3=ADguez?= Date: Sat, 6 Apr 2024 14:08:10 +0200 Subject: [PATCH 05/10] Requested changes --- .../main/java/org/apache/wiki/WatchDog.java | 36 ++++++++----------- 1 file changed, 14 insertions(+), 22 deletions(-) diff --git a/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java b/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java index f33fdd6fb4..9c38feba69 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java @@ -68,12 +68,7 @@ public final class WatchDog { * @see java.util.concurrent.locks.ReentrantLock */ private final ReentrantLock watchDogLock = new ReentrantLock(); - private final ReentrantLock enableLock = new ReentrantLock(); - private final ReentrantLock disableLock = new ReentrantLock(); - private final ReentrantLock enterStateLock = new ReentrantLock(); - private final ReentrantLock exitStateLock = new ReentrantLock(); - private final ReentrantLock checkLock = new ReentrantLock(); - private final ReentrantLock toStringLock = new ReentrantLock(); + private final ReentrantLock stateStackLock = new ReentrantLock(); /** * Returns the current watchdog for the current thread. This is the preferred method of getting you a Watchdog, since it @@ -154,26 +149,26 @@ private static void scrub() { * Can be used to enable the WatchDog. Will cause a new Thread to be created, if none was existing previously. */ public void enable() { - Synchronizer.synchronize(enableLock, () -> { + Synchronizer.synchronize( watchDogLock, () -> { if( !m_enabled ) { m_enabled = true; c_watcherThread = new WatchDogThread( m_engine ); c_watcherThread.start(); } - }); + } ); } /** * Is used to disable a WatchDog. The watchdog thread is shut down and resources released. 
*/ public void disable() { - Synchronizer.synchronize(disableLock, () -> { + Synchronizer.synchronize( watchDogLock, () -> { if( m_enabled ) { m_enabled = false; c_watcherThread.shutdown(); c_watcherThread = null; } - }); + } ); } /** @@ -189,14 +184,12 @@ public void enterState( final String state ) { /** * Enters a watched state which has an expected completion time. This is the main method for using the * WatchDog. For example: - * * * WatchDog w = m_engine.getCurrentWatchDog(); * w.enterState("Processing Foobar", 60); * foobar(); * w.exitState(); * - * * If the call to foobar() takes more than 60 seconds, you will receive an ERROR in the log stream. * * @param state A free-form string description of the state @@ -204,10 +197,10 @@ public void enterState( final String state ) { */ public void enterState( final String state, final int expectedCompletionTime ) { LOG.debug( "{}: Entering state {}, expected completion in {} s", m_watchable.getName(), state, expectedCompletionTime ); - Synchronizer.synchronize(enterStateLock, () -> { + Synchronizer.synchronize( stateStackLock, () -> { final State st = new State( state, expectedCompletionTime ); m_stateStack.push( st ); - }); + } ); } /** @@ -226,7 +219,7 @@ public void exitState() { */ public void exitState( final String state ) { if( !m_stateStack.empty() ) { - Synchronizer.synchronize(exitStateLock, () -> { + Synchronizer.synchronize( stateStackLock, () -> { final State st = m_stateStack.peek(); if( state == null || st.getState().equals( state ) ) { m_stateStack.pop(); @@ -236,7 +229,7 @@ public void exitState( final String state ) { // FIXME: should actually go and fix things for that LOG.error( "exitState() called before enterState()" ); } - }); + } ); } else { LOG.warn( "Stack for " + m_watchable.getName() + " is empty!" ); } @@ -262,14 +255,14 @@ public boolean isWatchableAlive() { private void check() { LOG.debug( "Checking watchdog '{}'", m_watchable.getName() ); - Synchronizer.synchronize(checkLock, () -> { + Synchronizer.synchronize( stateStackLock, () -> { if( !m_stateStack.empty() ) { final State st = m_stateStack.peek(); final long now = System.currentTimeMillis(); if( now > st.getExpiryTime() ) { LOG.info( "Watchable '" + m_watchable.getName() + "' exceeded timeout in state '" + st.getState() + - "' by " + (now - st.getExpiryTime()) / 1000 + " seconds" + + "' by " + ( now - st.getExpiryTime() ) / 1_000 + " seconds" + ( LOG.isDebugEnabled() ? "" : "Enable DEBUG-level logging to see stack traces." ) ); dumpStackTraceForWatchable(); @@ -278,7 +271,7 @@ private void check() { } else { LOG.warn( "Stack for " + m_watchable.getName() + " is empty!" ); } - }); + } ); } /** @@ -319,7 +312,7 @@ private void dumpStackTraceForWatchable() { */ @Override public String toString() { - return Synchronizer.synchronize(toStringLock, () -> { + return Synchronizer.synchronize( stateStackLock, () -> { String state = "Idle"; if( !m_stateStack.empty() ) { @@ -327,7 +320,7 @@ public String toString() { state = st.getState(); } return "WatchDog state=" + state; - }); + } ); } /** @@ -353,7 +346,6 @@ public void shutdownTask() { /** * Checks if the watchable is alive, and if it is, checks if the stack is finished. - * * If the watchable has been deleted in the meantime, will simply shut down itself. 
*/ @Override From 88a75eb42dd3dba7b7e2395bebd89bbc949efcc3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Pablo=20Santos=20Rodr=C3=ADguez?= Date: Sat, 6 Apr 2024 14:09:10 +0200 Subject: [PATCH 06/10] Synchronize only on delegated getInstance method --- .../src/main/java/org/apache/wiki/WikiEngine.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java b/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java index 1c8bd1fdc0..3d9ef8ab22 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java @@ -160,7 +160,7 @@ public class WikiEngine implements Engine { * @throws InternalWikiException in case something fails. This is a RuntimeException, so be prepared for it. */ public static WikiEngine getInstance( final ServletConfig config ) throws InternalWikiException { - return Synchronizer.synchronize(lock, () -> getInstance(config.getServletContext(), null)); + return getInstance( config.getServletContext(), null ); } /** @@ -174,7 +174,7 @@ public static WikiEngine getInstance( final ServletConfig config ) throws Intern * @return One well-behaving WikiEngine instance. */ public static WikiEngine getInstance( final ServletConfig config, final Properties props ) { - return Synchronizer.synchronize(lock, () -> getInstance( config.getServletContext(), props )); + return getInstance( config.getServletContext(), props ); } /** @@ -187,7 +187,7 @@ public static WikiEngine getInstance( final ServletConfig config, final Properti */ public static WikiEngine getInstance(final ServletContext context, Properties props) throws InternalWikiException { final AtomicReference propsRef = new AtomicReference<>(props); - return Synchronizer.synchronize(lock, () -> { + return Synchronizer.synchronize( lock, () -> { WikiEngine engine = (WikiEngine) context.getAttribute(ATTR_WIKIENGINE); if (engine == null) { final String appid = Integer.toString(context.hashCode()); @@ -216,7 +216,7 @@ public static WikiEngine getInstance(final ServletContext context, Properties pr } } return engine; - }); + } ); } /** From 8e128cd6b1197d809f2266d478024141fa1837eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Pablo=20Santos=20Rodr=C3=ADguez?= Date: Sat, 6 Apr 2024 14:09:45 +0200 Subject: [PATCH 07/10] requested changes --- .../auth/authorize/DefaultGroupManager.java | 60 ++++++------------- 1 file changed, 17 insertions(+), 43 deletions(-) diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java index 005e1248a3..c635c1c2fc 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java @@ -44,12 +44,12 @@ Licensed to the Apache Software Foundation (ASF) under one import java.security.Principal; import java.util.Arrays; -import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.locks.ReentrantLock; @@ -75,7 +75,7 @@ public class DefaultGroupManager implements GroupManager, Authorizer, WikiEventL private GroupDatabase m_groupDatabase; /** Map with GroupPrincipals as keys, and Groups as values */ - private final Map< Principal, Group > m_groups = new 
HashMap<>(); + private final Map< Principal, Group > m_groups = new ConcurrentHashMap<>(); /** * A lock used to ensure thread safety when accessing shared resources. @@ -85,23 +85,7 @@ public class DefaultGroupManager implements GroupManager, Authorizer, WikiEventL * * @see java.util.concurrent.locks.ReentrantLock */ - private final ReentrantLock initializeLock; - private final ReentrantLock removeGroupLock; - private final ReentrantLock setGroupLock; - private final ReentrantLock setGroupLockPut; - private final ReentrantLock setGroupLockCatch; - private final ReentrantLock addWikiEventListenerLock; - private final ReentrantLock removeWikiEventListenerLock; - - public DefaultGroupManager() { - initializeLock = new ReentrantLock(); - removeGroupLock = new ReentrantLock(); - setGroupLock = new ReentrantLock(); - setGroupLockPut = new ReentrantLock(); - setGroupLockCatch = new ReentrantLock(); - addWikiEventListenerLock = new ReentrantLock(); - removeWikiEventListenerLock = new ReentrantLock(); - } + private final ReentrantLock wikiEventListenerLock = new ReentrantLock(); /** {@inheritDoc} */ @Override @@ -180,13 +164,11 @@ public void initialize( final Engine engine, final Properties props ) throws Wik // Load all groups from the database into the cache final Group[] groups = m_groupDatabase.groups(); - Synchronizer.synchronize(initializeLock, () -> { - for( final Group group : groups ) { - // Add new group to cache; fire GROUP_ADD event - m_groups.put( group.getPrincipal(), group ); - fireEvent( WikiSecurityEvent.GROUP_ADD, group ); - } - }); + for( final Group group : groups ) { + // Add new group to cache; fire GROUP_ADD event + m_groups.put( group.getPrincipal(), group ); + fireEvent( WikiSecurityEvent.GROUP_ADD, group ); + } // Make the GroupManager listen for WikiEvents (WikiSecurityEvents for changed user profiles) engine.getManager( UserManager.class ).addWikiEventListener( this ); @@ -281,9 +263,7 @@ public void removeGroup( final String index ) throws WikiSecurityException { // Delete the group // TODO: need rollback procedure - Synchronizer.synchronize(removeGroupLock, () -> { - m_groups.remove( group.getPrincipal() ); - }); + m_groups.remove( group.getPrincipal() ); m_groupDatabase.delete( group ); fireEvent( WikiSecurityEvent.GROUP_REMOVE, group ); } @@ -297,9 +277,7 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu final Group oldGroup = m_groups.get( group.getPrincipal() ); if( oldGroup != null ) { fireEvent( WikiSecurityEvent.GROUP_REMOVE, oldGroup ); - Synchronizer.synchronize(setGroupLock, () -> { - m_groups.remove( oldGroup.getPrincipal() ); - }); + m_groups.remove( oldGroup.getPrincipal() ); } // Copy existing modifier info & timestamps @@ -311,13 +289,11 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu } // Add new group to cache; announce GROUP_ADD event - Synchronizer.synchronize(setGroupLockPut, () -> { - m_groups.put( group.getPrincipal(), group ); - }); + m_groups.put( group.getPrincipal(), group ); fireEvent( WikiSecurityEvent.GROUP_ADD, group ); // Save the group to back-end database; if it fails, roll back to previous state. Note that the back-end - // MUST timestammp the create/modify fields in the Group. + // MUST timestamp the create/modify fields in the Group. try { m_groupDatabase.save( group, session.getUserPrincipal() ); } @@ -328,9 +304,7 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu // Restore previous version, re-throw... 
             fireEvent( WikiSecurityEvent.GROUP_REMOVE, group );
             fireEvent( WikiSecurityEvent.GROUP_ADD, oldGroup );
-            Synchronizer.synchronize(setGroupLockCatch, () -> {
-                m_groups.put( oldGroup.getPrincipal(), oldGroup );
-            });
+            m_groups.put( oldGroup.getPrincipal(), oldGroup );
             throw new WikiSecurityException( e.getMessage() + " (rolled back to previous version).", e );
         }
         // Re-throw security exception
@@ -396,17 +370,17 @@ protected String[] extractMembers( final String memberLine ) {
     /** {@inheritDoc} */
     @Override
     public void addWikiEventListener( final WikiEventListener listener ) {
-        Synchronizer.synchronize(addWikiEventListenerLock, () -> {
+        Synchronizer.synchronize( wikiEventListenerLock, () -> {
             WikiEventManager.addWikiEventListener( this, listener );
-        });
+        } );
     }
 
     /** {@inheritDoc} */
     @Override
     public void removeWikiEventListener( final WikiEventListener listener ) {
-        Synchronizer.synchronize(removeWikiEventListenerLock, () -> {
+        Synchronizer.synchronize( wikiEventListenerLock, () -> {
             WikiEventManager.removeWikiEventListener( this, listener );
-        });
+        } );
     }
 
     /** {@inheritDoc} */

From 540ccd206b3fbb3dd101eab834c067bedda5a676 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juan=20Pablo=20Santos=20Rodr=C3=ADguez?=
Date: Sat, 6 Apr 2024 14:11:03 +0200
Subject: [PATCH 08/10] Move ReentrantLock instantiation to class level variable

---
 .../java/org/apache/wiki/search/LuceneSearchProvider.java | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java
index 6839f27fe7..1a6a56e536 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java
@@ -143,11 +143,7 @@ public class LuceneSearchProvider implements SearchProvider {
      *
      * @see java.util.concurrent.locks.ReentrantLock
      */
-    private final ReentrantLock lock;
-
-    public LuceneSearchProvider() {
-        lock = new ReentrantLock();
-    }
+    private final ReentrantLock lock = new ReentrantLock();
 
     /** {@inheritDoc} */
     @Override

From 0fd016cbe7146513c836086569ec8b49beaf662a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juan=20Pablo=20Santos=20Rodr=C3=ADguez?=
Date: Sat, 6 Apr 2024 14:12:00 +0200
Subject: [PATCH 09/10] IndexWriter can cause sync issues only if it's shared across threads, which is not happening here

---
 .../wiki/search/LuceneSearchProvider.java | 20 ++++++++-----------
 1 file changed, 8 insertions(+), 12 deletions(-)

diff --git a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java
index 1a6a56e536..b77a3b3d63 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java
@@ -403,9 +403,7 @@ protected Document luceneIndexPage( final Page page, final String text, final In
             field = new Field( LUCENE_PAGE_KEYWORDS, page.getAttribute( "keywords" ).toString(), TextField.TYPE_STORED );
             doc.add( field );
         }
-        Synchronizer.synchronize(lock, () -> {
-            writer.addDocument( doc );
-        });
+        writer.addDocument( doc );
         return doc;
     }
 
@@ -415,15 +413,13 @@ protected Document luceneIndexPage( final Page page, final String text, final In
      */
     @Override
     public void pageRemoved( final Page page ) {
-        Synchronizer.synchronize(lock, () -> {
-            try( final Directory luceneDir = new NIOFSDirectory( new File(
m_luceneDirectory ).toPath() ); - final IndexWriter writer = getIndexWriter( luceneDir ) ) { - final Query query = new TermQuery( new Term( LUCENE_ID, page.getName() ) ); - writer.deleteDocuments( query ); - } catch( final Exception e ) { - LOG.error( "Unable to remove page '{}' from Lucene index", page.getName(), e ); - } - }); + try( final Directory luceneDir = new NIOFSDirectory( new File( m_luceneDirectory ).toPath() ); + final IndexWriter writer = getIndexWriter( luceneDir ) ) { + final Query query = new TermQuery( new Term( LUCENE_ID, page.getName() ) ); + writer.deleteDocuments( query ); + } catch( final Exception e ) { + LOG.error( "Unable to remove page '{}' from Lucene index", page.getName(), e ); + } } IndexWriter getIndexWriter( final Directory luceneDir ) throws IOException, ProviderException { From 15898f089e13c1f27de76953b02dcbdea97e1214 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Pablo=20Santos=20Rodr=C3=ADguez?= Date: Sat, 6 Apr 2024 14:12:46 +0200 Subject: [PATCH 10/10] revert code change on PR to original while condition --- .../main/java/org/apache/wiki/search/LuceneSearchProvider.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java index b77a3b3d63..322d64132d 100644 --- a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java +++ b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java @@ -590,7 +590,7 @@ public void startupTask() throws Exception { public void backgroundTask() { m_watchdog.enterState( "Emptying index queue", 60 ); Synchronizer.synchronize(lock, () -> { - while( m_provider.m_updates.isEmpty() ) { + while( !m_provider.m_updates.isEmpty() ) { final Object[] pair = m_provider.m_updates.remove( 0 ); final Page page = ( Page )pair[ 0 ]; final String text = ( String )pair[ 1 ];
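
For readers following the series, the common refactoring pattern applied across these files can be summed up in a small, self-contained sketch. It is illustrative only and is not part of the patches above: the guard field and the withLock helper are hypothetical stand-ins for the class-level ReentrantLock fields and the org.apache.wiki.util.Synchronizer.synchronize utility that the patches rely on, showing how a synchronized block is expressed with an explicit lock released in a finally clause.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.locks.ReentrantLock;

    public class LockingSketch {

        // Hypothetical shared state that was previously guarded by "synchronized".
        private final Map<String, String> cache = new HashMap<>();

        // Class-level lock instance, mirroring the
        // "private final ReentrantLock lock = new ReentrantLock();" fields in the patches.
        private final ReentrantLock guard = new ReentrantLock();

        // Minimal stand-in for a Synchronizer-style helper: acquire, run, always release.
        private static void withLock(final ReentrantLock lock, final Runnable action) {
            lock.lock();
            try {
                action.run();
            } finally {
                lock.unlock();
            }
        }

        // Before: public synchronized void put(...) { cache.put(key, value); }
        // After: the same critical section expressed with the explicit lock.
        public void put(final String key, final String value) {
            withLock(guard, () -> cache.put(key, value));
        }

        public static void main(final String[] args) {
            final LockingSketch sketch = new LockingSketch();
            sketch.put("page", "Main");
            System.out.println("stored entries: " + sketch.cache.size());
        }
    }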