From e50fdc7a922dbf1ea4140d8b7f061734cec78f71 Mon Sep 17 00:00:00 2001
From: Arturo Bernal
Date: Mon, 11 Sep 2023 22:58:05 +0200
Subject: [PATCH 01/10] [JSPWIKI-1178] Address potential deadlock with JDK 21
Virtual Threads.
Refactored synchronized blocks/methods containing blocking operations to prevent potential deadlocks introduced by the upcoming Virtual Threads feature in JDK 21.
---
.../apache/wiki/event/WikiEventManager.java | 70 ++++-
.../search/kendra/KendraSearchProvider.java | 17 +-
.../main/java/org/apache/wiki/WatchDog.java | 49 +++-
.../main/java/org/apache/wiki/WikiEngine.java | 105 +++++---
.../auth/DefaultAuthenticationManager.java | 34 ++-
.../auth/DefaultAuthorizationManager.java | 30 ++-
.../apache/wiki/auth/DefaultUserManager.java | 34 ++-
.../org/apache/wiki/auth/SessionMonitor.java | 61 ++++-
.../apache/wiki/auth/acl/AclEntryImpl.java | 73 ++++--
.../org/apache/wiki/auth/acl/AclImpl.java | 44 +++-
.../auth/authorize/DefaultGroupManager.java | 58 ++++-
.../org/apache/wiki/auth/authorize/Group.java | 130 +++++++---
.../CookieAuthenticationLoginModule.java | 47 ++--
.../auth/permissions/PermissionFactory.java | 25 +-
.../wiki/auth/user/XMLUserDatabase.java | 241 ++++++++++--------
.../wiki/diff/ContextualDiffProvider.java | 74 ++++--
.../apache/wiki/filters/PageEventFilter.java | 33 ++-
.../org/apache/wiki/filters/SpamFilter.java | 163 +++++++-----
.../apache/wiki/plugin/BugReportHandler.java | 40 ++-
.../apache/wiki/plugin/PageViewPlugin.java | 118 ++++++---
.../providers/CachingAttachmentProvider.java | 42 ++-
.../wiki/providers/CachingProvider.java | 61 ++++-
.../providers/VersioningFileProvider.java | 168 ++++++------
.../references/DefaultReferenceManager.java | 239 +++++++++--------
.../apache/wiki/rss/DefaultRSSGenerator.java | 30 ++-
.../wiki/search/LuceneSearchProvider.java | 93 +++++--
.../ui/admin/beans/SearchManagerBean.java | 79 +++---
.../apache/wiki/workflow/AbstractStep.java | 87 +++++--
.../org/apache/wiki/workflow/Decision.java | 26 +-
.../apache/wiki/workflow/DecisionQueue.java | 50 +++-
.../wiki/workflow/DefaultWorkflowManager.java | 94 ++++---
.../java/org/apache/wiki/workflow/Task.java | 28 +-
.../org/apache/wiki/workflow/Workflow.java | 210 +++++++++------
jspwiki-main/src/test/resources/wkflmgr.ser | Bin 3770 -> 4030 bytes
.../apache/wiki/util/CommentedProperties.java | 124 ++++++---
35 files changed, 1904 insertions(+), 873 deletions(-)
diff --git a/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java b/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java
index 635ec6e444..0aedc2659a 100644
--- a/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java
+++ b/jspwiki-event/src/main/java/org/apache/wiki/event/WikiEventManager.java
@@ -33,6 +33,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Set;
import java.util.TreeSet;
import java.util.Vector;
+import java.util.concurrent.locks.ReentrantLock;
/**
* A singleton class that manages the addition and removal of WikiEvent listeners to a event source, as well as the firing of events
@@ -141,6 +142,17 @@ public final class WikiEventManager {
/* Singleton instance of the WikiEventManager. */
private static WikiEventManager c_instance;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private static final ReentrantLock lock = new ReentrantLock();
+
+
/** Constructor for a WikiEventManager. */
private WikiEventManager() {
c_instance = this;
@@ -154,10 +166,15 @@ private WikiEventManager() {
* @return A shared instance of the WikiEventManager
*/
public static WikiEventManager getInstance() {
- if( c_instance == null ) {
- synchronized( WikiEventManager.class ) {
- return new WikiEventManager();
- // start up any post-instantiation services here
+ if (c_instance == null) {
+ lock.lock();
+ try {
+ if (c_instance == null) {
+ c_instance = new WikiEventManager();
+ // start up any post-instantiation services here
+ }
+ } finally {
+ lock.unlock();
}
}
return c_instance;
@@ -242,7 +259,8 @@ public static boolean removeWikiEventListener( final WikiEventListener listener
// get the Map.entry object for the entire Map, then check match on entry (listener)
final WikiEventManager mgr = getInstance();
final Map< Object, WikiEventDelegate > sources = Collections.synchronizedMap( mgr.getDelegates() );
- synchronized( sources ) {
+ lock.lock();
+ try {
// get an iterator over the Map.Enty objects in the map
for( final Map.Entry< Object, WikiEventDelegate > entry : sources.entrySet() ) {
// the entry value is the delegate
@@ -253,16 +271,24 @@ public static boolean removeWikiEventListener( final WikiEventListener listener
removed = true; // was removed
}
}
+ } finally {
+ lock.unlock();
}
return removed;
}
private void removeDelegates() {
- synchronized( m_delegates ) {
+ lock.lock();
+ try {
m_delegates.clear();
+ } finally {
+ lock.unlock();
}
- synchronized( m_preloadCache ) {
+ lock.lock();
+ try {
m_preloadCache.clear();
+ } finally {
+ lock.unlock();
}
}
@@ -315,7 +341,8 @@ private Map< Object, WikiEventDelegate > getDelegates() {
* @return the WikiEventDelegate.
*/
private WikiEventDelegate getDelegateFor( final Object client ) {
- synchronized( m_delegates ) {
+ lock.lock();
+ try {
if( client == null || client instanceof Class ) { // then preload the cache
final WikiEventDelegate delegate = new WikiEventDelegate( client );
m_preloadCache.add( delegate );
@@ -342,6 +369,8 @@ private WikiEventDelegate getDelegateFor( final Object client ) {
m_delegates.put( client, delegate );
}
return delegate;
+ } finally {
+ lock.unlock();
}
}
@@ -386,7 +415,8 @@ private static final class WikiEventDelegate {
* @throws java.lang.UnsupportedOperationException if any attempt is made to modify the Set
*/
public Set< WikiEventListener > getWikiEventListeners() {
- synchronized( m_listenerList ) {
+ lock.lock();
+ try {
final TreeSet< WikiEventListener > set = new TreeSet<>( new WikiEventListenerComparator() );
for( final WeakReference< WikiEventListener > wikiEventListenerWeakReference : m_listenerList ) {
final WikiEventListener l = wikiEventListenerWeakReference.get();
@@ -396,6 +426,8 @@ public Set< WikiEventListener > getWikiEventListeners() {
}
return Collections.unmodifiableSet( set );
+ } finally {
+ lock.unlock();
}
}
@@ -406,13 +438,16 @@ public Set< WikiEventListener > getWikiEventListeners() {
* @return true if the listener was added (i.e., it was not already in the list and was added)
*/
public boolean addWikiEventListener( final WikiEventListener listener ) {
- synchronized( m_listenerList ) {
+ lock.lock();
+ try {
final boolean listenerAlreadyContained = m_listenerList.stream()
.map( WeakReference::get )
.anyMatch( ref -> ref == listener );
if( !listenerAlreadyContained ) {
return m_listenerList.add( new WeakReference<>( listener ) );
}
+ } finally {
+ lock.unlock();
}
return false;
}
@@ -424,7 +459,8 @@ public boolean addWikiEventListener( final WikiEventListener listener ) {
* @return true if the listener was removed (i.e., it was actually in the list and was removed)
*/
public boolean removeWikiEventListener( final WikiEventListener listener ) {
- synchronized( m_listenerList ) {
+ lock.lock();
+ try {
for( final Iterator< WeakReference< WikiEventListener > > i = m_listenerList.iterator(); i.hasNext(); ) {
final WikiEventListener l = i.next().get();
if( l == listener ) {
@@ -432,6 +468,8 @@ public boolean removeWikiEventListener( final WikiEventListener listener ) {
return true;
}
}
+ } finally {
+ lock.unlock();
}
return false;
@@ -441,8 +479,11 @@ public boolean removeWikiEventListener( final WikiEventListener listener ) {
* Returns true if there are one or more listeners registered with this instance.
*/
public boolean isListening() {
- synchronized( m_listenerList ) {
+ lock.lock();
+ try {
return !m_listenerList.isEmpty();
+ } finally {
+ lock.unlock();
}
}
@@ -452,7 +493,8 @@ public boolean isListening() {
public void fireEvent( final WikiEvent event ) {
boolean needsCleanup = false;
try {
- synchronized( m_listenerList ) {
+ lock.lock();
+ try {
for( final WeakReference< WikiEventListener > wikiEventListenerWeakReference : m_listenerList ) {
final WikiEventListener listener = wikiEventListenerWeakReference.get();
if( listener != null ) {
@@ -472,6 +514,8 @@ public void fireEvent( final WikiEvent event ) {
}
}
+ } finally {
+ lock.unlock();
}
} catch( final ConcurrentModificationException e ) {
// We don't die, we just don't do notifications in that case.
diff --git a/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java b/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java
index 9563e59f2f..8bc11fe50b 100644
--- a/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java
+++ b/jspwiki-kendra-searchprovider/src/main/java/org/apache/wiki/search/kendra/KendraSearchProvider.java
@@ -55,6 +55,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.*;
+import java.util.concurrent.locks.ReentrantLock;
import static java.lang.String.format;
@@ -87,7 +88,18 @@ public class KendraSearchProvider implements SearchProvider {
private static final String PROP_KENDRA_INDEXDELAY = "jspwiki.kendra.indexdelay";
private static final String PROP_KENDRA_INITIALDELAY = "jspwiki.kendra.initialdelay";
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
public KendraSearchProvider() {
+ lock = new ReentrantLock();
}
/**
@@ -339,7 +351,8 @@ private void doPartialReindex() {
}
LOG.debug( "Indexing updated pages. Please wait ..." );
final String executionId = startExecution();
- synchronized ( updates ) {
+ lock.lock();
+ try {
try {
while (!updates.isEmpty()) {
indexOnePage( updates.remove( 0 ), executionId );
@@ -347,6 +360,8 @@ private void doPartialReindex() {
} finally {
stopExecution();
}
+ } finally {
+ lock.unlock();
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java b/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java
index 8b83a13228..820ece30a4 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/WatchDog.java
@@ -28,6 +28,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Set;
import java.util.Stack;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -57,6 +58,16 @@ public final class WatchDog {
private static final Map< Integer, WeakReference< WatchDog > > c_kennel = new ConcurrentHashMap<>();
private static WikiBackgroundThread c_watcherThread;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
/**
* Returns the current watchdog for the current thread. This is the preferred method of getting you a Watchdog, since it
* keeps an internal list of Watchdogs for you so that there won't be more than one watchdog per thread.
@@ -92,11 +103,16 @@ public WatchDog( final Engine engine, final Watchable watch ) {
m_engine = engine;
m_watchable = watch;
- synchronized( WatchDog.class ) {
+ lock = new ReentrantLock();
+
+ lock.lock();
+ try {
if( c_watcherThread == null ) {
c_watcherThread = new WatchDogThread( engine );
c_watcherThread.start();
}
+ } finally {
+ lock.unlock();
}
}
@@ -136,12 +152,15 @@ private static void scrub() {
* Can be used to enable the WatchDog. Will cause a new Thread to be created, if none was existing previously.
*/
public void enable() {
- synchronized( WatchDog.class ) {
+ lock.lock();
+ try {
if( !m_enabled ) {
m_enabled = true;
c_watcherThread = new WatchDogThread( m_engine );
c_watcherThread.start();
}
+ } finally {
+ lock.unlock();
}
}
@@ -149,12 +168,15 @@ public void enable() {
* Is used to disable a WatchDog. The watchdog thread is shut down and resources released.
*/
public void disable() {
- synchronized( WatchDog.class ) {
+ lock.lock();
+ try {
if( m_enabled ) {
m_enabled = false;
c_watcherThread.shutdown();
c_watcherThread = null;
}
+ } finally {
+ lock.unlock();
}
}
@@ -186,9 +208,12 @@ public void enterState( final String state ) {
*/
public void enterState( final String state, final int expectedCompletionTime ) {
LOG.debug( "{}: Entering state {}, expected completion in {} s", m_watchable.getName(), state, expectedCompletionTime );
- synchronized( m_stateStack ) {
+ lock.lock();
+ try {
final State st = new State( state, expectedCompletionTime );
m_stateStack.push( st );
+ } finally {
+ lock.unlock();
}
}
@@ -208,7 +233,8 @@ public void exitState() {
*/
public void exitState( final String state ) {
if( !m_stateStack.empty() ) {
- synchronized( m_stateStack ) {
+ lock.lock();
+ try {
final State st = m_stateStack.peek();
if( state == null || st.getState().equals( state ) ) {
m_stateStack.pop();
@@ -218,6 +244,8 @@ public void exitState( final String state ) {
// FIXME: should actually go and fix things for that
LOG.error( "exitState() called before enterState()" );
}
+ } finally {
+ lock.unlock();
}
} else {
LOG.warn( "Stack for " + m_watchable.getName() + " is empty!" );
@@ -244,8 +272,8 @@ public boolean isWatchableAlive() {
private void check() {
LOG.debug( "Checking watchdog '{}'", m_watchable.getName() );
-
- synchronized( m_stateStack ) {
+ lock.lock();
+ try {
if( !m_stateStack.empty() ) {
final State st = m_stateStack.peek();
final long now = System.currentTimeMillis();
@@ -261,6 +289,8 @@ private void check() {
} else {
LOG.warn( "Stack for " + m_watchable.getName() + " is empty!" );
}
+ } finally {
+ lock.unlock();
}
}
@@ -302,7 +332,8 @@ private void dumpStackTraceForWatchable() {
*/
@Override
public String toString() {
- synchronized( m_stateStack ) {
+ lock.lock();
+ try {
String state = "Idle";
if( !m_stateStack.empty() ) {
@@ -310,6 +341,8 @@ public String toString() {
state = st.getState();
}
return "WatchDog state=" + state;
+ } finally {
+ lock.unlock();
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java b/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java
index 4ec79d82e0..9895796b16 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/WikiEngine.java
@@ -81,6 +81,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Properties;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
@@ -138,6 +139,16 @@ public class WikiEngine implements Engine {
/** Stores WikiEngine's associated managers. */
protected final Map< Class< ? >, Object > managers = new ConcurrentHashMap<>();
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private static final ReentrantLock lock = new ReentrantLock();
+
/**
* Gets a WikiEngine related to this servlet. Since this method is only called from JSP pages (and JspInit()) to be specific,
* we throw a RuntimeException if things don't work.
@@ -146,8 +157,13 @@ public class WikiEngine implements Engine {
* @return A WikiEngine instance.
* @throws InternalWikiException in case something fails. This is a RuntimeException, so be prepared for it.
*/
- public static synchronized WikiEngine getInstance( final ServletConfig config ) throws InternalWikiException {
- return getInstance( config.getServletContext(), null );
+ public static WikiEngine getInstance( final ServletConfig config ) throws InternalWikiException {
+ lock.lock();
+ try {
+ return getInstance( config.getServletContext(), null );
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -160,8 +176,13 @@ public static synchronized WikiEngine getInstance( final ServletConfig config )
*
* @return One well-behaving WikiEngine instance.
*/
- public static synchronized WikiEngine getInstance( final ServletConfig config, final Properties props ) {
- return getInstance( config.getServletContext(), props );
+ public static WikiEngine getInstance( final ServletConfig config, final Properties props ) {
+ lock.lock();
+ try {
+ return getInstance( config.getServletContext(), props );
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -172,33 +193,39 @@ public static synchronized WikiEngine getInstance( final ServletConfig config, f
* @return One fully functional, properly behaving WikiEngine.
* @throws InternalWikiException If the WikiEngine instantiation fails.
*/
- public static synchronized WikiEngine getInstance( final ServletContext context, Properties props ) throws InternalWikiException {
- WikiEngine engine = ( WikiEngine )context.getAttribute( ATTR_WIKIENGINE );
- if( engine == null ) {
- final String appid = Integer.toString( context.hashCode() );
- context.log( " Assigning new engine to " + appid );
- try {
- if( props == null ) {
- props = PropertyReader.loadWebAppProps( context );
- }
-
- engine = new WikiEngine( context, appid );
+ public static WikiEngine getInstance(final ServletContext context, Properties props) throws InternalWikiException {
+ lock.lock();
+ WikiEngine engine;
+ try {
+ engine = (WikiEngine) context.getAttribute(ATTR_WIKIENGINE);
+ if (engine == null) {
+ final String appid = Integer.toString(context.hashCode());
+ context.log(" Assigning new engine to " + appid);
try {
- // Note: May be null, if JSPWiki has been deployed in a WAR file.
- engine.start( props );
- LOG.info( "Root path for this Wiki is: '{}'", engine.getRootPath() );
- } catch( final Exception e ) {
- final String msg = Release.APPNAME + ": Unable to load and setup properties from jspwiki.properties. " + e.getMessage();
- context.log( msg );
- LOG.error( msg, e );
- throw new WikiException( msg, e );
+ if (props == null) {
+ props = PropertyReader.loadWebAppProps(context);
+ }
+
+ engine = new WikiEngine(context, appid);
+ try {
+ // Note: May be null, if JSPWiki has been deployed in a WAR file.
+ engine.start(props);
+ LOG.info("Root path for this Wiki is: '{}'", engine.getRootPath());
+ } catch (final Exception e) {
+ final String msg = Release.APPNAME + ": Unable to load and setup properties from jspwiki.properties. " + e.getMessage();
+ context.log(msg);
+ LOG.error(msg, e);
+ throw new WikiException(msg, e);
+ }
+ context.setAttribute(ATTR_WIKIENGINE, engine);
+ } catch (final Exception e) {
+ context.log("ERROR: Failed to create a Wiki engine: " + e.getMessage());
+ LOG.error("ERROR: Failed to create a Wiki engine, stacktrace follows ", e);
+ throw new InternalWikiException("No wiki engine, check logs.", e);
}
- context.setAttribute( ATTR_WIKIENGINE, engine );
- } catch( final Exception e ) {
- context.log( "ERROR: Failed to create a Wiki engine: " + e.getMessage() );
- LOG.error( "ERROR: Failed to create a Wiki engine, stacktrace follows ", e );
- throw new InternalWikiException( "No wiki engine, check logs.", e );
}
+ } finally {
+ lock.unlock();
}
return engine;
}
@@ -943,14 +970,26 @@ public InternationalizationManager getInternationalizationManager() {
/** {@inheritDoc} */
@Override
- public final synchronized void addWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.addWikiEventListener( this, listener );
+ public final void addWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.addWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
- /** {@inheritDoc} */
+ /**
+ * {@inheritDoc}
+ */
@Override
- public final synchronized void removeWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.removeWikiEventListener( this, listener );
+ public final void removeWikiEventListener(final WikiEventListener listener) {
+ lock.lock();
+ try {
+ WikiEventManager.removeWikiEventListener(this, listener);
+ } finally {
+ lock.unlock();
+ }
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java
index 6f86584c14..79d5682644 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthenticationManager.java
@@ -53,6 +53,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Map;
import java.util.Properties;
import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -103,6 +104,21 @@ public class DefaultAuthenticationManager implements AuthenticationManager {
/** Keeps a list of the usernames who have attempted a login recently. */
private final TimedCounterList< String > m_lastLoginAttempts = new TimedCounterList<>();
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public DefaultAuthenticationManager () {
+ lock = new ReentrantLock();
+ }
+
+
/**
* {@inheritDoc}
*/
@@ -352,16 +368,26 @@ public Set< Principal > doJAASLogin( final Class< ? extends LoginModule > clazz,
* {@inheritDoc}
*/
@Override
- public synchronized void addWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.addWikiEventListener( this, listener );
+ public void addWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.addWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
/**
* {@inheritDoc}
*/
@Override
- public synchronized void removeWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.removeWikiEventListener( this, listener );
+ public void removeWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.removeWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java
index f39f65cdcd..f6747cbb18 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultAuthorizationManager.java
@@ -64,6 +64,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.WeakHashMap;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -88,10 +89,21 @@ public class DefaultAuthorizationManager implements AuthorizationManager {
private LocalPolicy m_localPolicy;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private ReentrantLock lock;
+
/**
* Constructs a new DefaultAuthorizationManager instance.
*/
public DefaultAuthorizationManager() {
+ lock = new ReentrantLock();
}
/** {@inheritDoc} */
@@ -373,14 +385,24 @@ public Principal resolvePrincipal( final String name ) {
/** {@inheritDoc} */
@Override
- public synchronized void addWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.addWikiEventListener( this, listener );
+ public void addWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.addWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
/** {@inheritDoc} */
@Override
- public synchronized void removeWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.removeWikiEventListener( this, listener );
+ public void removeWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.removeWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java
index 048fc76315..32dde21a35 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/DefaultUserManager.java
@@ -69,6 +69,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.WeakHashMap;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -96,6 +97,21 @@ public class DefaultUserManager implements UserManager {
/** The user database loads, manages and persists user identities */
private UserDatabase m_database;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public DefaultUserManager() {
+ lock = new ReentrantLock();
+ }
+
+
/** {@inheritDoc} */
@Override
public void initialize( final Engine engine, final Properties props ) {
@@ -406,8 +422,13 @@ public Principal[] listWikiNames() throws WikiSecurityException {
* This is a convenience method.
* @param listener the event listener
*/
- @Override public synchronized void addWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.addWikiEventListener( this, listener );
+ @Override public void addWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.addWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -415,8 +436,13 @@ public Principal[] listWikiNames() throws WikiSecurityException {
* This is a convenience method.
* @param listener the event listener
*/
- @Override public synchronized void removeWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.removeWikiEventListener( this, listener );
+ @Override public void removeWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.removeWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java
index dc7e082e1c..638e246ae1 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/SessionMonitor.java
@@ -22,6 +22,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import org.apache.logging.log4j.Logger;
import org.apache.wiki.api.core.Engine;
import org.apache.wiki.api.core.Session;
+import org.apache.wiki.api.spi.SessionSPI;
import org.apache.wiki.api.spi.Wiki;
import org.apache.wiki.event.WikiEventListener;
import org.apache.wiki.event.WikiEventManager;
@@ -32,12 +33,14 @@ Licensed to the Apache Software Foundation (ASF) under one
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionEvent;
import javax.servlet.http.HttpSessionListener;
+import java.lang.ref.WeakReference;
import java.security.Principal;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
/**
@@ -59,6 +62,16 @@ public class SessionMonitor implements HttpSessionListener {
private final PrincipalComparator m_comparator = new PrincipalComparator();
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see ReentrantLock
+ */
+ private final ReentrantLock lock;
+
/**
* Returns the instance of the SessionMonitor for this wiki. Only one SessionMonitor exists per Engine.
*
@@ -80,10 +93,13 @@ public static SessionMonitor getInstance( final Engine engine ) {
/** Construct the SessionListener */
public SessionMonitor() {
+ lock = new ReentrantLock();
}
private SessionMonitor( final Engine engine ) {
+ this();
m_engine = engine;
+
}
/**
@@ -123,10 +139,10 @@ private Session findSession( final String sessionId ) {
/**
*
Looks up the wiki session associated with a user's Http session and adds it to the session cache. This method will return the
- * "guest session" as constructed by {@link org.apache.wiki.api.spi.SessionSPI#guest(Engine)} if the HttpSession is not currently
+ * "guest session" as constructed by {@link SessionSPI#guest(Engine)} if the HttpSession is not currently
* associated with a WikiSession. This method is guaranteed to return a non-null WikiSession.
*
Internally, the session is stored in a HashMap; keys are the HttpSession objects, while the values are
- * {@link java.lang.ref.WeakReference}-wrapped WikiSessions.
+ * {@link WeakReference}-wrapped WikiSessions.
*
* @param session the HTTP session
* @return the wiki session
@@ -143,10 +159,10 @@ public final Session find( final HttpSession session ) {
/**
*
Looks up the wiki session associated with a user's Http session and adds it to the session cache. This method will return the
- * "guest session" as constructed by {@link org.apache.wiki.api.spi.SessionSPI#guest(Engine)} if the HttpSession is not currently
+ * "guest session" as constructed by {@link SessionSPI#guest(Engine)} if the HttpSession is not currently
* associated with a WikiSession. This method is guaranteed to return a non-null WikiSession.
*
Internally, the session is stored in a HashMap; keys are the HttpSession objects, while the values are
- * {@link java.lang.ref.WeakReference}-wrapped WikiSessions.
+ * {@link WeakReference}-wrapped WikiSessions.
*
* @param sessionId the HTTP session
* @return the wiki session
@@ -169,8 +185,11 @@ public final Session find( final String sessionId ) {
private Session createGuestSessionFor( final String sessionId ) {
LOG.debug( "Session for session ID={}... not found. Creating guestSession()", sessionId );
final Session wikiSession = Wiki.session().guest( m_engine );
- synchronized( m_sessions ) {
+ lock.lock();
+ try {
m_sessions.put( sessionId, wikiSession );
+ } finally {
+ lock.unlock();
}
return wikiSession;
}
@@ -196,8 +215,11 @@ public final void remove( final HttpSession session ) {
if( session == null ) {
throw new IllegalArgumentException( "Session cannot be null." );
}
- synchronized( m_sessions ) {
+ lock.lock();
+ try {
m_sessions.remove( session.getId() );
+ } finally {
+ lock.unlock();
}
}
@@ -214,15 +236,18 @@ public final int sessions()
*
Returns the current wiki users as a sorted array of Principal objects. The principals are those returned by
* each WikiSession's {@link Session#getUserPrincipal()}'s method.
*
To obtain the list of current WikiSessions, we iterate through our session Map and obtain the list of values,
- * which are WikiSessions wrapped in {@link java.lang.ref.WeakReference} objects. Those WeakReferences
+ * which are WikiSessions wrapped in {@link WeakReference} objects. Those WeakReferences
* whose get() method returns non-null values are valid sessions.
*
* @return the array of user principals
*/
public final Principal[] userPrincipals() {
final Collection principals;
- synchronized ( m_sessions ) {
+ lock.lock();
+ try {
principals = m_sessions.values().stream().map(Session::getUserPrincipal).collect(Collectors.toList());
+ } finally {
+ lock.unlock();
}
final Principal[] p = principals.toArray( new Principal[0] );
Arrays.sort( p, m_comparator );
@@ -235,8 +260,13 @@ public final Principal[] userPrincipals() {
* @param listener the event listener
* @since 2.4.75
*/
- public final synchronized void addWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.addWikiEventListener( this, listener );
+ public final void addWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.addWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -245,8 +275,13 @@ public final synchronized void addWikiEventListener( final WikiEventListener lis
* @param listener the event listener
* @since 2.4.75
*/
- public final synchronized void removeWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.removeWikiEventListener( this, listener );
+ public final void removeWikiEventListener(final WikiEventListener listener) {
+ lock.lock();
+ try {
+ WikiEventManager.removeWikiEventListener(this, listener);
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -265,7 +300,7 @@ protected final void fireEvent( final int type, final Principal principal, final
/**
* Fires when the web container creates a new HTTP session.
- *
+ *
* @param se the HTTP session event
*/
@Override
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java
index cc7464a45e..befac2c77e 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclEntryImpl.java
@@ -19,12 +19,14 @@ Licensed to the Apache Software Foundation (ASF) under one
package org.apache.wiki.auth.acl;
import org.apache.wiki.auth.permissions.PagePermission;
+import org.apache.wiki.event.WikiEventManager;
import java.io.Serializable;
import java.security.Permission;
import java.security.Principal;
import java.util.Enumeration;
import java.util.Vector;
+import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
@@ -39,10 +41,21 @@ public class AclEntryImpl implements AclEntry, Serializable {
private final Vector< Permission > m_permissions = new Vector<>();
private Principal m_principal;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
/**
* Constructs a new AclEntryImpl instance.
*/
public AclEntryImpl() {
+ lock = new ReentrantLock();
}
/**
@@ -54,13 +67,18 @@ public AclEntryImpl() {
* already part of this entry's permission set, and false if the permission is not of type PagePermission
*/
@Override
- public synchronized boolean addPermission(final Permission permission ) {
- if( permission instanceof PagePermission && findPermission( permission ) == null ) {
- m_permissions.add( permission );
- return true;
- }
+ public boolean addPermission(final Permission permission) {
+ lock.lock();
+ try {
+ if (permission instanceof PagePermission && findPermission(permission) == null) {
+ m_permissions.add(permission);
+ return true;
+ }
- return false;
+ return false;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -81,8 +99,13 @@ public boolean checkPermission(final Permission permission ) {
* @return the principal associated with this entry.
*/
@Override
- public synchronized Principal getPrincipal() {
- return m_principal;
+ public Principal getPrincipal() {
+ lock.lock();
+ try {
+ return m_principal;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -102,14 +125,19 @@ public Enumeration< Permission > permissions() {
* @return true if the permission is removed, false if the permission was not part of this entry's permission set.
*/
@Override
- public synchronized boolean removePermission(final Permission permission ) {
- final Permission p = findPermission( permission );
- if( p != null ) {
- m_permissions.remove( p );
- return true;
- }
+ public boolean removePermission(final Permission permission ) {
+ lock.lock();
+ try {
+ final Permission p = findPermission(permission);
+ if (p != null) {
+ m_permissions.remove(p);
+ return true;
+ }
- return false;
+ return false;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -121,12 +149,17 @@ public synchronized boolean removePermission(final Permission permission ) {
* principal set for this entry
*/
@Override
- public synchronized boolean setPrincipal(final Principal user ) {
- if( m_principal != null || user == null ) {
- return false;
+ public boolean setPrincipal(final Principal user) {
+ lock.lock();
+ try {
+ if (m_principal != null || user == null) {
+ return false;
+ }
+ m_principal = user;
+ return true;
+ } finally {
+ lock.unlock();
}
- m_principal = user;
- return true;
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java
index 11af72c27f..3eb2ddf729 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/acl/AclImpl.java
@@ -27,6 +27,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Enumeration;
import java.util.List;
import java.util.Vector;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -36,6 +37,16 @@ Licensed to the Apache Software Foundation (ASF) under one
*/
public class AclImpl implements Acl, Serializable {
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
private static final long serialVersionUID = 1L;
private final Vector< AclEntry > m_entries = new Vector<>();
@@ -43,6 +54,7 @@ public class AclImpl implements Acl, Serializable {
* Constructs a new AclImpl instance.
*/
public AclImpl() {
+ lock = new ReentrantLock();
}
/** {@inheritDoc} */
@@ -86,24 +98,34 @@ private boolean hasEntry( final AclEntry entry ) {
/** {@inheritDoc} */
@Override
- public synchronized boolean addEntry( final AclEntry entry ) {
- if( entry.getPrincipal() == null ) {
- throw new IllegalArgumentException( "Entry principal cannot be null" );
- }
+ public boolean addEntry( final AclEntry entry ) {
+ lock.lock();
+ try {
+ if( entry.getPrincipal() == null ) {
+ throw new IllegalArgumentException( "Entry principal cannot be null" );
+ }
- if( hasEntry( entry ) ) {
- return false;
- }
+ if( hasEntry( entry ) ) {
+ return false;
+ }
- m_entries.add( entry );
+ m_entries.add( entry );
- return true;
+ return true;
+ } finally {
+ lock.unlock();
+ }
}
/** {@inheritDoc} */
@Override
- public synchronized boolean removeEntry( final AclEntry entry ) {
- return m_entries.remove( entry );
+ public boolean removeEntry( final AclEntry entry ) {
+ lock.lock();
+ try {
+ return m_entries.remove( entry );
+ } finally {
+ lock.unlock();
+ }
}
/** {@inheritDoc} */
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java
index fccf6c1cb0..eefc53bec0 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/DefaultGroupManager.java
@@ -49,6 +49,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -75,6 +76,20 @@ public class DefaultGroupManager implements GroupManager, Authorizer, WikiEventL
/** Map with GroupPrincipals as keys, and Groups as values */
private final Map< Principal, Group > m_groups = new HashMap<>();
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public DefaultGroupManager() {
+ lock = new ReentrantLock();
+ }
+
/** {@inheritDoc} */
@Override
public Principal findRole( final String name ) {
@@ -152,12 +167,15 @@ public void initialize( final Engine engine, final Properties props ) throws Wik
// Load all groups from the database into the cache
final Group[] groups = m_groupDatabase.groups();
- synchronized( m_groups ) {
+ lock.lock();
+ try {
for( final Group group : groups ) {
// Add new group to cache; fire GROUP_ADD event
m_groups.put( group.getPrincipal(), group );
fireEvent( WikiSecurityEvent.GROUP_ADD, group );
}
+ } finally {
+ lock.unlock();
}
// Make the GroupManager listen for WikiEvents (WikiSecurityEvents for changed user profiles)
@@ -253,8 +271,11 @@ public void removeGroup( final String index ) throws WikiSecurityException {
// Delete the group
// TODO: need rollback procedure
- synchronized( m_groups ) {
+ lock.lock();
+ try {
m_groups.remove( group.getPrincipal() );
+ } finally {
+ lock.unlock();
}
m_groupDatabase.delete( group );
fireEvent( WikiSecurityEvent.GROUP_REMOVE, group );
@@ -269,8 +290,11 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu
final Group oldGroup = m_groups.get( group.getPrincipal() );
if( oldGroup != null ) {
fireEvent( WikiSecurityEvent.GROUP_REMOVE, oldGroup );
- synchronized( m_groups ) {
+ lock.lock();
+ try {
m_groups.remove( oldGroup.getPrincipal() );
+ } finally {
+ lock.unlock();
}
}
@@ -283,8 +307,11 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu
}
// Add new group to cache; announce GROUP_ADD event
- synchronized( m_groups ) {
+ lock.lock();
+ try {
m_groups.put( group.getPrincipal(), group );
+ } finally {
+ lock.unlock();
}
fireEvent( WikiSecurityEvent.GROUP_ADD, group );
@@ -300,8 +327,11 @@ public void setGroup( final Session session, final Group group ) throws WikiSecu
// Restore previous version, re-throw...
fireEvent( WikiSecurityEvent.GROUP_REMOVE, group );
fireEvent( WikiSecurityEvent.GROUP_ADD, oldGroup );
- synchronized( m_groups ) {
+ lock.lock();
+ try {
m_groups.put( oldGroup.getPrincipal(), oldGroup );
+ } finally {
+ lock.unlock();
}
throw new WikiSecurityException( e.getMessage() + " (rolled back to previous version).", e );
}
@@ -367,14 +397,24 @@ protected String[] extractMembers( final String memberLine ) {
/** {@inheritDoc} */
@Override
- public synchronized void addWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.addWikiEventListener( this, listener );
+ public void addWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.addWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
/** {@inheritDoc} */
@Override
- public synchronized void removeWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.removeWikiEventListener( this, listener );
+ public void removeWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.removeWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
/** {@inheritDoc} */
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java
index fc0b733abb..31b67dbec1 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/authorize/Group.java
@@ -23,6 +23,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.security.Principal;
import java.util.Date;
import java.util.Vector;
+import java.util.concurrent.locks.ReentrantLock;
/**
*
@@ -77,6 +78,16 @@ public class Group {
private final String m_wiki;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
/**
* Protected constructor to prevent direct instantiation except by other
* package members. Callers should use
@@ -90,6 +101,7 @@ protected Group( final String name, final String wiki ) {
m_name = name;
m_wiki = wiki;
m_principal = new GroupPrincipal( name );
+ lock = new ReentrantLock();
}
/**
@@ -98,20 +110,30 @@ protected Group( final String name, final String wiki ) {
* @param user the principal to add
* @return true if the operation was successful
*/
- public synchronized boolean add( final Principal user ) {
- if( isMember( user ) ) {
- return false;
+ public boolean add( final Principal user ) {
+ lock.lock();
+ try {
+ if( isMember( user ) ) {
+ return false;
+ }
+
+ m_members.add( user );
+ return true;
+ } finally {
+ lock.unlock();
}
-
- m_members.add( user );
- return true;
}
/**
* Clears all Principals from the group list.
*/
- public synchronized void clear() {
- m_members.clear();
+ public void clear() {
+ lock.lock();
+ try {
+ m_members.clear();
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -156,8 +178,13 @@ public int hashCode() {
*
* @return the creation date
*/
- public synchronized Date getCreated() {
- return m_created;
+ public Date getCreated() {
+ lock.lock();
+ try {
+ return m_created;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -165,8 +192,13 @@ public synchronized Date getCreated() {
*
* @return the creator
*/
- public final synchronized String getCreator() {
- return m_creator;
+ public final String getCreator() {
+ lock.lock();
+ try {
+ return m_creator;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -174,8 +206,13 @@ public final synchronized String getCreator() {
*
* @return the date and time of last modification
*/
- public synchronized Date getLastModified() {
- return m_modified;
+ public Date getLastModified() {
+ lock.lock();
+ try {
+ return m_modified;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -183,8 +220,13 @@ public synchronized Date getLastModified() {
*
* @return the modifier
*/
- public final synchronized String getModifier() {
- return m_modifier;
+ public final String getModifier() {
+ lock.lock();
+ try {
+ return m_modifier;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -240,14 +282,20 @@ public Principal[] members() {
* @param user the principal to remove
* @return true if the operation was successful
*/
- public synchronized boolean remove( Principal user ) {
- user = findMember( user.getName() );
- if( user == null )
- return false;
-
- m_members.remove( user );
+ public boolean remove( Principal user ) {
+ lock.lock();
+ try {
+ user = findMember( user.getName() );
+ if( user == null )
+ return false;
+
+ m_members.remove( user );
+
+ return true;
+ } finally {
+ lock.unlock();
+ }
- return true;
}
/**
@@ -255,16 +303,26 @@ public synchronized boolean remove( Principal user ) {
*
* @param date the creation date
*/
- public synchronized void setCreated( final Date date ) {
- m_created = date;
+ public void setCreated( final Date date ) {
+ lock.lock();
+ try {
+ m_created = date;
+ } finally {
+ lock.unlock();
+ }
}
/**
* Sets the creator of this Group.
* @param creator the creator
*/
- public final synchronized void setCreator( final String creator ) {
- this.m_creator = creator;
+ public final void setCreator( final String creator ) {
+ lock.lock();
+ try {
+ this.m_creator = creator;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -272,8 +330,13 @@ public final synchronized void setCreator( final String creator ) {
*
* @param date the last-modified date
*/
- public synchronized void setLastModified( final Date date ) {
- m_modified = date;
+ public void setLastModified( final Date date ) {
+ lock.lock();
+ try {
+ m_modified = date;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -281,8 +344,13 @@ public synchronized void setLastModified( final Date date ) {
*
* @param modifier the modifier
*/
- public final synchronized void setModifier( final String modifier ) {
- this.m_modifier = modifier;
+ public final void setModifier( final String modifier ) {
+ lock.lock();
+ try {
+ this.m_modifier = modifier;
+ } finally {
+ lock.unlock();
+ }
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java
index 1d98e66cbd..28299c2a1a 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/login/CookieAuthenticationLoginModule.java
@@ -44,6 +44,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.UUID;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -99,6 +100,16 @@ public class CookieAuthenticationLoginModule extends AbstractLoginModule {
*/
private static final long SCRUB_PERIOD = 60 * 60 * 1_000L; // In milliseconds
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private static final ReentrantLock lock = new ReentrantLock();
+
/**
* {@inheritDoc}
*
@@ -268,25 +279,31 @@ private static Cookie getLoginCookie( final String value ) {
* @param days number of days that the cookie will survive
* @param cookieDir cookie directory
*/
- private static synchronized void scrub( final int days, final File cookieDir ) {
- LOG.debug( "Scrubbing cookieDir..." );
- final File[] files = cookieDir.listFiles();
- final long obsoleteDateLimit = System.currentTimeMillis() - ( ( long )days + 1 ) * 24 * 60 * 60 * 1000L;
- int deleteCount = 0;
-
- for( int i = 0; i < files.length; i++ ) {
- final File f = files[ i ];
- final long lastModified = f.lastModified();
- if( lastModified < obsoleteDateLimit ) {
- if( f.delete() ) {
- deleteCount++;
- } else {
- LOG.debug( "Error deleting cookie login with index {}", i );
+ private static void scrub( final int days, final File cookieDir ) {
+ lock.lock();
+ try {
+ LOG.debug( "Scrubbing cookieDir..." );
+ final File[] files = cookieDir.listFiles();
+ final long obsoleteDateLimit = System.currentTimeMillis() - ( ( long )days + 1 ) * 24 * 60 * 60 * 1000L;
+ int deleteCount = 0;
+
+ for( int i = 0; i < files.length; i++ ) {
+ final File f = files[ i ];
+ final long lastModified = f.lastModified();
+ if( lastModified < obsoleteDateLimit ) {
+ if( f.delete() ) {
+ deleteCount++;
+ } else {
+ LOG.debug( "Error deleting cookie login with index {}", i );
+ }
}
}
+
+ LOG.debug( "Removed {} obsolete cookie logins", deleteCount );
+ } finally {
+ lock.unlock();
}
- LOG.debug( "Removed {} obsolete cookie logins", deleteCount );
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java
index 760c879f4b..22d13be93f 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/permissions/PermissionFactory.java
@@ -21,6 +21,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import org.apache.wiki.api.core.Page;
import java.util.WeakHashMap;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -42,6 +43,16 @@ private PermissionFactory() {}
* cached page permissions.
*/
private static final WeakHashMap c_cache = new WeakHashMap<>();
+
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private static final ReentrantLock lock = new ReentrantLock();
/**
* Get a permission object for a WikiPage and a set of actions.
@@ -93,19 +104,23 @@ private static PagePermission getPagePermission( final String wiki, String page,
// It's fine if two threads update the cache, since the objects mean the same
// thing anyway. And this avoids nasty blocking effects.
//
- synchronized( c_cache )
- {
+ lock.lock();
+ try {
perm = c_cache.get( key );
+ } finally {
+ lock.unlock();
}
if( perm == null )
{
if( !wiki.isEmpty() ) page = wiki+":"+page;
perm = new PagePermission( page, actions );
-
- synchronized( c_cache )
- {
+
+ lock.lock();
+ try {
c_cache.put( key, perm );
+ } finally {
+ lock.unlock();
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java b/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java
index dd7c6ee582..c7251d6594 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/auth/user/XMLUserDatabase.java
@@ -52,6 +52,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Properties;
import java.util.SortedSet;
import java.util.TreeSet;
+import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@@ -91,25 +92,44 @@ public class XMLUserDatabase extends AbstractUserDatabase {
private Document c_dom;
private File c_file;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public XMLUserDatabase() {
+ lock = new ReentrantLock();
+ }
+
/** {@inheritDoc} */
@Override
- public synchronized void deleteByLoginName( final String loginName ) throws WikiSecurityException {
- if( c_dom == null ) {
- throw new WikiSecurityException( "FATAL: database does not exist" );
- }
+ public void deleteByLoginName( final String loginName ) throws WikiSecurityException {
+ lock.lock();
+ try {
+ if( c_dom == null ) {
+ throw new WikiSecurityException( "FATAL: database does not exist" );
+ }
- final NodeList users = c_dom.getDocumentElement().getElementsByTagName( USER_TAG );
- for( int i = 0; i < users.getLength(); i++ ) {
- final Element user = ( Element )users.item( i );
- if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) {
- c_dom.getDocumentElement().removeChild( user );
+ final NodeList users = c_dom.getDocumentElement().getElementsByTagName( USER_TAG );
+ for( int i = 0; i < users.getLength(); i++ ) {
+ final Element user = ( Element )users.item( i );
+ if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) {
+ c_dom.getDocumentElement().removeChild( user );
- // Commit to disk
- saveDOM();
- return;
+ // Commit to disk
+ saveDOM();
+ return;
+ }
}
+ throw new NoSuchPrincipalException( "Not in database: " + loginName );
+ } finally {
+ lock.unlock();
}
- throw new NoSuchPrincipalException( "Not in database: " + loginName );
}
/** {@inheritDoc} */
@@ -322,118 +342,129 @@ private void checkForRefresh() {
* @see org.apache.wiki.auth.user.UserDatabase#rename(String, String)
*/
@Override
- public synchronized void rename( final String loginName, final String newName) throws DuplicateUserException, WikiSecurityException {
- if( c_dom == null ) {
- LOG.fatal( "Could not rename profile '" + loginName + "'; database does not exist" );
- throw new IllegalStateException( "FATAL: database does not exist" );
- }
- checkForRefresh();
+ public void rename( final String loginName, final String newName) throws DuplicateUserException, WikiSecurityException {
+ lock.lock();
+ try {
+ if( c_dom == null ) {
+ LOG.fatal( "Could not rename profile '" + loginName + "'; database does not exist" );
+ throw new IllegalStateException( "FATAL: database does not exist" );
+ }
+ checkForRefresh();
- // Get the existing user; if not found, throws NoSuchPrincipalException
- final UserProfile profile = findByLoginName( loginName );
+ // Get the existing user; if not found, throws NoSuchPrincipalException
+ final UserProfile profile = findByLoginName( loginName );
- // Get user with the proposed name; if found, it's a collision
- try {
- final UserProfile otherProfile = findByLoginName( newName );
- if( otherProfile != null ) {
- throw new DuplicateUserException( "security.error.cannot.rename", newName );
+ // Get user with the proposed name; if found, it's a collision
+ try {
+ final UserProfile otherProfile = findByLoginName( newName );
+ if( otherProfile != null ) {
+ throw new DuplicateUserException( "security.error.cannot.rename", newName );
+ }
+ } catch( final NoSuchPrincipalException e ) {
+ // Good! That means it's safe to save using the new name
}
- } catch( final NoSuchPrincipalException e ) {
- // Good! That means it's safe to save using the new name
- }
- // Find the user with the old login id attribute, and change it
- final NodeList users = c_dom.getElementsByTagName( USER_TAG );
- for( int i = 0; i < users.getLength(); i++ ) {
- final Element user = ( Element )users.item( i );
- if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) {
- final DateFormat c_format = new SimpleDateFormat( DATE_FORMAT );
- final Date modDate = new Date( System.currentTimeMillis() );
- setAttribute( user, LOGIN_NAME, newName );
- setAttribute( user, LAST_MODIFIED, c_format.format( modDate ) );
- profile.setLoginName( newName );
- profile.setLastModified( modDate );
- break;
+ // Find the user with the old login id attribute, and change it
+ final NodeList users = c_dom.getElementsByTagName( USER_TAG );
+ for( int i = 0; i < users.getLength(); i++ ) {
+ final Element user = ( Element )users.item( i );
+ if( user.getAttribute( LOGIN_NAME ).equals( loginName ) ) {
+ final DateFormat c_format = new SimpleDateFormat( DATE_FORMAT );
+ final Date modDate = new Date( System.currentTimeMillis() );
+ setAttribute( user, LOGIN_NAME, newName );
+ setAttribute( user, LAST_MODIFIED, c_format.format( modDate ) );
+ profile.setLoginName( newName );
+ profile.setLastModified( modDate );
+ break;
+ }
}
+
+ // Commit to disk
+ saveDOM();
+ } finally {
+ lock.unlock();
}
- // Commit to disk
- saveDOM();
}
/** {@inheritDoc} */
@Override
- public synchronized void save( final UserProfile profile ) throws WikiSecurityException {
- if ( c_dom == null ) {
- LOG.fatal( "Could not save profile " + profile + " database does not exist" );
- throw new IllegalStateException( "FATAL: database does not exist" );
- }
+ public void save( final UserProfile profile ) throws WikiSecurityException {
+ lock.lock();
+ try {
+ if ( c_dom == null ) {
+ LOG.fatal( "Could not save profile " + profile + " database does not exist" );
+ throw new IllegalStateException( "FATAL: database does not exist" );
+ }
- checkForRefresh();
+ checkForRefresh();
- final DateFormat c_format = new SimpleDateFormat( DATE_FORMAT );
- final String index = profile.getLoginName();
- final NodeList users = c_dom.getElementsByTagName( USER_TAG );
- Element user = IntStream.range(0, users.getLength()).mapToObj(i -> (Element) users.item(i)).filter(currentUser -> currentUser.getAttribute(LOGIN_NAME).equals(index)).findFirst().orElse(null);
-
- boolean isNew = false;
-
- final Date modDate = new Date( System.currentTimeMillis() );
- if( user == null ) {
- // Create new user node
- profile.setCreated( modDate );
- LOG.info( "Creating new user " + index );
- user = c_dom.createElement( USER_TAG );
- c_dom.getDocumentElement().appendChild( user );
- setAttribute( user, CREATED, c_format.format( profile.getCreated() ) );
- isNew = true;
- } else {
- // To update existing user node, delete old attributes first...
- final NodeList attributes = user.getElementsByTagName( ATTRIBUTES_TAG );
- for( int i = 0; i < attributes.getLength(); i++ ) {
- user.removeChild( attributes.item( i ) );
+ final DateFormat c_format = new SimpleDateFormat( DATE_FORMAT );
+ final String index = profile.getLoginName();
+ final NodeList users = c_dom.getElementsByTagName( USER_TAG );
+ Element user = IntStream.range(0, users.getLength()).mapToObj(i -> (Element) users.item(i)).filter(currentUser -> currentUser.getAttribute(LOGIN_NAME).equals(index)).findFirst().orElse(null);
+
+ boolean isNew = false;
+
+ final Date modDate = new Date( System.currentTimeMillis() );
+ if( user == null ) {
+ // Create new user node
+ profile.setCreated( modDate );
+ LOG.info( "Creating new user " + index );
+ user = c_dom.createElement( USER_TAG );
+ c_dom.getDocumentElement().appendChild( user );
+ setAttribute( user, CREATED, c_format.format( profile.getCreated() ) );
+ isNew = true;
+ } else {
+ // To update existing user node, delete old attributes first...
+ final NodeList attributes = user.getElementsByTagName( ATTRIBUTES_TAG );
+ for( int i = 0; i < attributes.getLength(); i++ ) {
+ user.removeChild( attributes.item( i ) );
+ }
}
- }
- setAttribute( user, UID, profile.getUid() );
- setAttribute( user, LAST_MODIFIED, c_format.format( modDate ) );
- setAttribute( user, LOGIN_NAME, profile.getLoginName() );
- setAttribute( user, FULL_NAME, profile.getFullname() );
- setAttribute( user, WIKI_NAME, profile.getWikiName() );
- setAttribute( user, EMAIL, profile.getEmail() );
- final Date lockExpiry = profile.getLockExpiry();
- setAttribute( user, LOCK_EXPIRY, lockExpiry == null ? "" : c_format.format( lockExpiry ) );
-
- // Hash and save the new password if it's different from old one
- final String newPassword = profile.getPassword();
- if( newPassword != null && !newPassword.equals( "" ) ) {
- final String oldPassword = user.getAttribute( PASSWORD );
- if( !oldPassword.equals( newPassword ) ) {
- setAttribute( user, PASSWORD, getHash( newPassword ) );
+ setAttribute( user, UID, profile.getUid() );
+ setAttribute( user, LAST_MODIFIED, c_format.format( modDate ) );
+ setAttribute( user, LOGIN_NAME, profile.getLoginName() );
+ setAttribute( user, FULL_NAME, profile.getFullname() );
+ setAttribute( user, WIKI_NAME, profile.getWikiName() );
+ setAttribute( user, EMAIL, profile.getEmail() );
+ final Date lockExpiry = profile.getLockExpiry();
+ setAttribute( user, LOCK_EXPIRY, lockExpiry == null ? "" : c_format.format( lockExpiry ) );
+
+ // Hash and save the new password if it's different from old one
+ final String newPassword = profile.getPassword();
+ if( newPassword != null && !newPassword.equals( "" ) ) {
+ final String oldPassword = user.getAttribute( PASSWORD );
+ if( !oldPassword.equals( newPassword ) ) {
+ setAttribute( user, PASSWORD, getHash( newPassword ) );
+ }
}
- }
- // Save the attributes as Base64 string
- if(!profile.getAttributes().isEmpty()) {
- try {
- final String encodedAttributes = Serializer.serializeToBase64( profile.getAttributes() );
- final Element attributes = c_dom.createElement( ATTRIBUTES_TAG );
- user.appendChild( attributes );
- final Text value = c_dom.createTextNode( encodedAttributes );
- attributes.appendChild( value );
- } catch( final IOException e ) {
- throw new WikiSecurityException( "Could not save user profile attribute. Reason: " + e.getMessage(), e );
+ // Save the attributes as Base64 string
+ if( !profile.getAttributes().isEmpty()) {
+ try {
+ final String encodedAttributes = Serializer.serializeToBase64( profile.getAttributes() );
+ final Element attributes = c_dom.createElement( ATTRIBUTES_TAG );
+ user.appendChild( attributes );
+ final Text value = c_dom.createTextNode( encodedAttributes );
+ attributes.appendChild( value );
+ } catch( final IOException e ) {
+ throw new WikiSecurityException( "Could not save user profile attribute. Reason: " + e.getMessage(), e );
+ }
}
- }
- // Set the profile timestamps
- if( isNew ) {
- profile.setCreated( modDate );
- }
- profile.setLastModified( modDate );
+ // Set the profile timestamps
+ if( isNew ) {
+ profile.setCreated( modDate );
+ }
+ profile.setLastModified( modDate );
- // Commit to disk
- saveDOM();
+ // Commit to disk
+ saveDOM();
+ } finally {
+ lock.unlock();
+ }
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java
index 32d96bce18..bf4c71aa5c 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/diff/ContextualDiffProvider.java
@@ -42,6 +42,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
+import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
@@ -92,12 +93,24 @@ public class ContextualDiffProvider implements DiffProvider {
private static final int LIMIT_MAX_VALUE = (Integer.MAX_VALUE /2) - 1;
private int m_unchangedContextLimit = LIMIT_MAX_VALUE;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
/**
* Constructs this provider.
*/
public ContextualDiffProvider()
- {}
+ {
+ lock = new ReentrantLock();
+ }
/**
* @see org.apache.wiki.api.providers.WikiProvider#getProviderInfo()
@@ -137,35 +150,40 @@ public void initialize( final Engine engine, final Properties properties) throws
* {@inheritDoc}
*/
@Override
- public synchronized String makeDiffHtml( final Context ctx, final String wikiOld, final String wikiNew ) {
- //
- // Sequencing handles lineterminator to and every-other consequtive space to a
- //
- final String[] alpha = sequence( TextUtil.replaceEntities( wikiOld ) );
- final String[] beta = sequence( TextUtil.replaceEntities( wikiNew ) );
-
- final Revision rev;
+ public String makeDiffHtml( final Context ctx, final String wikiOld, final String wikiNew ) {
+ lock.lock();
try {
- rev = Diff.diff( alpha, beta, new MyersDiff() );
- } catch( final DifferentiationFailedException dfe ) {
- LOG.error( "Diff generation failed", dfe );
- return "Error while creating version diff.";
- }
-
- final int revSize = rev.size();
- final StringBuffer sb = new StringBuffer();
-
- sb.append( DIFF_START );
+ //
+ // Sequencing handles lineterminator to and every-other consequtive space to a
+ //
+ final String[] alpha = sequence( TextUtil.replaceEntities( wikiOld ) );
+ final String[] beta = sequence( TextUtil.replaceEntities( wikiNew ) );
+
+ final Revision rev;
+ try {
+ rev = Diff.diff( alpha, beta, new MyersDiff() );
+ } catch( final DifferentiationFailedException dfe ) {
+ LOG.error( "Diff generation failed", dfe );
+ return "Error while creating version diff.";
+ }
- //
- // The MyersDiff is a bit dumb by converting a single line multi-word diff into a series
- // of Changes. The ChangeMerger pulls them together again...
- //
- final ChangeMerger cm = new ChangeMerger( sb, alpha, revSize );
- rev.accept( cm );
- cm.shutdown();
- sb.append( DIFF_END );
- return sb.toString();
+ final int revSize = rev.size();
+ final StringBuffer sb = new StringBuffer();
+
+ sb.append( DIFF_START );
+
+ //
+ // The MyersDiff is a bit dumb by converting a single line multi-word diff into a series
+ // of Changes. The ChangeMerger pulls them together again...
+ //
+ final ChangeMerger cm = new ChangeMerger( sb, alpha, revSize );
+ rev.accept( cm );
+ cm.shutdown();
+ sb.append( DIFF_END );
+ return sb.toString();
+ } finally {
+ lock.unlock();
+ }
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java
index 299e25b62f..b58c3f249b 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/PageEventFilter.java
@@ -28,6 +28,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import org.apache.wiki.event.WikiPageEvent;
import java.util.Properties;
+import java.util.concurrent.locks.ReentrantLock;
/**
* Fires WikiPageEvents for page events.
@@ -50,6 +51,20 @@ Licensed to the Apache Software Foundation (ASF) under one
*/
public class PageEventFilter extends BasePageFilter {
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public PageEventFilter() {
+ lock = new ReentrantLock();
+ }
+
/**
* Called whenever a new PageFilter is instantiated and reset.
*/
@@ -112,8 +127,13 @@ public void postSave( final Context wikiContext, final String content ) {
*
* @param listener the event listener
*/
- public final synchronized void addWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.addWikiEventListener( this, listener );
+ public final void addWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.addWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -121,8 +141,13 @@ public final synchronized void addWikiEventListener( final WikiEventListener lis
*
* @param listener the event listener
*/
- public final synchronized void removeWikiEventListener( final WikiEventListener listener ) {
- WikiEventManager.removeWikiEventListener( this, listener );
+ public final void removeWikiEventListener( final WikiEventListener listener ) {
+ lock.lock();
+ try {
+ WikiEventManager.removeWikiEventListener( this, listener );
+ } finally {
+ lock.unlock();
+ }
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java
index 1160d8b744..981a291b6a 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java
@@ -77,6 +77,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.StringTokenizer;
import java.util.Vector;
import java.util.concurrent.ThreadLocalRandom;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -242,6 +243,20 @@ public class SpamFilter extends BasePageFilter {
*/
private static final long HASH_DELAY = 24;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public SpamFilter() {
+ lock = new ReentrantLock();
+ }
+
/**
* {@inheritDoc}
@@ -435,87 +450,92 @@ private Collection< Pattern > parseBlacklist( final String list ) {
* @param change page change
* @throws RedirectException spam filter rejects the page change.
*/
- private synchronized void checkSinglePageChange(final Context context, final Change change )
+ private void checkSinglePageChange(final Context context, final Change change )
throws RedirectException {
- final HttpServletRequest req = context.getHttpRequest();
+ lock.lock();
+ try {
+ final HttpServletRequest req = context.getHttpRequest();
- if( req != null ) {
- final String addr = HttpUtil.getRemoteAddress( req );
- int hostCounter = 0;
- int changeCounter = 0;
+ if( req != null ) {
+ final String addr = HttpUtil.getRemoteAddress( req );
+ int hostCounter = 0;
+ int changeCounter = 0;
- LOG.debug( "Change is " + change.m_change );
+ LOG.debug( "Change is " + change.m_change );
- final long time = System.currentTimeMillis() - 60*1000L; // 1 minute
+ final long time = System.currentTimeMillis() - 60*1000L; // 1 minute
- for( final Iterator< Host > i = m_lastModifications.iterator(); i.hasNext(); ) {
- final Host host = i.next();
+ for( final Iterator< Host > i = m_lastModifications.iterator(); i.hasNext(); ) {
+ final Host host = i.next();
- // Check if this item is invalid
- if( host.getAddedTime() < time ) {
- LOG.debug( "Removed host " + host.getAddress() + " from modification queue (expired)" );
- i.remove();
- continue;
- }
+ // Check if this item is invalid
+ if( host.getAddedTime() < time ) {
+ LOG.debug( "Removed host " + host.getAddress() + " from modification queue (expired)" );
+ i.remove();
+ continue;
+ }
- // Check if this IP address has been seen before
- if( host.getAddress().equals( addr ) ) {
- hostCounter++;
- }
+ // Check if this IP address has been seen before
+ if( host.getAddress().equals( addr ) ) {
+ hostCounter++;
+ }
- // Check, if this change has been seen before
- if( host.getChange() != null && host.getChange().equals( change ) ) {
- changeCounter++;
+ // Check, if this change has been seen before
+ if( host.getChange() != null && host.getChange().equals( change ) ) {
+ changeCounter++;
+ }
}
- }
- // Now, let's check against the limits.
- if( hostCounter >= m_limitSinglePageChanges ) {
- final Host host = new Host( addr, null );
- m_temporaryBanList.add( host );
+ // Now, let's check against the limits.
+ if( hostCounter >= m_limitSinglePageChanges ) {
+ final Host host = new Host( addr, null );
+ m_temporaryBanList.add( host );
- final String uid = log( context, REJECT, REASON_TOO_MANY_MODIFICATIONS, change.m_change );
- LOG.info( "SPAM:TooManyModifications (" + uid + "). Added host " + addr + " to temporary ban list for doing too many modifications/minute" );
- checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code " + uid + ")" );
- }
+ final String uid = log( context, REJECT, REASON_TOO_MANY_MODIFICATIONS, change.m_change );
+ LOG.info( "SPAM:TooManyModifications (" + uid + "). Added host " + addr + " to temporary ban list for doing too many modifications/minute" );
+ checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code " + uid + ")" );
+ }
- if( changeCounter >= m_limitSimilarChanges ) {
- final Host host = new Host( addr, null );
- m_temporaryBanList.add( host );
+ if( changeCounter >= m_limitSimilarChanges ) {
+ final Host host = new Host( addr, null );
+ m_temporaryBanList.add( host );
- final String uid = log( context, REJECT, REASON_SIMILAR_MODIFICATIONS, change.m_change );
- LOG.info( "SPAM:SimilarModifications (" + uid + "). Added host " + addr + " to temporary ban list for doing too many similar modifications" );
- checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code "+uid+")");
- }
+ final String uid = log( context, REJECT, REASON_SIMILAR_MODIFICATIONS, change.m_change );
+ LOG.info( "SPAM:SimilarModifications (" + uid + "). Added host " + addr + " to temporary ban list for doing too many similar modifications" );
+ checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code "+uid+")");
+ }
- // Calculate the number of links in the addition.
- String tstChange = change.toString();
- int urlCounter = 0;
- while( m_matcher.contains( tstChange,m_urlPattern ) ) {
- final MatchResult m = m_matcher.getMatch();
- tstChange = tstChange.substring( m.endOffset(0) );
- urlCounter++;
- }
+ // Calculate the number of links in the addition.
+ String tstChange = change.toString();
+ int urlCounter = 0;
+ while( m_matcher.contains( tstChange,m_urlPattern ) ) {
+ final MatchResult m = m_matcher.getMatch();
+ tstChange = tstChange.substring( m.endOffset(0) );
+ urlCounter++;
+ }
- if( urlCounter > m_maxUrls ) {
- final Host host = new Host( addr, null );
- m_temporaryBanList.add( host );
+ if( urlCounter > m_maxUrls ) {
+ final Host host = new Host( addr, null );
+ m_temporaryBanList.add( host );
- final String uid = log( context, REJECT, REASON_TOO_MANY_URLS, change.toString() );
- LOG.info( "SPAM:TooManyUrls (" + uid + "). Added host " + addr + " to temporary ban list for adding too many URLs" );
- checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code " + uid + ")" );
- }
+ final String uid = log( context, REJECT, REASON_TOO_MANY_URLS, change.toString() );
+ LOG.info( "SPAM:TooManyUrls (" + uid + "). Added host " + addr + " to temporary ban list for adding too many URLs" );
+ checkStrategy( context, "Herb says you look like a spammer, and I trust Herb! (Incident code " + uid + ")" );
+ }
- // Check bot trap
- checkBotTrap( context, change );
+ // Check bot trap
+ checkBotTrap( context, change );
- // Check UTF-8 mangling
- checkUTF8( context, change );
+ // Check UTF-8 mangling
+ checkUTF8( context, change );
- // Do Akismet check. This is good to be the last, because this is the most expensive operation.
- checkAkismet( context, change );
+ // Do Akismet check. This is good to be the last, because this is the most expensive operation.
+ checkAkismet( context, change );
- m_lastModifications.add( new Host( addr, change ) );
+ m_lastModifications.add( new Host( addr, change ) );
+ }
+ } finally {
+ lock.unlock();
}
}
@@ -630,15 +650,20 @@ private void checkUTF8( final Context context, final Change change ) throws Redi
}
/** Goes through the ban list and cleans away any host which has expired from it. */
- private synchronized void cleanBanList() {
- final long now = System.currentTimeMillis();
- for( final Iterator< Host > i = m_temporaryBanList.iterator(); i.hasNext(); ) {
- final Host host = i.next();
-
- if( host.getReleaseTime() < now ) {
- LOG.debug( "Removed host " + host.getAddress() + " from temporary ban list (expired)" );
- i.remove();
+ private void cleanBanList() {
+ lock.lock();
+ try {
+ final long now = System.currentTimeMillis();
+ for( final Iterator< Host > i = m_temporaryBanList.iterator(); i.hasNext(); ) {
+ final Host host = i.next();
+
+ if( host.getReleaseTime() < now ) {
+ LOG.debug( "Removed host " + host.getAddress() + " from temporary ban list (expired)" );
+ i.remove();
+ }
}
+ } finally {
+ lock.unlock();
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java b/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java
index f3d7eff0a0..ad59bef784 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/plugin/BugReportHandler.java
@@ -43,6 +43,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.StringTokenizer;
+import java.util.concurrent.locks.ReentrantLock;
/**
* Provides a handler for bug reports. Still under construction.
@@ -73,6 +74,20 @@ public class BugReportHandler implements Plugin {
/** Parameter name for setting the page. Value is {@value}. */
public static final String PARAM_PAGE = "page";
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public BugReportHandler() {
+ lock = new ReentrantLock();
+ }
+
/**
* {@inheritDoc}
*/
@@ -166,17 +181,22 @@ public String execute( final Context context, final Map< String, String > params
* Finds a free page name for adding the bug report. Tries to construct a page, and if it's found, adds a number to it
* and tries again.
*/
- private synchronized String findNextPage( final Context context, final String title, final String baseName ) {
- final String basicPageName = ( ( baseName != null ) ? baseName : "Bug" ) + MarkupParser.cleanLink( title );
- final Engine engine = context.getEngine();
-
- String pageName = basicPageName;
- long lastbug = 2;
- while( engine.getManager( PageManager.class ).wikiPageExists( pageName ) ) {
- pageName = basicPageName + lastbug++;
- }
+ private String findNextPage( final Context context, final String title, final String baseName ) {
+ lock.lock();
+ try {
+ final String basicPageName = ( ( baseName != null ) ? baseName : "Bug" ) + MarkupParser.cleanLink( title );
+ final Engine engine = context.getEngine();
- return pageName;
+ String pageName = basicPageName;
+ long lastbug = 2;
+ while( engine.getManager( PageManager.class ).wikiPageExists( pageName ) ) {
+ pageName = basicPageName + lastbug++;
+ }
+
+ return pageName;
+ } finally {
+ lock.unlock();
+ }
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java b/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java
index 850d7c210a..6a2faed91d 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/plugin/PageViewPlugin.java
@@ -59,6 +59,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Map.Entry;
import java.util.Properties;
import java.util.TreeMap;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -129,6 +130,20 @@ public class PageViewPlugin extends AbstractReferralPlugin implements Plugin, In
/** Constant for storage interval in seconds. */
private static final int STORAGE_INTERVAL = 60;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public PageViewPlugin() {
+ lock = new ReentrantLock();
+ }
+
/**
* Initialize the PageViewPlugin and its singleton.
*
@@ -137,11 +152,14 @@ public class PageViewPlugin extends AbstractReferralPlugin implements Plugin, In
@Override
public void initialize( final Engine engine ) {
LOG.info( "initializing PageViewPlugin" );
- synchronized( this ) {
+ lock.lock();
+ try {
if( c_singleton == null ) {
c_singleton = new PageViewManager();
}
c_singleton.initialize( engine );
+ } finally {
+ lock.unlock();
}
}
@@ -202,50 +220,60 @@ public final class PageViewManager implements WikiEventListener {
*
* @param engine The wiki engine.
*/
- public synchronized void initialize( final Engine engine ) {
- LOG.info( "initializing PageView Manager" );
- m_workDir = engine.getWorkDir();
- engine.addWikiEventListener( this );
- if( m_counters == null ) {
- // Load the counters into a collection
- m_storage = new Properties();
- m_counters = new TreeMap<>();
-
- loadCounters();
- }
+ public void initialize( final Engine engine ) {
+ lock.lock();
+ try {
+ LOG.info( "initializing PageView Manager" );
+ m_workDir = engine.getWorkDir();
+ engine.addWikiEventListener( this );
+ if( m_counters == null ) {
+ // Load the counters into a collection
+ m_storage = new Properties();
+ m_counters = new TreeMap<>();
+
+ loadCounters();
+ }
- // backup counters every 5 minutes
- if( m_pageCountSaveThread == null ) {
- m_pageCountSaveThread = new CounterSaveThread( engine, 5 * STORAGE_INTERVAL, this );
- m_pageCountSaveThread.start();
- }
+ // backup counters every 5 minutes
+ if( m_pageCountSaveThread == null ) {
+ m_pageCountSaveThread = new CounterSaveThread( engine, 5 * STORAGE_INTERVAL, this );
+ m_pageCountSaveThread.start();
+ }
- m_initialized = true;
+ m_initialized = true;
+ } finally {
+ lock.unlock();
+ }
}
/**
* Handle the shutdown event via the page counter thread.
*/
- private synchronized void handleShutdown() {
- LOG.info( "handleShutdown: The counter store thread was shut down." );
+ private void handleShutdown() {
+ lock.lock();
+ try {
+ LOG.info( "handleShutdown: The counter store thread was shut down." );
- cleanup();
+ cleanup();
- if( m_counters != null ) {
+ if( m_counters != null ) {
- m_dirty = true;
- storeCounters();
+ m_dirty = true;
+ storeCounters();
- m_counters.clear();
- m_counters = null;
+ m_counters.clear();
+ m_counters = null;
- m_storage.clear();
- m_storage = null;
- }
+ m_storage.clear();
+ m_storage = null;
+ }
- m_initialized = false;
+ m_initialized = false;
- m_pageCountSaveThread = null;
+ m_pageCountSaveThread = null;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -342,7 +370,9 @@ public String execute( final Context context, final Map< String, String > params
}
}
- synchronized( this ) {
+ lock.lock();
+ try {
+
Counter counter = m_counters.get( pagename );
// only count in view mode, keep storage values in sync
@@ -446,6 +476,8 @@ public String execute( final Context context, final Map< String, String > params
// let the engine render the list
result = engine.getManager( RenderingManager.class ).textToHTML( context, buf.toString() );
}
+ } finally {
+ lock.unlock();
}
}
return result;
@@ -509,7 +541,8 @@ int getCount( final Object key )
private void loadCounters() {
if( m_counters != null && m_storage != null ) {
LOG.info( "Loading counters." );
- synchronized( this ) {
+ lock.lock();
+ try {
try( final InputStream fis = Files.newInputStream( new File( m_workDir, COUNTER_PAGE ).toPath() ) ) {
m_storage.load( fis );
} catch( final IOException ioe ) {
@@ -520,8 +553,10 @@ private void loadCounters() {
for( final Entry< ?, ? > entry : m_storage.entrySet() ) {
m_counters.put( ( String )entry.getKey(), new Counter( ( String )entry.getValue() ) );
}
-
+
LOG.info( "Loaded " + m_counters.size() + " counter values." );
+ } finally {
+ lock.unlock();
}
}
}
@@ -532,7 +567,8 @@ private void loadCounters() {
void storeCounters() {
if( m_counters != null && m_storage != null && m_dirty ) {
LOG.info( "Storing " + m_counters.size() + " counter values." );
- synchronized( this ) {
+ lock.lock();
+ try {
// Write out the collection of counters
try( final OutputStream fos = Files.newOutputStream( new File( m_workDir, COUNTER_PAGE ).toPath() ) ) {
m_storage.store( fos, "\n# The number of times each page has been viewed.\n# Do not modify.\n" );
@@ -542,6 +578,8 @@ void storeCounters() {
} catch( final IOException ioe ) {
LOG.error( "Couldn't store counters values: " + ioe.getMessage() );
}
+ } finally {
+ lock.unlock();
}
}
}
@@ -552,9 +590,15 @@ void storeCounters() {
* @param thrd thread that can be the current background thread.
* @return boolean true if the thread is still the current background thread.
*/
- private synchronized boolean isRunning( final Thread thrd )
+ private boolean isRunning( final Thread thrd )
{
- return m_initialized && thrd == m_pageCountSaveThread;
+ lock.lock();
+ try {
+ return m_initialized && thrd == m_pageCountSaveThread;
+ } finally {
+ lock.unlock();
+ }
+
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java
index e8523d0717..8d5bcf415d 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingAttachmentProvider.java
@@ -45,6 +45,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.NoSuchElementException;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -62,6 +63,20 @@ public class CachingAttachmentProvider implements AttachmentProvider {
private boolean allRequested;
private final AtomicLong attachments = new AtomicLong( 0L );
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public CachingAttachmentProvider() {
+ lock = new ReentrantLock();
+ }
+
/**
* {@inheritDoc}
*/
@@ -141,7 +156,8 @@ public List< Attachment > listAllChanged( final Date timestamp ) throws Provider
all = provider.listAllChanged( timestamp );
// Make sure that all attachments are in the cache.
- synchronized( this ) {
+ lock.lock();
+ try {
for( final Attachment att : all ) {
cachingManager.put( CachingManager.CACHE_ATTACHMENTS, att.getName(), att );
}
@@ -149,6 +165,8 @@ public List< Attachment > listAllChanged( final Date timestamp ) throws Provider
allRequested = true;
attachments.set( all.size() );
}
+ } finally {
+ lock.unlock();
}
} else {
final List< String > keys = cachingManager.keys( CachingManager.CACHE_ATTACHMENTS );
@@ -240,14 +258,20 @@ public void deleteAttachment( final Attachment att ) throws ProviderException {
* @return A plain string with all the above-mentioned values.
*/
@Override
- public synchronized String getProviderInfo() {
- final CacheInfo attCacheInfo = cachingManager.info( CachingManager.CACHE_ATTACHMENTS );
- final CacheInfo attColCacheInfo = cachingManager.info( CachingManager.CACHE_ATTACHMENTS_COLLECTION );
- return "Real provider: " + provider.getClass().getName() +
- ". Attachment cache hits: " + attCacheInfo.getHits() +
- ". Attachment cache misses: " + attCacheInfo.getMisses() +
- ". Attachment collection cache hits: " + attColCacheInfo.getHits() +
- ". Attachment collection cache misses: " + attColCacheInfo.getMisses();
+ public String getProviderInfo() {
+ lock.lock();
+ try {
+ final CacheInfo attCacheInfo = cachingManager.info( CachingManager.CACHE_ATTACHMENTS );
+ final CacheInfo attColCacheInfo = cachingManager.info( CachingManager.CACHE_ATTACHMENTS_COLLECTION );
+ return "Real provider: " + provider.getClass().getName() +
+ ". Attachment cache hits: " + attCacheInfo.getHits() +
+ ". Attachment cache misses: " + attCacheInfo.getMisses() +
+ ". Attachment collection cache hits: " + attColCacheInfo.getHits() +
+ ". Attachment collection cache misses: " + attColCacheInfo.getMisses();
+ } finally {
+ lock.unlock();
+ }
+
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java
index 5b1260f468..745e58b734 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/CachingProvider.java
@@ -45,6 +45,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Properties;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -70,6 +71,20 @@ public class CachingProvider implements PageProvider {
private boolean allRequested;
private final AtomicLong pages = new AtomicLong( 0L );
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public CachingProvider() {
+ lock = new ReentrantLock();
+ }
+
/**
* {@inheritDoc}
*/
@@ -220,7 +235,8 @@ private String getTextFromCache( final String pageName ) throws ProviderExceptio
*/
@Override
public void putPageText( final Page page, final String text ) throws ProviderException {
- synchronized( this ) {
+ lock.lock();
+ try {
provider.putPageText( page, text );
page.setLastModified( new Date() );
@@ -230,6 +246,8 @@ public void putPageText( final Page page, final String text ) throws ProviderExc
cachingManager.remove( CachingManager.CACHE_PAGES_HISTORY, page.getName() );
getPageInfoFromCache( page.getName() );
+ } finally {
+ lock.unlock();
}
pages.incrementAndGet();
}
@@ -243,11 +261,14 @@ public Collection< Page > getAllPages() throws ProviderException {
if ( !allRequested ) {
all = provider.getAllPages();
// Make sure that all pages are in the cache.
- synchronized( this ) {
+ lock.lock();
+ try {
for( final Page p : all ) {
cachingManager.put( CachingManager.CACHE_PAGES, p.getName(), p );
}
allRequested = true;
+ } finally {
+ lock.unlock();
}
pages.set( all.size() );
} else {
@@ -349,14 +370,19 @@ public List< Page > getVersionHistory( final String pageName) throws ProviderExc
* @return A plain string with all the above-mentioned values.
*/
@Override
- public synchronized String getProviderInfo() {
- final CacheInfo pageCacheInfo = cachingManager.info( CachingManager.CACHE_PAGES );
- final CacheInfo pageHistoryCacheInfo = cachingManager.info( CachingManager.CACHE_PAGES_HISTORY );
- return "Real provider: " + provider.getClass().getName()+
- ". Page cache hits: " + pageCacheInfo.getHits() +
- ". Page cache misses: " + pageCacheInfo.getMisses() +
- ". History cache hits: " + pageHistoryCacheInfo.getHits() +
- ". History cache misses: " + pageHistoryCacheInfo.getMisses();
+ public String getProviderInfo() {
+ lock.lock();
+ try {
+ final CacheInfo pageCacheInfo = cachingManager.info( CachingManager.CACHE_PAGES );
+ final CacheInfo pageHistoryCacheInfo = cachingManager.info( CachingManager.CACHE_PAGES_HISTORY );
+ return "Real provider: " + provider.getClass().getName()+
+ ". Page cache hits: " + pageCacheInfo.getHits() +
+ ". Page cache misses: " + pageCacheInfo.getMisses() +
+ ". History cache hits: " + pageHistoryCacheInfo.getHits() +
+ ". History cache misses: " + pageHistoryCacheInfo.getMisses();
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -365,7 +391,8 @@ public synchronized String getProviderInfo() {
@Override
public void deleteVersion( final String pageName, final int version ) throws ProviderException {
// Luckily, this is such a rare operation it is okay to synchronize against the whole thing.
- synchronized( this ) {
+ lock.lock();
+ try {
final Page cached = getPageInfoFromCache( pageName );
final int latestcached = ( cached != null ) ? cached.getVersion() : Integer.MIN_VALUE;
@@ -377,6 +404,8 @@ public void deleteVersion( final String pageName, final int version ) throws Pro
provider.deleteVersion( pageName, version );
cachingManager.remove( CachingManager.CACHE_PAGES_HISTORY, pageName );
+ } finally {
+ lock.unlock();
}
if( version == PageProvider.LATEST_VERSION ) {
pages.decrementAndGet();
@@ -389,11 +418,14 @@ public void deleteVersion( final String pageName, final int version ) throws Pro
@Override
public void deletePage( final String pageName ) throws ProviderException {
// See note in deleteVersion().
- synchronized( this ) {
+ lock.lock();
+ try {
cachingManager.put( CachingManager.CACHE_PAGES, pageName, null );
cachingManager.put( CachingManager.CACHE_PAGES_TEXT, pageName, null );
cachingManager.put( CachingManager.CACHE_PAGES_HISTORY, pageName, null );
provider.deletePage( pageName );
+ } finally {
+ lock.unlock();
}
pages.decrementAndGet();
}
@@ -404,7 +436,8 @@ public void deletePage( final String pageName ) throws ProviderException {
@Override
public void movePage( final String from, final String to ) throws ProviderException {
provider.movePage( from, to );
- synchronized( this ) {
+ lock.lock();
+ try {
// Clear any cached version of the old page and new page
cachingManager.remove( CachingManager.CACHE_PAGES, from );
cachingManager.remove( CachingManager.CACHE_PAGES_TEXT, from );
@@ -413,6 +446,8 @@ public void movePage( final String from, final String to ) throws ProviderExcept
cachingManager.remove( CachingManager.CACHE_PAGES, to );
cachingManager.remove( CachingManager.CACHE_PAGES_TEXT, to );
cachingManager.remove( CachingManager.CACHE_PAGES_HISTORY, to );
+ } finally {
+ lock.unlock();
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java
index 35e70e2d6d..d6908853ca 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/providers/VersioningFileProvider.java
@@ -42,6 +42,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Date;
import java.util.List;
import java.util.Properties;
+import java.util.concurrent.locks.ReentrantLock;
/**
* Provides a simple directory based repository for Wiki pages.
@@ -81,6 +82,20 @@ public class VersioningFileProvider extends AbstractFileProvider {
private CachedProperties m_cachedProperties;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public VersioningFileProvider() {
+ lock = new ReentrantLock();
+ }
+
/**
* {@inheritDoc}
*/
@@ -268,21 +283,26 @@ private int realVersion( final String page, final int requestedVersion ) throws
* {@inheritDoc}
*/
@Override
- public synchronized String getPageText( final String page, int version ) throws ProviderException {
- final File dir = findOldPageDir( page );
+ public String getPageText( final String page, int version ) throws ProviderException {
+ lock.lock();
+ try {
+ final File dir = findOldPageDir( page );
- version = realVersion( page, version );
- if( version == -1 ) {
- // We can let the FileSystemProvider take care of these requests.
- return super.getPageText( page, PageProvider.LATEST_VERSION );
- }
+ version = realVersion( page, version );
+ if( version == -1 ) {
+ // We can let the FileSystemProvider take care of these requests.
+ return super.getPageText( page, PageProvider.LATEST_VERSION );
+ }
- final File pageFile = new File( dir, ""+version+FILE_EXT );
- if( !pageFile.exists() ) {
- throw new NoSuchVersionException("Version "+version+"does not exist.");
- }
+ final File pageFile = new File( dir, ""+version+FILE_EXT );
+ if( !pageFile.exists() ) {
+                throw new NoSuchVersionException("Version "+version+" does not exist.");
+ }
- return readFile( pageFile );
+ return readFile( pageFile );
+ } finally {
+ lock.unlock();
+ }
}
@@ -325,76 +345,82 @@ private String readFile( final File pagedata ) throws ProviderException {
* {@inheritDoc}
*/
@Override
- public synchronized void putPageText( final Page page, final String text ) throws ProviderException {
- // This is a bit complicated. We'll first need to copy the old file to be the newest file.
- final int latest = findLatestVersion( page.getName() );
- final File pageDir = findOldPageDir( page.getName() );
- if( !pageDir.exists() ) {
- pageDir.mkdirs();
- }
-
+ public void putPageText( final Page page, final String text ) throws ProviderException {
+ lock.lock();
try {
- // Copy old data to safety, if one exists.
- final File oldFile = findPage( page.getName() );
-
- // Figure out which version should the old page be? Numbers should always start at 1.
- // "most recent" = -1 ==> 1
- // "first" = 1 ==> 2
- int versionNumber = (latest > 0) ? latest : 1;
- final boolean firstUpdate = (versionNumber == 1);
-
- if( oldFile != null && oldFile.exists() ) {
- final File pageFile = new File( pageDir, versionNumber + FILE_EXT );
- try( final InputStream in = new BufferedInputStream( Files.newInputStream( oldFile.toPath() ) );
- final OutputStream out = new BufferedOutputStream( Files.newOutputStream( pageFile.toPath() ) ) ) {
- FileUtil.copyContents( in, out );
-
- // We need also to set the date, since we rely on this.
- pageFile.setLastModified( oldFile.lastModified() );
+ // This is a bit complicated. We'll first need to copy the old file to be the newest file.
+ final int latest = findLatestVersion( page.getName() );
+ final File pageDir = findOldPageDir( page.getName() );
+ if( !pageDir.exists() ) {
+ pageDir.mkdirs();
+ }
- // Kludge to make the property code to work properly.
- versionNumber++;
+ try {
+ // Copy old data to safety, if one exists.
+ final File oldFile = findPage( page.getName() );
+
+ // Figure out which version should the old page be? Numbers should always start at 1.
+ // "most recent" = -1 ==> 1
+ // "first" = 1 ==> 2
+ int versionNumber = (latest > 0) ? latest : 1;
+ final boolean firstUpdate = (versionNumber == 1);
+
+ if( oldFile != null && oldFile.exists() ) {
+ final File pageFile = new File( pageDir, versionNumber + FILE_EXT );
+ try( final InputStream in = new BufferedInputStream( Files.newInputStream( oldFile.toPath() ) );
+ final OutputStream out = new BufferedOutputStream( Files.newOutputStream( pageFile.toPath() ) ) ) {
+ FileUtil.copyContents( in, out );
+
+ // We need also to set the date, since we rely on this.
+ pageFile.setLastModified( oldFile.lastModified() );
+
+ // Kludge to make the property code to work properly.
+ versionNumber++;
+ }
}
- }
- // Let superclass handler writing data to a new version.
- super.putPageText( page, text );
+ // Let superclass handler writing data to a new version.
+ super.putPageText( page, text );
- // Finally, write page version data.
- // FIXME: No rollback available.
- final Properties props = getPageProperties( page.getName() );
+ // Finally, write page version data.
+ // FIXME: No rollback available.
+ final Properties props = getPageProperties( page.getName() );
- String authorFirst = null;
- // if the following file exists, we are NOT migrating from FileSystemProvider
- final File pagePropFile = new File(getPageDirectory() + File.separator + PAGEDIR + File.separator + mangleName(page.getName()) + File.separator + "page" + FileSystemProvider.PROP_EXT);
- if( firstUpdate && ! pagePropFile.exists() ) {
- // we might not yet have a versioned author because the old page was last maintained by FileSystemProvider
- final Properties props2 = getHeritagePageProperties( page.getName() );
+ String authorFirst = null;
+ // if the following file exists, we are NOT migrating from FileSystemProvider
+ final File pagePropFile = new File(getPageDirectory() + File.separator + PAGEDIR + File.separator + mangleName(page.getName()) + File.separator + "page" + FileSystemProvider.PROP_EXT);
+ if( firstUpdate && ! pagePropFile.exists() ) {
+ // we might not yet have a versioned author because the old page was last maintained by FileSystemProvider
+ final Properties props2 = getHeritagePageProperties( page.getName() );
- // remember the simulated original author (or something) in the new properties
- authorFirst = props2.getProperty( "1.author", "unknown" );
- props.setProperty( "1.author", authorFirst );
- }
+ // remember the simulated original author (or something) in the new properties
+ authorFirst = props2.getProperty( "1.author", "unknown" );
+ props.setProperty( "1.author", authorFirst );
+ }
- String newAuthor = page.getAuthor();
- if ( newAuthor == null ) {
- newAuthor = ( authorFirst != null ) ? authorFirst : "unknown";
- }
- page.setAuthor(newAuthor);
- props.setProperty( versionNumber + ".author", newAuthor );
+ String newAuthor = page.getAuthor();
+ if ( newAuthor == null ) {
+ newAuthor = ( authorFirst != null ) ? authorFirst : "unknown";
+ }
+ page.setAuthor(newAuthor);
+ props.setProperty( versionNumber + ".author", newAuthor );
- final String changeNote = page.getAttribute( Page.CHANGENOTE );
- if( changeNote != null ) {
- props.setProperty( versionNumber + ".changenote", changeNote );
- }
+ final String changeNote = page.getAttribute( Page.CHANGENOTE );
+ if( changeNote != null ) {
+ props.setProperty( versionNumber + ".changenote", changeNote );
+ }
- // Get additional custom properties from page and add to props
- getCustomProperties( page, props );
- putPageProperties( page.getName(), props );
- } catch( final IOException e ) {
- LOG.error( "Saving failed", e );
- throw new ProviderException("Could not save page text: "+e.getMessage());
+ // Get additional custom properties from page and add to props
+ getCustomProperties( page, props );
+ putPageProperties( page.getName(), props );
+ } catch( final IOException e ) {
+ LOG.error( "Saving failed", e );
+ throw new ProviderException("Could not save page text: "+e.getMessage());
+ }
+ } finally {
+ lock.unlock();
}
+
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java b/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java
index 329672d744..7a08d55765 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/references/DefaultReferenceManager.java
@@ -47,6 +47,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.security.NoSuchAlgorithmException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
/*
@@ -132,6 +133,16 @@ public class DefaultReferenceManager extends BasePageFilter implements Reference
/** We use this also a generic serialization id */
private static final long serialVersionUID = 4L;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
/**
* Builds a new ReferenceManager.
*
@@ -148,6 +159,7 @@ public DefaultReferenceManager( final Engine engine ) {
//
m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy );
m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo );
+ lock = new ReentrantLock();
}
/**
@@ -233,53 +245,63 @@ public void initialize( final Collection< Page > pages ) throws ProviderExceptio
* Reads the serialized data from the disk back to memory. Returns the date when the data was last written on disk
*/
@SuppressWarnings("unchecked")
- private synchronized long unserializeFromDisk() throws IOException, ClassNotFoundException {
- final long saved;
+ private long unserializeFromDisk() throws IOException, ClassNotFoundException {
+ lock.lock();
+ try {
+ final long saved;
- final File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE );
- try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) {
- final StopWatch sw = new StopWatch();
- sw.start();
+ final File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE );
+ try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) {
+ final StopWatch sw = new StopWatch();
+ sw.start();
- final long ver = in.readLong();
+ final long ver = in.readLong();
- if( ver != serialVersionUID ) {
- throw new IOException("File format has changed; I need to recalculate references.");
- }
+ if( ver != serialVersionUID ) {
+ throw new IOException("File format has changed; I need to recalculate references.");
+ }
- saved = in.readLong();
- m_refersTo = ( Map< String, Collection< String > > ) in.readObject();
- m_referredBy = ( Map< String, Set< String > > ) in.readObject();
+ saved = in.readLong();
+ m_refersTo = ( Map< String, Collection< String > > ) in.readObject();
+ m_referredBy = ( Map< String, Set< String > > ) in.readObject();
- m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy );
- m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo );
+ m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy );
+ m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo );
- sw.stop();
- LOG.debug( "Read serialized data successfully in {}", sw );
- }
+ sw.stop();
+ LOG.debug( "Read serialized data successfully in {}", sw );
+ }
- return saved;
+ return saved;
+ } finally {
+ lock.unlock();
+ }
}
/**
* Serializes hashmaps to disk. The format is private, don't touch it.
*/
- private synchronized void serializeToDisk() {
- final File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE );
- try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) {
- final StopWatch sw = new StopWatch();
- sw.start();
+ private void serializeToDisk() {
+ lock.lock();
+ try {
+ final File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE );
+ try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) {
+ final StopWatch sw = new StopWatch();
+ sw.start();
- out.writeLong( serialVersionUID );
- out.writeLong( System.currentTimeMillis() ); // Timestamp
- out.writeObject( m_refersTo );
- out.writeObject( m_referredBy );
+ out.writeLong( serialVersionUID );
+ out.writeLong( System.currentTimeMillis() ); // Timestamp
+ out.writeObject( m_refersTo );
+ out.writeObject( m_referredBy );
- sw.stop();
+ sw.stop();
- LOG.debug( "serialization done - took {}", sw );
- } catch( final IOException ioe ) {
- LOG.error( "Unable to serialize!", ioe );
+ LOG.debug( "serialization done - took {}", sw );
+ } catch( final IOException ioe ) {
+ LOG.error( "Unable to serialize!", ioe );
+ }
+ } finally {
+ lock.unlock();
}
}
@@ -301,101 +323,110 @@ private String getHashFileName( final String pageName ) {
/**
* Reads the serialized data from the disk back to memory. Returns the date when the data was last written on disk
*/
- private synchronized long unserializeAttrsFromDisk( final Page p ) throws IOException, ClassNotFoundException {
- long saved = 0L;
+ private long unserializeAttrsFromDisk( final Page p ) throws IOException, ClassNotFoundException {
+ lock.lock();
+ try {
+ long saved = 0L;
+
+ // Find attribute cache, and check if it exists
+ final String hashName = getHashFileName( p.getName() );
+ if( hashName != null ) {
+ File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR );
+ f = new File( f, hashName );
+ if( !f.exists() ) {
+ return 0L;
+ }
- // Find attribute cache, and check if it exists
- final String hashName = getHashFileName( p.getName() );
- if( hashName != null ) {
- File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR );
- f = new File( f, hashName );
- if( !f.exists() ) {
- return 0L;
- }
+ try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) {
+ final StopWatch sw = new StopWatch();
+ sw.start();
+ LOG.debug( "Deserializing attributes for {}", p.getName() );
- try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) {
- final StopWatch sw = new StopWatch();
- sw.start();
- LOG.debug( "Deserializing attributes for {}", p.getName() );
+ final long ver = in.readLong();
+ if( ver != serialVersionUID ) {
+ LOG.debug( "File format has changed; cannot deserialize." );
+ return 0L;
+ }
- final long ver = in.readLong();
- if( ver != serialVersionUID ) {
- LOG.debug( "File format has changed; cannot deserialize." );
- return 0L;
- }
+ saved = in.readLong();
+ final String name = in.readUTF();
+ if( !name.equals( p.getName() ) ) {
+ LOG.debug( "File name does not match ({}), skipping...", name );
+ return 0L; // Not here
+ }
- saved = in.readLong();
- final String name = in.readUTF();
- if( !name.equals( p.getName() ) ) {
- LOG.debug( "File name does not match ({}), skipping...", name );
- return 0L; // Not here
- }
+ final long entries = in.readLong();
+ for( int i = 0; i < entries; i++ ) {
+ final String key = in.readUTF();
+ final Object value = in.readObject();
+ p.setAttribute( key, value );
+ LOG.debug( " attr: {}={}", key, value );
+ }
- final long entries = in.readLong();
- for( int i = 0; i < entries; i++ ) {
- final String key = in.readUTF();
- final Object value = in.readObject();
- p.setAttribute( key, value );
- LOG.debug( " attr: {}={}", key, value );
+ sw.stop();
+ LOG.debug( "Read serialized data for {} successfully in {}", name, sw );
+ p.setHasMetadata();
}
-
- sw.stop();
- LOG.debug( "Read serialized data for {} successfully in {}", name, sw );
- p.setHasMetadata();
}
- }
- return saved;
+ return saved;
+ } finally {
+ lock.unlock();
+ }
}
/**
* Serializes hashmaps to disk. The format is private, don't touch it.
*/
- private synchronized void serializeAttrsToDisk( final Page p ) {
- final StopWatch sw = new StopWatch();
- sw.start();
+ private void serializeAttrsToDisk( final Page p ) {
+ lock.lock();
+ try {
+ final StopWatch sw = new StopWatch();
+ sw.start();
- final String hashName = getHashFileName( p.getName() );
- if( hashName != null ) {
- File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR );
- if( !f.exists() ) {
- f.mkdirs();
- }
+ final String hashName = getHashFileName( p.getName() );
+ if( hashName != null ) {
+ File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR );
+ if( !f.exists() ) {
+ f.mkdirs();
+ }
- // Create a digest for the name
- f = new File( f, hashName );
+ // Create a digest for the name
+ f = new File( f, hashName );
- try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) {
- // new Set to avoid concurrency issues
- final Set< Map.Entry < String, Object > > entries = new HashSet<>( p.getAttributes().entrySet() );
+ try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) {
+ // new Set to avoid concurrency issues
+ final Set< Map.Entry < String, Object > > entries = new HashSet<>( p.getAttributes().entrySet() );
- if(entries.isEmpty()) {
- // Nothing to serialize, therefore we will just simply remove the serialization file so that the
- // next time we boot, we don't deserialize old data.
- f.delete();
- return;
- }
+ if(entries.isEmpty()) {
+ // Nothing to serialize, therefore we will just simply remove the serialization file so that the
+ // next time we boot, we don't deserialize old data.
+ f.delete();
+ return;
+ }
- out.writeLong( serialVersionUID );
- out.writeLong( System.currentTimeMillis() ); // Timestamp
- out.writeUTF( p.getName() );
- out.writeLong( entries.size() );
+ out.writeLong( serialVersionUID );
+ out.writeLong( System.currentTimeMillis() ); // Timestamp
+ out.writeUTF( p.getName() );
+ out.writeLong( entries.size() );
- for( final Map.Entry< String, Object > e : entries ) {
- if( e.getValue() instanceof Serializable ) {
- out.writeUTF( e.getKey() );
- out.writeObject( e.getValue() );
+ for( final Map.Entry< String, Object > e : entries ) {
+ if( e.getValue() instanceof Serializable ) {
+ out.writeUTF( e.getKey() );
+ out.writeObject( e.getValue() );
+ }
}
- }
- } catch( final IOException e ) {
- LOG.error( "Unable to serialize!", e );
- } finally {
- sw.stop();
- LOG.debug( "serialization for {} done - took {}", p.getName(), sw );
+ } catch( final IOException e ) {
+ LOG.error( "Unable to serialize!", e );
+ } finally {
+ sw.stop();
+ LOG.debug( "serialization for {} done - took {}", p.getName(), sw );
+ }
}
+ } finally {
+ lock.unlock();
}
-
}
/**
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java b/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java
index 20cf0d366b..2e3122137d 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/rss/DefaultRSSGenerator.java
@@ -43,6 +43,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -64,6 +65,16 @@ public class DefaultRSSGenerator implements RSSGenerator {
private static final int MAX_CHARACTERS = Integer.MAX_VALUE-1;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
/**
* Builds the RSS generator for a given Engine.
*
@@ -75,6 +86,7 @@ public DefaultRSSGenerator( final Engine engine, final Properties properties ) {
m_channelDescription = properties.getProperty( PROP_CHANNEL_DESCRIPTION, m_channelDescription );
m_channelLanguage = properties.getProperty( PROP_CHANNEL_LANGUAGE, m_channelLanguage );
m_rssFile = TextUtil.getStringProperty( properties, DefaultRSSGenerator.PROP_RSSFILE, "rss.rdf" );
+ this.lock = new ReentrantLock();
}
/**
@@ -200,14 +212,24 @@ public String generateFeed( final Context wikiContext, final List< Page > change
/** {@inheritDoc} */
@Override
- public synchronized boolean isEnabled() {
- return m_enabled;
+ public boolean isEnabled() {
+ lock.lock();
+ try {
+ return m_enabled;
+ } finally {
+ lock.unlock();
+ }
}
/** {@inheritDoc} */
@Override
- public synchronized void setEnabled( final boolean enabled ) {
- m_enabled = enabled;
+ public void setEnabled( final boolean enabled ) {
+ lock.lock();
+ try {
+ m_enabled = enabled;
+ } finally {
+ lock.unlock();
+ }
}
/** {@inheritDoc} */
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java
index 9593712018..758284f006 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/search/LuceneSearchProvider.java
@@ -84,6 +84,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Properties;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
+import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
@@ -133,6 +134,20 @@ public class LuceneSearchProvider implements SearchProvider {
private static final String PUNCTUATION_TO_SPACES = StringUtils.repeat( " ", TextUtil.PUNCTUATION_CHARS_ALLOWED.length() );
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
+ public LuceneSearchProvider() {
+ lock = new ReentrantLock();
+ }
+
/** {@inheritDoc} */
@Override
public void initialize( final Engine engine, final Properties props ) throws NoRequiredPropertyException, IOException {
@@ -303,23 +318,28 @@ protected String getAttachmentContent( final Attachment att ) {
* @param page The WikiPage to check
* @param text The page text to index.
*/
- protected synchronized void updateLuceneIndex( final Page page, final String text ) {
- LOG.debug( "Updating Lucene index for page '{}'...", page.getName() );
- pageRemoved( page );
+ protected void updateLuceneIndex( final Page page, final String text ) {
+ lock.lock();
+ try {
+ LOG.debug( "Updating Lucene index for page '{}'...", page.getName() );
+ pageRemoved( page );
+
+ // Now add back the new version.
+ try( final Directory luceneDir = new NIOFSDirectory( new File( m_luceneDirectory ).toPath() );
+ final IndexWriter writer = getIndexWriter( luceneDir ) ) {
+ luceneIndexPage( page, text, writer );
+ } catch( final IOException e ) {
+ LOG.error( "Unable to update page '{}' from Lucene index", page.getName(), e );
+ // reindexPage( page );
+ } catch( final Exception e ) {
+ LOG.error( "Unexpected Lucene exception - please check configuration!", e );
+ // reindexPage( page );
+ }
- // Now add back the new version.
- try( final Directory luceneDir = new NIOFSDirectory( new File( m_luceneDirectory ).toPath() );
- final IndexWriter writer = getIndexWriter( luceneDir ) ) {
- luceneIndexPage( page, text, writer );
- } catch( final IOException e ) {
- LOG.error( "Unable to update page '{}' from Lucene index", page.getName(), e );
- // reindexPage( page );
- } catch( final Exception e ) {
- LOG.error( "Unexpected Lucene exception - please check configuration!", e );
- // reindexPage( page );
+ LOG.debug( "Done updating Lucene index for page '{}'.", page.getName() );
+ } finally {
+ lock.unlock();
}
-
- LOG.debug( "Done updating Lucene index for page '{}'.", page.getName() );
}
private Analyzer getLuceneAnalyzer() throws ProviderException {
@@ -389,8 +409,11 @@ protected Document luceneIndexPage( final Page page, final String text, final In
field = new Field( LUCENE_PAGE_KEYWORDS, page.getAttribute( "keywords" ).toString(), TextField.TYPE_STORED );
doc.add( field );
}
- synchronized( writer ) {
+ lock.lock();
+ try {
writer.addDocument( doc );
+ } finally {
+ lock.unlock();
}
return doc;
@@ -400,13 +423,18 @@ protected Document luceneIndexPage( final Page page, final String text, final In
* {@inheritDoc}
*/
@Override
- public synchronized void pageRemoved( final Page page ) {
- try( final Directory luceneDir = new NIOFSDirectory( new File( m_luceneDirectory ).toPath() );
- final IndexWriter writer = getIndexWriter( luceneDir ) ) {
- final Query query = new TermQuery( new Term( LUCENE_ID, page.getName() ) );
- writer.deleteDocuments( query );
- } catch( final Exception e ) {
- LOG.error( "Unable to remove page '{}' from Lucene index", page.getName(), e );
+ public void pageRemoved( final Page page ) {
+ lock.lock();
+ try {
+ try( final Directory luceneDir = new NIOFSDirectory( new File( m_luceneDirectory ).toPath() );
+ final IndexWriter writer = getIndexWriter( luceneDir ) ) {
+ final Query query = new TermQuery( new Term( LUCENE_ID, page.getName() ) );
+ writer.deleteDocuments( query );
+ } catch( final Exception e ) {
+ LOG.error( "Unable to remove page '{}' from Lucene index", page.getName(), e );
+ }
+ } finally {
+ lock.unlock();
}
}
@@ -540,6 +568,16 @@ private static final class LuceneUpdater extends WikiBackgroundThread {
private WatchDog m_watchdog;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private static final ReentrantLock lock = new ReentrantLock();
+
private LuceneUpdater( final Engine engine, final LuceneSearchProvider provider, final int initialDelay, final int indexDelay ) {
super( engine, indexDelay );
m_provider = provider;
@@ -567,16 +605,19 @@ public void startupTask() throws Exception {
@Override
public void backgroundTask() {
m_watchdog.enterState( "Emptying index queue", 60 );
-
- synchronized( m_provider.m_updates ) {
- while(!m_provider.m_updates.isEmpty()) {
+ lock.lock();
+ try {
+            while( !m_provider.m_updates.isEmpty() ) {
final Object[] pair = m_provider.m_updates.remove( 0 );
final Page page = ( Page )pair[ 0 ];
final String text = ( String )pair[ 1 ];
m_provider.updateLuceneIndex( page, text );
}
+ } finally {
+ lock.unlock();
}
+
m_watchdog.exitState();
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java b/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java
index e85c86af46..d85ec435c0 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/ui/admin/beans/SearchManagerBean.java
@@ -30,6 +30,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import javax.management.NotCompliantMBeanException;
import java.util.Collection;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -46,9 +47,20 @@ public class SearchManagerBean extends SimpleAdminBean {
private WikiBackgroundThread m_updater;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock;
+
public SearchManagerBean( final Engine engine ) throws NotCompliantMBeanException {
super();
+        this.lock = new ReentrantLock();
         initialize( engine );
}
@Override
@@ -74,49 +86,54 @@ public String getTitle()
*
* This method prevents itself from being called twice.
*/
- public synchronized void reload() {
- if( m_updater == null ) {
- m_updater = new WikiBackgroundThread( m_engine, 0 ) {
+ public void reload() {
+ lock.lock();
+ try {
+ if( m_updater == null ) {
+ m_updater = new WikiBackgroundThread( m_engine, 0 ) {
- int m_count;
- int m_max;
+ int m_count;
+ int m_max;
- @Override
- public void startupTask() throws Exception {
- super.startupTask();
+ @Override
+ public void startupTask() throws Exception {
+ super.startupTask();
- setName( "Reindexer started" );
- }
+ setName( "Reindexer started" );
+ }
+
+ @Override
+ public void backgroundTask() throws Exception {
+ final Collection< Page > allPages = m_engine.getManager( PageManager.class ).getAllPages();
- @Override
- public void backgroundTask() throws Exception {
- final Collection< Page > allPages = m_engine.getManager( PageManager.class ).getAllPages();
+ final SearchManager mgr = m_engine.getManager( SearchManager.class );
+ m_max = allPages.size();
- final SearchManager mgr = m_engine.getManager( SearchManager.class );
- m_max = allPages.size();
+ final ProgressItem pi = new ProgressItem() {
- final ProgressItem pi = new ProgressItem() {
+ @Override
+ public int getProgress() {
+ return 100 * m_count / m_max;
+ }
+ };
+ m_engine.getManager( ProgressManager.class ).startProgress( pi, PROGRESS_ID );
- @Override
- public int getProgress() {
- return 100 * m_count / m_max;
+ for( final Page page : allPages ) {
+ mgr.reindexPage( page );
+ m_count++;
}
- };
- m_engine.getManager( ProgressManager.class ).startProgress( pi, PROGRESS_ID );
- for( final Page page : allPages ) {
- mgr.reindexPage( page );
- m_count++;
+ m_engine.getManager( ProgressManager.class ).stopProgress( PROGRESS_ID );
+ shutdown();
+ m_updater = null;
}
- m_engine.getManager( ProgressManager.class ).stopProgress( PROGRESS_ID );
- shutdown();
- m_updater = null;
- }
-
- };
+ };
- m_updater.start();
+ m_updater.start();
+ }
+ } finally {
+ lock.unlock();
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java
index 3f42bebf6f..20be5cb527 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/AbstractStep.java
@@ -24,6 +24,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.io.Serializable;
import java.security.Principal;
import java.util.*;
+import java.util.concurrent.locks.ReentrantLock;
/**
* Abstract superclass that provides a complete implementation of most Step methods; subclasses need only implement {@link #execute(Context)} and
@@ -58,6 +59,16 @@ public abstract class AbstractStep implements Step {
private boolean m_started;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock = new ReentrantLock();
+
/**
* Protected constructor that creates a new Step with a specified message key. After construction, the method
* {@link #setWorkflow(int, Map)} should be called.
@@ -146,8 +157,13 @@ public final String getMessageKey() {
* {@inheritDoc}
*/
@Override
- public final synchronized Outcome getOutcome() {
- return m_outcome;
+ public final Outcome getOutcome() {
+ lock.lock();
+ try {
+ return m_outcome;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -178,35 +194,46 @@ public final boolean isStarted() {
* {@inheritDoc}
*/
@Override
- public final synchronized void setOutcome(final Outcome outcome ) {
- // Is this an allowed Outcome?
- if( !m_successors.containsKey( outcome ) ) {
- if( !Outcome.STEP_CONTINUE.equals( outcome ) && !Outcome.STEP_ABORT.equals( outcome ) ) {
- throw new IllegalArgumentException( "Outcome " + outcome.getMessageKey() + " is not supported for this Step." );
+ public final void setOutcome(final Outcome outcome ) {
+ lock.lock();
+ try {
+ // Is this an allowed Outcome?
+ if( !m_successors.containsKey( outcome ) ) {
+ if( !Outcome.STEP_CONTINUE.equals( outcome ) && !Outcome.STEP_ABORT.equals( outcome ) ) {
+ throw new IllegalArgumentException( "Outcome " + outcome.getMessageKey() + " is not supported for this Step." );
+ }
}
- }
- // Is this a "completion" outcome?
- if( outcome.isCompletion() ) {
- if( m_completed ) {
- throw new IllegalStateException( "Step has already been marked complete; cannot set again." );
+ // Is this a "completion" outcome?
+ if( outcome.isCompletion() ) {
+ if( m_completed ) {
+ throw new IllegalStateException( "Step has already been marked complete; cannot set again." );
+ }
+ m_completed = true;
+ m_end = new Date( System.currentTimeMillis() );
}
- m_completed = true;
- m_end = new Date( System.currentTimeMillis() );
+ m_outcome = outcome;
+ } finally {
+ lock.unlock();
}
- m_outcome = outcome;
+
}
/**
* {@inheritDoc}
*/
@Override
- public final synchronized void start() throws WikiException {
- if( m_started ) {
- throw new IllegalStateException( "Step already started." );
+ public final void start() throws WikiException {
+ lock.lock();
+ try {
+ if( m_started ) {
+ throw new IllegalStateException( "Step already started." );
+ }
+ m_started = true;
+ m_start = new Date( System.currentTimeMillis() );
+ } finally {
+ lock.unlock();
}
- m_started = true;
- m_start = new Date( System.currentTimeMillis() );
}
/**
@@ -226,9 +253,14 @@ public final Step getSuccessor(final Outcome outcome ) {
* @param workflowContext the parent workflow context to set
*/
@Override
- public final synchronized void setWorkflow(final int workflowId, final Map< String, Serializable > workflowContext ) {
- this.workflowId = workflowId;
- this.workflowContext = workflowContext;
+ public final void setWorkflow(final int workflowId, final Map< String, Serializable > workflowContext ) {
+ lock.lock();
+ try {
+ this.workflowId = workflowId;
+ this.workflowContext = workflowContext;
+ } finally {
+ lock.unlock();
+ }
}
public int getWorkflowId() {
@@ -244,8 +276,13 @@ public Map< String, Serializable > getWorkflowContext() {
*
* @param message the error message
*/
- protected final synchronized void addError( final String message ) {
- m_errors.add( message );
+ protected final void addError( final String message ) {
+ lock.lock();
+ try {
+ m_errors.add( message );
+ } finally {
+ lock.unlock();
+ }
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java
index 7f833ecc59..c7f6393002 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Decision.java
@@ -29,6 +29,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -62,6 +63,16 @@ public abstract class Decision extends AbstractStep {
private final List m_facts;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock = new ReentrantLock();
+
/**
* Constructs a new Decision for a required "actor" Principal, having a default Outcome.
*
@@ -186,11 +197,16 @@ public boolean isReassignable() {
*
* @param actor the actor to reassign the Decision to
*/
- public final synchronized void reassign( final Principal actor ) {
- if( isReassignable() ) {
- m_actor = actor;
- } else {
- throw new IllegalArgumentException( "Decision cannot be reassigned." );
+ public final void reassign( final Principal actor ) {
+ lock.lock();
+ try {
+ if( isReassignable() ) {
+ m_actor = actor;
+ } else {
+ throw new IllegalArgumentException( "Decision cannot be reassigned." );
+ }
+ } finally {
+ lock.unlock();
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java
index 83d18a3896..592c3eb533 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DecisionQueue.java
@@ -30,6 +30,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.Collection;
import java.util.LinkedList;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -45,6 +46,16 @@ public class DecisionQueue implements Serializable {
private final AtomicInteger next = new AtomicInteger( 1_000 );
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock = new ReentrantLock();
+
/** Constructs a new DecisionQueue. */
public DecisionQueue() {
}
@@ -54,9 +65,14 @@ public DecisionQueue() {
*
* @param decision the Decision to add
*/
- protected synchronized void add( final Decision decision ) {
- m_queue.addLast( decision );
- decision.setId( next.getAndIncrement() );
+ protected void add( final Decision decision ) {
+ lock.lock();
+ try {
+ m_queue.addLast( decision );
+ decision.setId( next.getAndIncrement() );
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -74,8 +90,13 @@ protected Decision[] decisions() {
*
* @param decision the decision to remove
*/
- protected synchronized void remove( final Decision decision ) {
- m_queue.remove( decision );
+ protected void remove( final Decision decision ) {
+ lock.lock();
+ try {
+ m_queue.remove( decision );
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -133,14 +154,19 @@ public void decide( final Decision decision, final Outcome outcome, final Contex
* @param owner the new owner
* @throws WikiException never
*/
- public synchronized void reassign( final Decision decision, final Principal owner ) throws WikiException {
- if( decision.isReassignable() ) {
- decision.reassign( owner );
-
- WikiEventEmitter.fireWorkflowEvent( decision, WorkflowEvent.DQ_REASSIGN );
- return;
+ public void reassign( final Decision decision, final Principal owner ) throws WikiException {
+ lock.lock();
+ try {
+ if( decision.isReassignable() ) {
+ decision.reassign( owner );
+
+ WikiEventEmitter.fireWorkflowEvent( decision, WorkflowEvent.DQ_REASSIGN );
+ return;
+ }
+ throw new IllegalStateException( "Reassignments not allowed for this decision." );
+ } finally {
+ lock.unlock();
}
- throw new IllegalStateException( "Reassignments not allowed for this decision." );
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java
index 93307699be..45a074eee4 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/DefaultWorkflowManager.java
@@ -39,6 +39,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -62,6 +63,16 @@ public class DefaultWorkflowManager implements WorkflowManager, Serializable {
private Engine m_engine;
private int retainCompleted;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock = new ReentrantLock();
+
/**
* Constructs a new WorkflowManager, with an empty workflow cache.
*/
@@ -70,6 +81,7 @@ public DefaultWorkflowManager() {
m_approvers = new ConcurrentHashMap<>();
m_queue = new DecisionQueue();
WikiEventEmitter.attach( this );
+ // this.lock = new ReentrantLock();
}
/**
@@ -140,49 +152,59 @@ public void initialize( final Engine engine, final Properties props ) {
* @return the date when the data was last written on disk or {@code 0} if there has been problems reading from disk.
*/
@SuppressWarnings( "unchecked" )
- synchronized long unserializeFromDisk( final File f ) {
- long saved = 0L;
- final StopWatch sw = new StopWatch();
- sw.start();
- try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) {
- final long ver = in.readLong();
- if( ver != serialVersionUID ) {
- LOG.warn( "File format has changed; Unable to recover workflows and decision queue from disk." );
- } else {
- saved = in.readLong();
- m_workflows = ( Set< Workflow > )in.readObject();
- m_queue = ( DecisionQueue )in.readObject();
- m_completed = new CircularFifoQueue<>( retainCompleted );
- m_completed.addAll( ( Collection< Workflow > )in.readObject() );
- LOG.debug( "Read serialized data successfully in " + sw );
+ long unserializeFromDisk( final File f ) {
+ lock.lock();
+ try {
+ long saved = 0L;
+ final StopWatch sw = new StopWatch();
+ sw.start();
+ try( final ObjectInputStream in = new ObjectInputStream( new BufferedInputStream( Files.newInputStream( f.toPath() ) ) ) ) {
+ final long ver = in.readLong();
+ if( ver != serialVersionUID ) {
+ LOG.warn( "File format has changed; Unable to recover workflows and decision queue from disk." );
+ } else {
+ saved = in.readLong();
+ m_workflows = ( Set< Workflow > )in.readObject();
+ m_queue = ( DecisionQueue )in.readObject();
+ m_completed = new CircularFifoQueue<>( retainCompleted );
+ m_completed.addAll( ( Collection< Workflow > )in.readObject() );
+ LOG.debug( "Read serialized data successfully in " + sw );
+ }
+ } catch( final IOException | ClassNotFoundException e ) {
+ LOG.warn( "unable to recover from disk workflows and decision queue: " + e.getMessage() );
}
- } catch( final IOException | ClassNotFoundException e ) {
- LOG.warn( "unable to recover from disk workflows and decision queue: " + e.getMessage() );
- }
- sw.stop();
+ sw.stop();
- return saved;
+ return saved;
+ } finally {
+ lock.unlock();
+ }
}
/**
* Serializes workflows and decisionqueue to disk. The format is private, don't touch it.
*/
- synchronized void serializeToDisk( final File f ) {
- try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) {
- final StopWatch sw = new StopWatch();
- sw.start();
-
- out.writeLong( serialVersionUID );
- out.writeLong( System.currentTimeMillis() ); // Timestamp
- out.writeObject( m_workflows );
- out.writeObject( m_queue );
- out.writeObject( m_completed );
-
- sw.stop();
-
- LOG.debug( "serialization done - took " + sw );
- } catch( final IOException ioe ) {
- LOG.error( "Unable to serialize!", ioe );
+ void serializeToDisk( final File f ) {
+ lock.lock();
+ try {
+ try( final ObjectOutputStream out = new ObjectOutputStream( new BufferedOutputStream( Files.newOutputStream( f.toPath() ) ) ) ) {
+ final StopWatch sw = new StopWatch();
+ sw.start();
+
+ out.writeLong( serialVersionUID );
+ out.writeLong( System.currentTimeMillis() ); // Timestamp
+ out.writeObject( m_workflows );
+ out.writeObject( m_queue );
+ out.writeObject( m_completed );
+
+ sw.stop();
+
+ LOG.debug( "serialization done - took " + sw );
+ } catch( final IOException ioe ) {
+ LOG.error( "Unable to serialize!", ioe );
+ }
+ } finally {
+ lock.unlock();
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java
index 87a68400be..9eed1d90fc 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Task.java
@@ -21,6 +21,7 @@ Licensed to the Apache Software Foundation (ASF) under one
import java.io.Serializable;
import java.security.Principal;
import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -38,6 +39,15 @@ public abstract class Task extends AbstractStep {
private static final long serialVersionUID = 4630293957752430807L;
private Step m_successor;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private final ReentrantLock lock = new ReentrantLock();
/**
* Public constructor that creates a new Task with a specified message key. After construction, the protected method
@@ -81,8 +91,13 @@ public final Principal getActor() {
*
* @param step the successor
*/
- public final synchronized void setSuccessor( final Step step ) {
- m_successor = step;
+ public final void setSuccessor( final Step step ) {
+ lock.lock();
+ try {
+ m_successor = step;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -91,8 +106,13 @@ public final synchronized void setSuccessor( final Step step ) {
*
* @return the next step
*/
- public final synchronized Step getSuccessor() {
- return m_successor;
+ public final Step getSuccessor() {
+ lock.lock();
+ try {
+ return m_successor;
+ } finally {
+ lock.unlock();
+ }
}
}
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java
index 4a607e020b..55848038c3 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/workflow/Workflow.java
@@ -23,11 +23,14 @@ Licensed to the Apache Software Foundation (ASF) under one
import org.apache.wiki.event.WikiEventEmitter;
import org.apache.wiki.event.WorkflowEvent;
+import java.io.IOException;
+import java.io.ObjectInputStream;
import java.io.Serializable;
import java.security.Principal;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.ReentrantLock;
/**
@@ -205,6 +208,16 @@ public class Workflow implements Serializable {
private Step m_currentStep;
+ /**
+ * A lock used to ensure thread safety when accessing shared resources.
+ * This lock provides more flexibility and capabilities than the intrinsic locking mechanism,
+ * such as the ability to attempt to acquire a lock with a timeout, or to interrupt a thread
+ * waiting to acquire a lock.
+ *
+ * @see java.util.concurrent.locks.ReentrantLock
+ */
+ private ReentrantLock lock = new ReentrantLock();
+
/**
* Constructs a new Workflow object with a supplied message key, owner Principal, and undefined unique identifier {@link #ID_NOT_SET}.
* Once instantiated the Workflow is considered to be in the {@link #CREATED} state; a caller must explicitly invoke the
@@ -233,25 +246,30 @@ public Workflow( final String messageKey, final Principal owner ) {
* to completion, but it cannot be called twice. It finishes by calling the {@link #cleanup()} method to flush retained objects.
* If the Workflow had been previously aborted, this method throws an IllegalStateException.
*/
- public final synchronized void abort( final Context context ) {
- // Check corner cases: previous abort or completion
- if( m_state == ABORTED ) {
- throw new IllegalStateException( "The workflow has already been aborted." );
- }
- if( m_state == COMPLETED ) {
- throw new IllegalStateException( "The workflow has already completed." );
- }
+ public final void abort( final Context context ) {
+ lock.lock();
+ try {
+ // Check corner cases: previous abort or completion
+ if( m_state == ABORTED ) {
+ throw new IllegalStateException( "The workflow has already been aborted." );
+ }
+ if( m_state == COMPLETED ) {
+ throw new IllegalStateException( "The workflow has already completed." );
+ }
- if( m_currentStep != null ) {
- if( m_currentStep instanceof Decision ) {
- WikiEventEmitter.fireWorkflowEvent( m_currentStep, WorkflowEvent.DQ_REMOVAL, context );
+ if( m_currentStep != null ) {
+ if( m_currentStep instanceof Decision ) {
+ WikiEventEmitter.fireWorkflowEvent( m_currentStep, WorkflowEvent.DQ_REMOVAL, context );
+ }
+ m_currentStep.setOutcome( Outcome.STEP_ABORT );
+ m_history.addLast( m_currentStep );
}
- m_currentStep.setOutcome( Outcome.STEP_ABORT );
- m_history.addLast( m_currentStep );
+ m_state = ABORTED;
+ WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.ABORTED );
+ cleanup();
+ } finally {
+ lock.unlock();
}
- m_state = ABORTED;
- WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.ABORTED );
- cleanup();
}
/**
@@ -275,11 +293,16 @@ public final void addMessageArgument( final Serializable obj ) {
*
* @return the current actor
*/
- public final synchronized Principal getCurrentActor() {
- if( m_currentStep == null ) {
- return null;
+ public final Principal getCurrentActor() {
+ lock.lock();
+ try {
+ if( m_currentStep == null ) {
+ return null;
+ }
+ return m_currentStep.getActor();
+ } finally {
+ lock.unlock();
}
- return m_currentStep.getActor();
}
/**
@@ -343,9 +366,14 @@ public final Date getEndTime() {
*
* @return the unique identifier
*/
- public final synchronized int getId()
+ public final int getId()
{
- return m_id;
+ lock.lock();
+ try {
+ return m_id;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -434,9 +462,14 @@ public final boolean isAborted()
*
* @return true if the workflow has been started but has no more steps to perform; false if not.
*/
- public final synchronized boolean isCompleted() {
- // If current step is null, then we're done
- return m_started && m_state == COMPLETED;
+ public final boolean isCompleted() {
+ lock.lock();
+ try {
+ // If current step is null, then we're done
+ return m_started && m_state == COMPLETED;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -468,20 +501,25 @@ public final Step getPreviousStep()
* @param context current wiki context
* @throws WikiException if the current task's {@link Task#execute( Context )} method throws an exception
*/
- public final synchronized void restart( final Context context ) throws WikiException {
- if( m_state != WAITING ) {
- throw new IllegalStateException( "Workflow is not paused; cannot restart." );
- }
- WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.STARTED );
- m_state = RUNNING;
- WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.RUNNING );
-
- // Process current step
+ public final void restart( final Context context ) throws WikiException {
+ lock.lock();
try {
- processCurrentStep( context );
- } catch( final WikiException e ) {
- abort( context );
- throw e;
+ if( m_state != WAITING ) {
+ throw new IllegalStateException( "Workflow is not paused; cannot restart." );
+ }
+ WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.STARTED );
+ m_state = RUNNING;
+ WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.RUNNING );
+
+ // Process current step
+ try {
+ processCurrentStep( context );
+ } catch( final WikiException e ) {
+ abort( context );
+ throw e;
+ }
+ } finally {
+ lock.unlock();
}
}
@@ -505,9 +543,14 @@ public final void setAttribute( final String attr, final Serializable obj ) {
*
* @param step the first step for the workflow
*/
- public final synchronized void setFirstStep( final Step step )
+ public final void setFirstStep( final Step step )
{
- m_firstStep = step;
+ lock.lock();
+ try {
+ m_firstStep = step;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -515,9 +558,14 @@ public final synchronized void setFirstStep( final Step step )
*
* @param id the unique identifier
*/
- public final synchronized void setId( final int id )
+ public final void setId( final int id )
{
- this.m_id = id;
+ lock.lock();
+ try {
+ this.m_id = id;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -528,28 +576,33 @@ public final synchronized void setId( final int id )
* @param context current wiki context.
* @throws WikiException if the current Step's {@link Step#start()} method throws an exception of any kind
*/
- public final synchronized void start( final Context context ) throws WikiException {
- if( m_state == ABORTED ) {
- throw new IllegalStateException( "Workflow cannot be started; it has already been aborted." );
- }
- if( m_started ) {
- throw new IllegalStateException( "Workflow has already started." );
- }
- WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.STARTED );
- m_started = true;
- m_state = RUNNING;
-
- WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.RUNNING );
- // Mark the first step as the current one & add to history
- m_currentStep = m_firstStep;
- m_history.add( m_currentStep );
-
- // Process current step
+ public final void start( final Context context ) throws WikiException {
+ lock.lock();
try {
- processCurrentStep( context );
- } catch( final WikiException e ) {
- abort( context );
- throw e;
+ if( m_state == ABORTED ) {
+ throw new IllegalStateException( "Workflow cannot be started; it has already been aborted." );
+ }
+ if( m_started ) {
+ throw new IllegalStateException( "Workflow has already started." );
+ }
+ WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.STARTED );
+ m_started = true;
+ m_state = RUNNING;
+
+ WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.RUNNING );
+ // Mark the first step as the current one & add to history
+ m_currentStep = m_firstStep;
+ m_history.add( m_currentStep );
+
+ // Process current step
+ try {
+ processCurrentStep( context );
+ } catch( final WikiException e ) {
+ abort( context );
+ throw e;
+ }
+ } finally {
+ lock.unlock();
}
}
@@ -557,12 +610,17 @@ public final synchronized void start( final Context context ) throws WikiExcepti
* Sets the Workflow in the {@link #WAITING} state. If the Workflow is not running or has already been paused, this method throws an
* IllegalStateException. Once paused, the Workflow can be un-paused by executing the {@link #restart(Context)} method.
*/
- public final synchronized void waitstate() {
- if ( m_state != RUNNING ) {
- throw new IllegalStateException( "Workflow is not running; cannot pause." );
+ public final void waitstate() {
+ lock.lock();
+ try {
+ if ( m_state != RUNNING ) {
+ throw new IllegalStateException( "Workflow is not running; cannot pause." );
+ }
+ m_state = WAITING;
+ WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.WAITING );
+ } finally {
+ lock.unlock();
}
- m_state = WAITING;
- WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.WAITING );
}
/**
@@ -577,11 +635,16 @@ protected void cleanup() {
* Protected helper method that changes the Workflow's state to {@link #COMPLETED} and sets the current Step to null. It
* calls the {@link #cleanup()} method to flush retained objects. This method will no-op if it has previously been called.
*/
- protected final synchronized void complete() {
- if( !isCompleted() ) {
- m_state = COMPLETED;
- WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.COMPLETED );
- cleanup();
+ protected final void complete() {
+ lock.lock();
+ try {
+ if( !isCompleted() ) {
+ m_state = COMPLETED;
+ WikiEventEmitter.fireWorkflowEvent( this, WorkflowEvent.COMPLETED );
+ cleanup();
+ }
+ } finally {
+ lock.unlock();
}
}
@@ -638,5 +701,4 @@ protected final void processCurrentStep( final Context context ) throws WikiExce
}
}
-
}
diff --git a/jspwiki-main/src/test/resources/wkflmgr.ser b/jspwiki-main/src/test/resources/wkflmgr.ser
index 4a69d74d0cd3f9790448f1b61b021f1f21b44920..82947dccae57294cdaed38d6a6c859fe0bfefcf1 100644
GIT binary patch
delta 982
zcmZ`&&1(}u6n~Rs(XOdh{Y*L_vELM2i1`9ts}XKS6M2V$DZ&4{yGH@BQBUy|?evA2X@fd!V!q
zLFWqspl54nTXh|nVB6^Qo9Pb$q_SUuOekOxvr1unV=Kul-w||`r4)>^?6LAfRow=x
z!mtSPHW}PPCzN>z*m~t&5u}@Voh-tTts5pKjyPUsdqZ1*
zT@Us=G~Uh^L9z8JUJ+ioB7|+7G^t5!EH3WKs4g`eDvnD`Ca9R|musMiSIh0k3`dzVX1&nzUkR>`eF_%CR5eW2nS@kquO3n~^6s
zef^cGJ)|O!tS77?RXso;o|YwRnN6$dw$;5L*KbIlXe-25(H5TsRuP57pRzTV;=0BK
zs9Tut{=OgXdpDMdL+c)h*oDCoyG}eP`>LiA1pxogK^WUruf@6KL0hLQb*XobU5TdK
z9bwBN)mA`QPr6d{Z$U*4B~cM-?Cf6n+yVD`3&2BR9?3WdumiH3<%~WF7seE3^?#+Z
z@6n5+>k{Wti0%)9#s^ArmD#!2NPLY#aG_Ph&V)cdWw&CJ%1b^7r{rwDiX9#G_cxr6
za-)fxQj<&UORR9bcY;KaO{om(F!NAj{C`TY^TP{w^2AxzjJjc#v1p@BYA#*|ZF2u2Le4f@?jcd`f3#Z~FnwgK8Vztc7h{qb{Fh2<>;`HMZn^f$Xg{`X&vAEAIb{LGHMT-?!E5r1nnT+v-NkFWJ7uk6AOo6@d;=G)yH77Y2e<4P|2
zumxxEoUx=YBm$1$7wrH(H72v?Z;Tn-HI6$A7U;gc>7_(%S}^R}Jd!T=JpxvhR45%G
z>1bPE`gRz(t`~+}goM>CsKi`sc5d?c67NYbQPg@80Lv<1cyIUvYvOWonmO1jK4hIo
zr8~3Cg3>-E5}HrQuBoaSx!0Axi=Rr>!|VTmjSCkQ$Bd0^r#1~JVh4+6jdk#ZSv6Y0
z9%P%-dJACiCq6dMSF-vFzAe*071zXXX4UcYC?pwcWD^B&rm|PQLy>3*r#|MF?mjjp
zrdZMcxf{qwCdVV9T_Y0*3DDlcXCtRC-&S}c!Tj69li4hk5(Qx@B_^S;oUS arg0 )
+ public void putAll(final Map< ? , ? > arg0 )
{
- // Shove all of the entries into the property string
- for (final Entry, ?> value : arg0.entrySet()) {
- @SuppressWarnings("unchecked") final Entry