Commit 92791be9 authored by Alejandro De Maria Antolinos

Merge branch 'master' into 'issue_1'

# Conflicts:
#   pom.xml
parents 30941a1c ba5246f7
# Patched
It allows the zip file to be created even when some of the files are missing.
It generates a report listing the missing files:
```
# Archiving Report
# Restoration completed successfully with warnings
[WARNING] Missing file /data/id30a1/inhouse/opid30a1/20181126/RAW_DATA/AFAMIN/AFAMIN-CD024584_B04-3_2/MXPressA_01/opid30a1-line-AFAMIN-CD024584_B04-3_2-line-AFAMIN-CD024584_B04-3_2_3_2450915.h5
[WARNING] Missing file /data/id30a1/inhouse/opid30a1/20181126/RAW_DATA/AFAMIN/AFAMIN-CD024584_B04-3_2/MXPressA_01/opid30a1-id30a1-line-AFAMIN-CD024584_B04-3_2_3_2450915.h5
```
\ No newline at end of file
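The patched behaviour can be pictured with a short sketch (the class and method names below are hypothetical, not the actual ids.server code): every file that exists goes into the zip, and every file that does not is skipped and collected into the warning report shown above.

```java
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class LenientArchiver {

    /** Zips every file that exists; returns warning lines for those that do not. */
    public static List<String> archive(OutputStream out, List<Path> files) throws IOException {
        List<String> warnings = new ArrayList<>();
        try (ZipOutputStream zos = new ZipOutputStream(out)) {
            for (Path file : files) {
                if (!Files.isRegularFile(file)) {
                    // Before the patch, a missing file aborted the whole operation.
                    warnings.add("[WARNING] Missing file " + file);
                    continue;
                }
                zos.putNextEntry(new ZipEntry(file.getFileName().toString()));
                Files.copy(file, zos);
                zos.closeEntry();
            }
        }
        return warnings;
    }
}
```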
@@ -11,7 +11,6 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<repoUrl>https://repo.icatproject.org/repo</repoUrl>
<storage.plugin>ids.storage_test-1.0.0-SNAPSHOT.jar</storage.plugin>
<project.scm.id>github</project.scm.id>
</properties>
@@ -32,9 +31,9 @@
<scm>
<connection>scm:git:https://github.com/icatproject/ids.server.git</connection>
<developerConnection>scm:git:https://github.com/icatproject/ids.server.git</developerConnection>
<developerConnection>scm:git:git@github.com:icatproject/ids.server.git</developerConnection>
<url>https://github.com/icatproject/ids.server</url>
<tag>v1.8.0</tag>
<tag>v1.9.1</tag>
</scm>
<issueManagement>
@@ -73,7 +72,7 @@
<dependency>
<groupId>org.icatproject</groupId>
<artifactId>icat.client</artifactId>
<version>4.8.0</version>
<version>4.10.0</version>
</dependency>
<dependency>
@@ -85,7 +84,7 @@
<dependency>
<groupId>org.icatproject</groupId>
<artifactId>ids.plugin</artifactId>
<version>1.3.1</version>
<version>1.5.0</version>
</dependency>
<dependency>
@@ -161,7 +160,7 @@
</execution>
</executions>
<configuration>
<licence>cHJvamVjdHxvcmcuaWNhdHByb2plY3QuaWRzLnNlcnZlcnwyMDE4LTA5LTE1fHRydWV8LTEjTUN3Q0ZGemNNQ0g2VWduNVNvbHFsVS8wOG1rRDN3ZFdBaFFnbHRNSVJvUFNuckRqekNjN3hpQTRGUGJqY3c9PQ==</licence>
<licence>cHJvamVjdHxvcmcuaWNhdHByb2plY3QuaWRzLnNlcnZlcnwyMDIyLTA0LTAxfGZhbHNlfC0xI01Dd0NGQjFzVFhsRS9CL1llTklZWndCZnB2UHlramcrQWhSYW5NR2JOS2craFZMUzJwRzZtcnZ3WXlEMzB3PT0=</licence>
<output>
<html>
<location>site/miredot</location>
@@ -198,18 +197,11 @@
<defaultMessage>Insufficient privileges.</defaultMessage>
</httpStatusCode>
<httpStatusCode>
<httpCode>404</httpCode>
<document>explicit:
org.icatproject.ids.exceptions.DataNotOnlineException</document>
<defaultMessage>Data not online or not found.</defaultMessage>
</httpStatusCode>
<httpStatusCode>
<httpCode>404</httpCode>
<document>explicit:
org.icatproject.ids.exceptions.NotFoundException</document>
<defaultMessage>Data not online or not found.</defaultMessage>
<defaultMessage>Data not found.</defaultMessage>
</httpStatusCode>
<httpStatusCode>
@@ -239,6 +231,13 @@
org.icatproject.ids.exceptions.NotImplementedException</document>
<defaultMessage>Not implemented.</defaultMessage>
</httpStatusCode>
<httpStatusCode>
<httpCode>503</httpCode>
<document>explicit:
org.icatproject.ids.exceptions.DataNotOnlineException</document>
<defaultMessage>Data not online.</defaultMessage>
</httpStatusCode>
</httpStatusCodes>
</restModel>
</configuration>
@@ -330,38 +329,16 @@
<version>1.4.0</version>
<executions>
<execution>
<id>Undeploy from glassfish</id>
<phase>pre-integration-test</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>asadmin</executable>
<arguments>
<argument>undeploy</argument>
<argument>ids.server-${project.version}</argument>
</arguments>
<successCodes>
<code>0</code>
<code>1</code>
</successCodes>
</configuration>
</execution>
<execution>
<id>Deploy to glassfish</id>
<id>Force install for first test</id>
<phase>pre-integration-test</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>asadmin</executable>
<executable>rm</executable>
<arguments>
<argument>deploy</argument>
<argument>--deploymentorder</argument>
<argument>120</argument>
<argument>--libraries</argument>
<argument>${storage.plugin}</argument>
<argument>target/ids.server-${project.version}.war</argument>
<argument>-f</argument>
<argument>src/test/install/run.properties</argument>
</arguments>
</configuration>
</execution>
@@ -394,6 +371,7 @@
<serverUrl>${serverUrl}</serverUrl>
<javax.net.ssl.trustStore>${javax.net.ssl.trustStore}</javax.net.ssl.trustStore>
<containerHome>${containerHome}</containerHome>
<testHome>${testHome}</testHome>
</systemPropertyVariables>
</configuration>
<executions>
@@ -23,4 +23,6 @@
<appender-ref ref="FILE" />
</root>
<logger name="org.icatproject.ids.FileChecker" level="INFO" />
</configuration>
@@ -18,7 +18,8 @@ maxIdsInQuery = 1000
# Properties for archive storage
plugin.archive.class = org.icatproject.ids.storage.ArchiveFileStorage
plugin.archive.dir = ${HOME}/ids/archive/
writeDelaySeconds = 60
delayDatasetWritesSeconds = 60
delayDatafileOperationsSeconds = 60
startArchivingLevel1024bytes = 5000000
stopArchivingLevel1024bytes = 4000000
storageUnit = dataset
@@ -109,7 +109,7 @@ public class DataSelection {
for (Long dfid : dfids) {
List<Object> dss = icat.search(sessionId,
"SELECT ds FROM Dataset ds JOIN ds.datafiles df WHERE df.id = " + dfid
+ " INCLUDE ds.investigation.facility");
+ " AND df.location IS NOT NULL INCLUDE ds.investigation.facility");
if (dss.size() == 1) {
Dataset ds = (Dataset) dss.get(0);
long dsid = ds.getId();
@@ -130,7 +130,7 @@ public class DataSelection {
Dataset ds = (Dataset) icat.get(sessionId, "Dataset ds INCLUDE ds.investigation.facility", dsid);
dsInfos.put(dsid, new DsInfoImpl(ds));
String query = "SELECT min(df.id), max(df.id), count(df.id) FROM Datafile df WHERE df.dataset.id = "
+ dsid;
+ dsid + " AND df.location IS NOT NULL";
JsonArray result = Json.createReader(new ByteArrayInputStream(restSession.search(query).getBytes()))
.readArray().getJsonArray(0);
if (result.getJsonNumber(2).longValueExact() == 0) { // Count 0
@@ -210,7 +210,7 @@ public class DataSelection {
visitId, facilityId, facilityName));
query = "SELECT min(df.id), max(df.id), count(df.id) FROM Datafile df WHERE df.dataset.id = "
+ dsid;
+ dsid + " AND df.location IS NOT NULL";
result = Json.createReader(new ByteArrayInputStream(restSession.search(query).getBytes()))
.readArray().getJsonArray(0);
if (result.getJsonNumber(2).longValueExact() == 0) {
@@ -245,7 +245,7 @@ public class DataSelection {
if (count != 0) {
if (count <= maxEntities) {
String query = "SELECT df.id, df.name, df.location, df.createId, df.modId FROM Datafile df WHERE df.dataset.id = "
+ dsid + " AND df.id BETWEEN " + min + " AND " + max;
+ dsid + " AND df.location IS NOT NULL AND df.id BETWEEN " + min + " AND " + max;
result = Json.createReader(new ByteArrayInputStream(restSession.search(query).getBytes())).readArray();
for (JsonValue tupV : result) {
JsonArray tup = (JsonArray) tupV;
@@ -257,12 +257,12 @@ public class DataSelection {
} else {
long half = (min + max) / 2;
String query = "SELECT min(df.id), max(df.id), count(df.id) FROM Datafile df WHERE df.dataset.id = "
+ dsid + " AND df.id BETWEEN " + min + " AND " + half;
+ dsid + " AND df.location IS NOT NULL AND df.id BETWEEN " + min + " AND " + half;
result = Json.createReader(new ByteArrayInputStream(restSession.search(query).getBytes())).readArray()
.getJsonArray(0);
manyDfs(dsid, result);
query = "SELECT min(df.id), max(df.id), count(df.id) FROM Datafile df WHERE df.dataset.id = " + dsid
+ " AND df.id BETWEEN " + (half + 1) + " AND " + max;
+ " AND df.location IS NOT NULL AND df.id BETWEEN " + (half + 1) + " AND " + max;
result = Json.createReader(new ByteArrayInputStream(restSession.search(query).getBytes())).readArray()
.getJsonArray(0);
manyDfs(dsid, result);
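All of the DataSelection hunks above make the same change: every datafile query gains `AND df.location IS NOT NULL`, so datafiles without a physical location are consistently excluded. The surrounding method enumerates large datasets with a divide-and-conquer over the datafile id range, so that no single query returns more than `maxEntities` rows. Here is a self-contained sketch of that splitting logic, with an in-memory id array standing in for the ICAT min/max/count query:

```java
import java.util.ArrayList;
import java.util.List;

/** Simplified sketch of the id-range splitting used in DataSelection. */
public class RangeSplitter {

    /** Splits [min, max] until each piece holds at most maxEntities ids. */
    public static List<long[]> split(long[] ids, long min, long max, int maxEntities) {
        List<long[]> ranges = new ArrayList<>();
        long count = countBetween(ids, min, max); // stands in for count(df.id)
        if (count == 0) {
            return ranges; // empty range: nothing to fetch
        }
        if (count <= maxEntities) {
            ranges.add(new long[] { min, max }); // one bounded SELECT fetches these rows
        } else {
            long half = (min + max) / 2; // same halving as in the diff above
            ranges.addAll(split(ids, min, half, maxEntities));
            ranges.addAll(split(ids, half + 1, max, maxEntities));
        }
        return ranges;
    }

    private static long countBetween(long[] ids, long min, long max) {
        long n = 0;
        for (long id : ids) {
            if (id >= min && id <= max) {
                n++;
            }
        }
        return n;
    }
}
```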
@@ -75,8 +75,13 @@ public class FileChecker {
if (twoLevel) {
Dataset ds = (Dataset) eb;
logger.info("Checking Dataset " + ds.getId() + " (" + ds.getName() + ")");
List<Datafile> dfs = ds.getDatafiles();
if (!dfs.isEmpty()) {
Map<String, CrcAndLength> crcAndLength = new HashMap<>();
for (Datafile df : ds.getDatafiles()) {
if (df.getLocation() != null) {
crcAndLength.put(df.getName(), new CrcAndLength(df));
}
}
if (!crcAndLength.isEmpty()) {
String dfName = null;
DsInfo dsInfo;
@@ -86,15 +91,11 @@ public class FileChecker {
report(ds, dfName, "Reports: " + e.getClass().getSimpleName() + " " + e.getMessage());
return;
}
Map<String, CrcAndLength> crcAndLength = new HashMap<>();
Path tPath = null;
try {
tPath = Files.createTempFile(null, null);
archiveStorage.get(dsInfo, tPath);
try (ZipInputStream zis = new ZipInputStream(Files.newInputStream(tPath))) {
for (Datafile df : dfs) {
crcAndLength.put(df.getName(), new CrcAndLength(df));
}
ZipEntry ze = zis.getNextEntry();
while (ze != null) {
dfName = zipMapper.getFileName(ze.getName());
@@ -231,10 +232,10 @@ public class FileChecker {
}
} else {
if (maxId != null) {
query = "SELECT df FROM Datafile df WHERE df.id > " + maxId + " ORDER BY df.id LIMIT 0, "
query = "SELECT df FROM Datafile df WHERE df.id > " + maxId + " AND df.location IS NOT NULL ORDER BY df.id LIMIT 0, "
+ filesCheckParallelCount;
} else {
query = "SELECT df FROM Datafile df ORDER BY df.id LIMIT 0, " + filesCheckParallelCount;
query = "SELECT df FROM Datafile df WHERE df.location IS NOT NULL ORDER BY df.id LIMIT 0, " + filesCheckParallelCount;
}
}
List<Object> os = reader.search(query);
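Two things change in FileChecker: the map of expected checksums is now built up front, only from datafiles whose location is not null (so a dataset holding nothing but location-less files is skipped before anything is fetched from archive storage), and the background query likewise filters on `df.location IS NOT NULL`. A self-contained sketch of the verification step itself, with hypothetical names:

```java
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

/** Sketch: compare each zip entry's CRC and length against expected values. */
public class ZipVerifier {

    public static final class CrcAndLength {
        final long crc;
        final long length;

        public CrcAndLength(long crc, long length) {
            this.crc = crc;
            this.length = length;
        }
    }

    public static Map<String, String> verify(InputStream archive,
            Map<String, CrcAndLength> expected) throws IOException {
        Map<String, String> problems = new HashMap<>();
        try (ZipInputStream zis = new ZipInputStream(archive)) {
            ZipEntry ze;
            while ((ze = zis.getNextEntry()) != null) {
                CRC32 crc = new CRC32();
                long length = 0;
                byte[] buf = new byte[8192];
                int n;
                while ((n = zis.read(buf)) >= 0) { // reads to the end of this entry
                    crc.update(buf, 0, n);
                    length += n;
                }
                CrcAndLength want = expected.remove(ze.getName());
                if (want == null) {
                    problems.put(ze.getName(), "unexpected entry");
                } else if (want.length != length || want.crc != crc.getValue()) {
                    problems.put(ze.getName(), "crc or length mismatch");
                }
            }
        }
        // anything left over was expected but absent from the archive
        for (String name : expected.keySet()) {
            problems.put(name, "missing from archive");
        }
        return problems;
    }
}
```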
@@ -796,4 +796,40 @@ public class IdsService {
idsBean.restore(sessionId, investigationIds, datasetIds, datafileIds, request.getRemoteAddr());
}
/**
* Write data specified by the investigationIds, datasetIds
* and datafileIds, along with a sessionId, to archive
* storage. If two level storage is not in use this has no
* effect.
*
* @summary write
*
* @param sessionId
* A sessionId returned by a call to the icat server.
* @param investigationIds
* If present, a comma separated list of investigation id values
* @param datasetIds
If present, a comma separated list of data set id values
* @param datafileIds
* If present, a comma separated list of datafile id values.
*
* @throws BadRequestException
* @throws InsufficientPrivilegesException
* @throws InternalException
* @throws NotFoundException
*
* @statuscode 200 To indicate success
*/
@POST
@Path("write")
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
public void write(@Context HttpServletRequest request, @FormParam("sessionId") String sessionId,
@FormParam("investigationIds") String investigationIds, @FormParam("datasetIds") String datasetIds,
@FormParam("datafileIds") String datafileIds)
throws BadRequestException, InsufficientPrivilegesException, InternalException, NotFoundException,
DataNotOnlineException {
idsBean.write(sessionId, investigationIds, datasetIds, datafileIds, request.getRemoteAddr());
}
}
\ No newline at end of file
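A sketch of how a client might invoke the new endpoint; the path segment `write` and the form parameters come from the annotations above, while the host and the `ids` context root are placeholders:

```java
import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class WriteCallSketch {

    public static void main(String[] args) throws IOException {
        // Placeholder host and context root; real deployments differ.
        URL url = new URL("https://ids.example.org/ids/write");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        // args[0]: a sessionId from the icat server; args[1]: comma separated dataset ids.
        // Values should be URL-encoded in real code.
        String form = "sessionId=" + args[0] + "&datasetIds=" + args[1];
        try (OutputStream os = conn.getOutputStream()) {
            os.write(form.getBytes(StandardCharsets.UTF_8));
        }
        System.out.println("HTTP " + conn.getResponseCode()); // 200 indicates success
    }
}
```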
package org.icatproject.ids;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.ejb.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.icatproject.ids.plugin.AlreadyLockedException;
import org.icatproject.ids.plugin.DsInfo;
import org.icatproject.ids.plugin.MainStorageInterface;
@Singleton
public class LockManager {
public enum LockType {
SHARED, EXCLUSIVE
}
public class LockInfo {
public final Long id;
public final LockType type;
public final int count;
LockInfo(LockEntry le) {
this.id = le.id;
this.type = le.type;
this.count = le.count;
}
}
private class LockEntry {
final Long id;
final LockType type;
int count;
LockEntry(Long id, LockType type) {
this.id = id;
this.type = type;
this.count = 0;
lockMap.put(id, this);
}
void inc() {
count += 1;
}
void dec() {
assert count > 0;
count -= 1;
if (count == 0) {
lockMap.remove(id);
}
}
}
/**
* Define the common interface of SingleLock and LockCollection
*/
public abstract class Lock implements AutoCloseable {
public abstract void release();
public void close() {
release();
}
}
private class SingleLock extends Lock {
private final Long id;
private boolean isValid;
private AutoCloseable storageLock;
SingleLock(Long id, AutoCloseable storageLock) {
this.id = id;
this.isValid = true;
this.storageLock = storageLock;
}
public void release() {
synchronized (lockMap) {
if (isValid) {
lockMap.get(id).dec();
isValid = false;
if (storageLock != null) {
try {
storageLock.close();
} catch (Exception e) {
logger.error("Error while closing lock on {} in the storage plugin: {}.", id, e.getMessage());
}
}
logger.debug("Released a lock on {}.", id);
}
}
}
}
private class LockCollection extends Lock {
private ArrayList<Lock> locks;
LockCollection() {
locks = new ArrayList<>();
}
void add(Lock l) {
locks.add(l);
}
public void release() {
for (Lock l : locks) {
l.release();
}
}
}
private static Logger logger = LoggerFactory.getLogger(LockManager.class);
private PropertyHandler propertyHandler;
private MainStorageInterface mainStorage;
private Map<Long, LockEntry> lockMap = new HashMap<>();
@PostConstruct
private void init() {
propertyHandler = PropertyHandler.getInstance();
mainStorage = propertyHandler.getMainStorage();
logger.debug("LockManager initialized.");
}
public Lock lock(DsInfo ds, LockType type) throws AlreadyLockedException, IOException {
Long id = ds.getDsId();
assert id != null;
synchronized (lockMap) {
LockEntry le = lockMap.get(id);
if (le == null) {
le = new LockEntry(id, type);
} else {
if (type == LockType.EXCLUSIVE || le.type == LockType.EXCLUSIVE) {
throw new AlreadyLockedException();
}
}
le.inc();
AutoCloseable storageLock;
try {
storageLock = mainStorage.lock(ds, type == LockType.SHARED);
} catch (AlreadyLockedException | IOException e) {
le.dec();
throw e;
}
logger.debug("Acquired a {} lock on {}.", type, id);
return new SingleLock(id, storageLock);
}
}
public Lock lock(Collection<DsInfo> datasets, LockType type) throws AlreadyLockedException, IOException {
LockCollection locks = new LockCollection();
try {
for (DsInfo ds : datasets) {
locks.add(lock(ds, type));
}
} catch (AlreadyLockedException | IOException e) {
locks.release();
throw e;
}
return locks;
}
public Collection<LockInfo> getLockInfo() {
Collection<LockInfo> lockInfo = new ArrayList<>();
synchronized (lockMap) {
for (LockEntry le : lockMap.values()) {
lockInfo.add(new LockInfo(le));
}
return lockInfo;
}
}
}
\ No newline at end of file
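Because `Lock` implements `AutoCloseable` and `close()` delegates to `release()`, callers are expected to hold locks in a try-with-resources block. A usage sketch (in real code `lockManager` would be container-injected; `dsInfo` is assumed to be in scope):

```java
import java.io.IOException;

import org.icatproject.ids.plugin.AlreadyLockedException;
import org.icatproject.ids.plugin.DsInfo;

public class LockUsageSketch {

    void readDataset(LockManager lockManager, DsInfo dsInfo) throws IOException {
        try (LockManager.Lock lock = lockManager.lock(dsInfo, LockManager.LockType.SHARED)) {
            // Read from main storage: while this SHARED lock is held, an archiver
            // cannot obtain the EXCLUSIVE lock it needs to delete these files.
        } catch (AlreadyLockedException e) {
            // An EXCLUSIVE lock is already held on this dataset; give up or retry later.
        }
    }
}
```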
@@ -68,7 +68,8 @@ public class PropertyHandler {
private long startArchivingLevel;
private long stopArchivingLevel;
private StorageUnit storageUnit;
private long writeDelaySeconds;
private long delayDatasetWrites;
private long delayDatafileOperations;
private ZipMapperInterface zipMapper;
private int tidyBlockSize;
private String key;
@@ -145,8 +146,6 @@
if (!props.has("plugin.archive.class")) {
logger.info("Property plugin.archive.class not set, single storage enabled.");
} else {
writeDelaySeconds = props.getPositiveLong("writeDelaySeconds");
try {
Class<ArchiveStorageInterface> klass = (Class<ArchiveStorageInterface>) Class
.forName(props.getString("plugin.archive.class"));
@@ -174,6 +173,23 @@
}
abort("storageUnit value " + props.getString("storageUnit") + " must be taken from " + vs);
}
if (storageUnit == StorageUnit.DATASET) {
if (!props.has("delayDatasetWritesSeconds") && props.has("writeDelaySeconds")) {
// compatibility mode
logger.warn("writeDelaySeconds is deprecated, please use delayDatasetWritesSeconds instead");
delayDatasetWrites = props.getPositiveLong("writeDelaySeconds");
} else {
delayDatasetWrites = props.getPositiveLong("delayDatasetWritesSeconds");
}
} else if (storageUnit == StorageUnit.DATAFILE) {
if (!props.has("delayDatafileOperationsSeconds") && props.has("writeDelaySeconds")) {
// compatibility mode
logger.warn("writeDelaySeconds is deprecated, please use delayDatafileOperationsSeconds instead");
delayDatafileOperations = props.getPositiveLong("writeDelaySeconds");
} else {
delayDatafileOperations = props.getPositiveLong("delayDatafileOperationsSeconds");
}
}
tidyBlockSize = props.getPositiveInt("tidyBlockSize");
}
@@ -375,8 +391,12 @@
return tidyBlockSize;
}
public long getWriteDelaySeconds() {
return writeDelaySeconds;
public long getDelayDatasetWrites() {
return delayDatasetWrites;
}
public long getDelayDatafileOperations() {
return delayDatafileOperations;
}
public ZipMapperInterface getZipMapper() {
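In configuration terms, the old and new run.properties forms relate as follows (values are illustrative); the deprecated `writeDelaySeconds` is still honoured, but only when the matching new property is absent, and it logs a warning:

```properties
# Old style: still accepted, but logs a deprecation warning
writeDelaySeconds = 60

# New style: which property is read depends on storageUnit
storageUnit = dataset
# used when storageUnit = dataset
delayDatasetWritesSeconds = 60
# used when storageUnit = datafile
delayDatafileOperationsSeconds = 60
```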
@@ -170,36 +170,36 @@ public class Tidier {
}
}
private boolean addStringConstraint(StringBuilder sb, String var, String value, boolean andNeeded) {
if (value != null) {
if (andNeeded) {
sb.append(" AND ");
} else {
sb.append(" ");
andNeeded = true;
}
sb.append(var + " = '" + value + "'");
}
private final static Logger logger = LoggerFactory.getLogger(Tidier.class);
static boolean addStringConstraint(StringBuilder sb, String var, String value, boolean andNeeded) {
if (value != null) {
if (andNeeded) {
sb.append(" AND ");
} else {
sb.append(" ");
andNeeded = true;
}
return andNeeded;
sb.append(var + " = '" + value.replaceAll("'", "''") + "'");
}
return andNeeded;
}
private boolean addNumericConstraint(StringBuilder sb, String var, Long value, boolean andNeeded) {
if (value != null) {
if (andNeeded) {
sb.append(" AND ");
} else {
sb.append(" ");
andNeeded = true;
}
sb.append(var + " = " + value);
static boolean addNumericConstraint(StringBuilder sb, String var, Long value, boolean andNeeded) {
if (value != null) {
if (andNeeded) {
sb.append(" AND ");
} else {
sb.append(" ");
andNeeded = true;
}
return andNeeded;
sb.append(var + " = " + value);
}
return andNeeded;
}
private final static Logger logger = LoggerFactory.getLogger(Tidier.class);;
static void cleanPreparedDir(Path preparedDir, int preparedCount) throws IOException {
Map<Long, Path> dateMap = new HashMap<>();
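Besides becoming static (so they can be called without a Tidier instance), `addStringConstraint` now doubles any single quote in the value, so a quote can no longer break out of the JPQL string literal. A usage sketch, callable from within the same package since the helpers are package-private:

```java
// Builds a WHERE clause incrementally; note the doubled quotes in the output.
StringBuilder sb = new StringBuilder("SELECT ds FROM Dataset ds WHERE");
boolean andNeeded = false;
andNeeded = Tidier.addStringConstraint(sb, "ds.name", "O'Brien's run", andNeeded);
andNeeded = Tidier.addNumericConstraint(sb, "ds.investigation.id", 42L, andNeeded);
// sb is now:
// SELECT ds FROM Dataset ds WHERE ds.name = 'O''Brien''s run' AND ds.investigation.id = 42
```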
@@ -38,7 +38,7 @@ public class Transmitter {
.lookup(propertyHandler.getJmsTopicConnectionFactory());
topicConnection = topicConnectionFactory.createTopicConnection();
topic = (Topic) ic.lookup("jms/IDS/log");
logger.info("Transmitter created");
logger.info("Notification Transmitter created");
} catch (JMSException | NamingException e) {
logger.error(fatal, "Problem with JMS " + e);
throw new IllegalStateException(e.getMessage());
@@ -52,15 +52,14 @@
if (topicConnection != null) {
topicConnection.close();
}
logger.info("Transmitter closing down");
logger.info("Notification Transmitter closing down");
} catch (JMSException e) {
throw new IllegalStateException(e.getMessage());
}
}
public void processMessage(String operation, String ip, String body, long startMillis) {
try {
Session jmsSession = topicConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
try (Session jmsSession = topicConnection.createSession(false, Session.AUTO_ACKNOWLEDGE)) {
TextMessage jmsg = jmsSession.createTextMessage(body);
jmsg.setStringProperty("operation", operation);
jmsg.setStringProperty("ip", ip);
@@ -69,7 +68,6 @@
MessageProducer jmsProducer = jmsSession.createProducer(topic);
jmsProducer.send(jmsg);
logger.debug("Sent jms message " + operation + " " + ip);
jmsSession.close();
} catch (JMSException e) {
logger.error("Failed to send jms message " + operation + " " + ip);
}
@@ -6,7 +6,7 @@ import java.net.HttpURLConnection;
public class DataNotOnlineException extends IdsException {
public DataNotOnlineException(String msg) {
super(HttpURLConnection.HTTP_NOT_FOUND, msg);
super(HttpURLConnection.HTTP_UNAVAILABLE, msg);
}
}
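For clients, the effect of this change is that 404 now unambiguously means the data does not exist, while 503 means the data exists but must first be restored from archive storage. A hypothetical client-side sketch:

```java
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public class StatusHandlingSketch {

    /** Returns true if the caller should retry later (data still being restored). */
    static boolean shouldRetry(String getDataUrl) throws IOException {
        HttpURLConnection conn = (HttpURLConnection) new URL(getDataUrl).openConnection();
        int code = conn.getResponseCode();
        if (code == HttpURLConnection.HTTP_UNAVAILABLE) { // 503: data not online yet
            return true;
        }
        if (code == HttpURLConnection.HTTP_NOT_FOUND) { // 404: data really absent
            throw new IOException("no such data");
        }
        return false; // e.g. 200: data is ready
    }
}
```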
@@ -2,9 +2,11 @@ package org.icatproject.ids.thread;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.List;
import org.icatproject.ids.FiniteStateMachine;
import org.icatproject.ids.LockManager.Lock;
import org.icatproject.ids.PropertyHandler;
import org.icatproject.ids.plugin.DfInfo;
import org.icatproject.ids.plugin.MainStorageInterface;
@@ -21,30 +23,38 @@ public class DfArchiver {
private FiniteStateMachine fsm;
private List<DfInfo> dfInfos;
private Path markerDir;
private Collection<Lock> locks;
public DfArchiver(List<DfInfo> dfInfos, PropertyHandler propertyHandler, FiniteStateMachine fsm) {
public DfArchiver(List<DfInfo> dfInfos, PropertyHandler propertyHandler, FiniteStateMachine fsm, Collection<Lock> locks) {
this.dfInfos = dfInfos;
this.fsm = fsm;
this.locks = locks;
mainStorageInterface = propertyHandler.getMainStorage();
markerDir = propertyHandler.getCacheDir().resolve("marker");
}
@Override
public void run() {
for (DfInfo dfInfo : dfInfos) {
try {
if (Files.exists(markerDir.resolve(Long.toString(dfInfo.getDfId())))) {
logger.error("Archive of " + dfInfo
+ " not carried out because a write to secondary storage operation failed previously");
} else {
String dfLocation = dfInfo.getDfLocation();
mainStorageInterface.delete(dfLocation, dfInfo.getCreateId(), dfInfo.getModId());
logger.debug("Archive of " + dfInfo + " completed");
try {
for (DfInfo dfInfo : dfInfos) {
try {
if (Files.exists(markerDir.resolve(Long.toString(dfInfo.getDfId())))) {
logger.error("Archive of " + dfInfo
+ " not carried out because a write to secondary storage operation failed previously");
} else {
String dfLocation = dfInfo.getDfLocation();
mainStorageInterface.delete(dfLocation, dfInfo.getCreateId(), dfInfo.getModId());
logger.debug("Archive of " + dfInfo + " completed");
}
} catch (Exception e) {
logger.error("Archive of " + dfInfo + " failed due to " + e.getClass() + " " + e.getMessage());
} finally {
fsm.removeFromChanging(dfInfo);