dhis2-devs team mailing list archive: Message #35707
[Branch ~dhis2-devs-core/dhis2/trunk] Rev 18277: remove ExportOptions class, replace with IdSchemes
------------------------------------------------------------
revno: 18277
committer: Morten Olav Hansen <mortenoh@xxxxxxxxx>
branch nick: dhis2
timestamp: Mon 2015-02-16 16:43:40 +0700
message:
remove ExportOptions class, replace with IdSchemes
removed:
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/metadata/ExportOptions.java
modified:
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetStore.java
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/synch/DefaultSynchronizationManager.java
dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/DataValueSetController.java
dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ExportDataValueAction.java
--
lp:dhis2
https://code.launchpad.net/~dhis2-devs-core/dhis2/trunk
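This commit replaces the ExportOptions parameter object with IdSchemes
throughout the data value set export call chain. A minimal sketch of a
migrated caller, assuming the IdSchemes setters used in
ExportDataValueAction further down in this diff:

    import org.hisp.dhis.common.IdentifiableProperty;
    import org.hisp.dhis.dxf2.utils.IdSchemes;

    // Sketch only: before this commit the caller would construct
    // new ExportOptions( dataElementIdScheme, orgUnitIdScheme,
    // categoryOptionComboIdScheme ); after it, the same information
    // travels in an IdSchemes instance.
    IdSchemes idSchemes = new IdSchemes();
    idSchemes.setDataElementIdScheme( IdentifiableProperty.UID );
    idSchemes.setOrgUnitIdScheme( IdentifiableProperty.UID );
    idSchemes.setCategoryOptionComboIdScheme( IdentifiableProperty.UID );

    dataValueSetService.writeDataValueSetXml( dataSet, period, orgUnit, out, idSchemes );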
=== modified file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java 2015-01-17 07:41:26 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java 2015-02-16 09:43:40 +0000
@@ -28,6 +28,14 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
+import org.hisp.dhis.dataset.DataSet;
+import org.hisp.dhis.dxf2.importsummary.ImportSummary;
+import org.hisp.dhis.dxf2.metadata.ImportOptions;
+import org.hisp.dhis.dxf2.utils.IdSchemes;
+import org.hisp.dhis.node.types.RootNode;
+import org.hisp.dhis.period.Period;
+import org.hisp.dhis.scheduling.TaskId;
+
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
@@ -35,35 +43,26 @@
import java.util.List;
import java.util.Set;
-import org.hisp.dhis.dataset.DataSet;
-import org.hisp.dhis.dxf2.importsummary.ImportSummary;
-import org.hisp.dhis.dxf2.metadata.ExportOptions;
-import org.hisp.dhis.dxf2.metadata.ImportOptions;
-import org.hisp.dhis.node.types.RootNode;
-import org.hisp.dhis.period.Period;
-import org.hisp.dhis.scheduling.TaskId;
-
/**
* @author Lars Helge Overland
*/
public interface DataValueSetService
{
- void writeDataValueSetXml( String dataSet, String period, String orgUnit, OutputStream out, ExportOptions exportOptions );
-
- void writeDataValueSetXml( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits, boolean includeChildren, OutputStream out, ExportOptions exportOptions );
-
- void writeDataValueSetJson( String ds, String period, String ou, OutputStream outputStream, ExportOptions exportOptions );
-
- void writeDataValueSetJson( Set<String> dataSet, Date startDate, Date endDate, Set<String> ous, boolean includeChildren, OutputStream outputStream, ExportOptions exportOptions );
-
- void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, ExportOptions exportOptions );
-
- void writeDataValueSetCsv( String dataSet, String period, String orgUnit, Writer writer, ExportOptions exportOptions );
-
- void writeDataValueSetCsv( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits, boolean includeChildren, Writer writer, ExportOptions exportOptions );
-
- RootNode getDataValueSetTemplate( DataSet dataSet, Period period, List<String> orgUnits,
- boolean writeComments, String ouScheme, String deScheme );
+ void writeDataValueSetXml( String dataSet, String period, String orgUnit, OutputStream out, IdSchemes idSchemes );
+
+ void writeDataValueSetXml( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits, boolean includeChildren, OutputStream out, IdSchemes idSchemes );
+
+ void writeDataValueSetJson( String ds, String period, String ou, OutputStream outputStream, IdSchemes idSchemes );
+
+ void writeDataValueSetJson( Set<String> dataSet, Date startDate, Date endDate, Set<String> ous, boolean includeChildren, OutputStream outputStream, IdSchemes idSchemes );
+
+ void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, IdSchemes idSchemes );
+
+ void writeDataValueSetCsv( String dataSet, String period, String orgUnit, Writer writer, IdSchemes idSchemes );
+
+ void writeDataValueSetCsv( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits, boolean includeChildren, Writer writer, IdSchemes idSchemes );
+
+ RootNode getDataValueSetTemplate( DataSet dataSet, Period period, List<String> orgUnits, boolean writeComments, String ouScheme, String deScheme );
ImportSummary saveDataValueSet( InputStream in );
@@ -72,7 +71,7 @@
ImportSummary saveDataValueSet( InputStream in, ImportOptions importOptions );
ImportSummary saveDataValueSetJson( InputStream in, ImportOptions importOptions );
-
+
ImportSummary saveDataValueSetCsv( InputStream in, ImportOptions importOptions );
ImportSummary saveDataValueSet( InputStream in, ImportOptions importOptions, TaskId taskId );
=== modified file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetStore.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetStore.java 2015-01-17 07:41:26 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetStore.java 2015-02-16 09:43:40 +0000
@@ -28,29 +28,29 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
+import org.hisp.dhis.dataset.DataSet;
+import org.hisp.dhis.dxf2.utils.IdSchemes;
+import org.hisp.dhis.organisationunit.OrganisationUnit;
+import org.hisp.dhis.period.Period;
+
import java.io.OutputStream;
import java.io.Writer;
import java.util.Date;
import java.util.Set;
-import org.hisp.dhis.dataset.DataSet;
-import org.hisp.dhis.dxf2.metadata.ExportOptions;
-import org.hisp.dhis.organisationunit.OrganisationUnit;
-import org.hisp.dhis.period.Period;
-
/**
* @author Lars Helge Overland
*/
public interface DataValueSetStore
{
public void writeDataValueSetXml( Set<DataSet> dataSets, Date completeDate, Period period,
- OrganisationUnit orgUnit, Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream out, ExportOptions exportOptions );
+ OrganisationUnit orgUnit, Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream out, IdSchemes idSchemes );
public void writeDataValueSetJson( Set<DataSet> dataSets, Date completeDate, Period period,
- OrganisationUnit orgUnit, Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream out, ExportOptions exportOptions );
-
- public void writeDataValueSetCsv( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
- Set<Period> periods, Set<OrganisationUnit> orgUnits, Writer writer, ExportOptions exportOptions );
-
- void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, ExportOptions exportOptions );
+ OrganisationUnit orgUnit, Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream out, IdSchemes idSchemes );
+
+ public void writeDataValueSetCsv( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
+ Set<Period> periods, Set<OrganisationUnit> orgUnits, Writer writer, IdSchemes idSchemes );
+
+ void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, IdSchemes idSchemes );
}
=== modified file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java 2015-02-13 13:17:36 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java 2015-02-16 09:43:40 +0000
@@ -28,36 +28,16 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-import static com.google.common.collect.Sets.newHashSet;
-import static org.apache.commons.lang.StringUtils.trimToNull;
-import static org.hisp.dhis.common.IdentifiableProperty.UUID;
-import static org.hisp.dhis.system.notification.NotificationLevel.ERROR;
-import static org.hisp.dhis.system.notification.NotificationLevel.INFO;
-import static org.hisp.dhis.system.util.ConversionUtils.wrap;
-import static org.hisp.dhis.system.util.DateUtils.getDefaultDate;
-import static org.hisp.dhis.system.util.DateUtils.parseDate;
-
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Writer;
-import java.nio.charset.Charset;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.csvreader.CsvReader;
import org.amplecode.quick.BatchHandler;
import org.amplecode.quick.BatchHandlerFactory;
import org.amplecode.staxwax.factory.XMLFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.common.DxfNamespaces;
-import org.hisp.dhis.common.IdentifiableProperty;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.common.IdentifiableObjectUtils;
+import org.hisp.dhis.common.IdentifiableProperty;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataelement.DataElementCategoryService;
@@ -70,9 +50,9 @@
import org.hisp.dhis.dxf2.importsummary.ImportCount;
import org.hisp.dhis.dxf2.importsummary.ImportStatus;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
-import org.hisp.dhis.dxf2.metadata.ExportOptions;
import org.hisp.dhis.dxf2.metadata.ImportOptions;
import org.hisp.dhis.dxf2.pdfform.PdfDataEntryFormUtil;
+import org.hisp.dhis.dxf2.utils.IdSchemes;
import org.hisp.dhis.dxf2.utils.JacksonUtils;
import org.hisp.dhis.i18n.I18n;
import org.hisp.dhis.i18n.I18nManager;
@@ -95,7 +75,26 @@
import org.hisp.dhis.user.CurrentUserService;
import org.springframework.beans.factory.annotation.Autowired;
-import com.csvreader.CsvReader;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.nio.charset.Charset;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static com.google.common.collect.Sets.newHashSet;
+import static org.apache.commons.lang.StringUtils.trimToNull;
+import static org.hisp.dhis.common.IdentifiableProperty.UUID;
+import static org.hisp.dhis.system.notification.NotificationLevel.ERROR;
+import static org.hisp.dhis.system.notification.NotificationLevel.INFO;
+import static org.hisp.dhis.system.util.ConversionUtils.wrap;
+import static org.hisp.dhis.system.util.DateUtils.getDefaultDate;
+import static org.hisp.dhis.system.util.DateUtils.parseDate;
/**
* @author Lars Helge Overland
@@ -109,7 +108,7 @@
private static final String ERROR_INVALID_PERIOD = "Invalid period: ";
private static final String ERROR_INVALID_ORG_UNIT = "Invalid org unit: ";
private static final String ERROR_OBJECT_NEEDED_TO_COMPLETE = "Must be provided to complete data set";
-
+
@Autowired
private IdentifiableObjectManager identifiableObjectManager;
@@ -136,7 +135,7 @@
@Autowired
private DataValueSetStore dataValueSetStore;
-
+
@Autowired
private I18nManager i18nManager;
@@ -157,7 +156,7 @@
//--------------------------------------------------------------------------
@Override
- public void writeDataValueSetXml( String dataSet, String period, String orgUnit, OutputStream out, ExportOptions exportOptions )
+ public void writeDataValueSetXml( String dataSet, String period, String orgUnit, OutputStream out, IdSchemes idSchemes )
{
DataSet dataSet_ = dataSetService.getDataSet( dataSet );
Period period_ = PeriodType.getPeriodFromIsoString( period );
@@ -188,12 +187,12 @@
period_ = periodService.reloadPeriod( period_ );
dataValueSetStore.writeDataValueSetXml( newHashSet( dataSet_ ), completeDate, period_, orgUnit_, wrap( period_ ),
- wrap( orgUnit_ ), out, exportOptions );
+ wrap( orgUnit_ ), out, idSchemes );
}
@Override
- public void writeDataValueSetXml( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits,
- boolean includeChildren, OutputStream out, ExportOptions exportOptions )
+ public void writeDataValueSetXml( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits,
+ boolean includeChildren, OutputStream out, IdSchemes idSchemes )
{
Set<DataSet> ds = new HashSet<>( dataSetService.getDataSetsByUid( dataSets ) );
Set<Period> pe = new HashSet<>( periodService.getPeriodsBetweenDates( startDate, endDate ) );
@@ -203,27 +202,27 @@
{
throw new IllegalArgumentException( "At least one data set must be specified" );
}
-
+
if ( pe.isEmpty() )
{
throw new IllegalArgumentException( "At least one period must be specified" );
}
-
+
if ( ou.isEmpty() )
{
throw new IllegalArgumentException( "At least one organisation unit must be specified" );
}
-
+
if ( includeChildren )
{
ou = new HashSet<>( organisationUnitService.getOrganisationUnitsWithChildren( IdentifiableObjectUtils.getUids( ou ) ) );
}
-
- dataValueSetStore.writeDataValueSetXml( ds, null, null, null, pe, ou, out, exportOptions );
+
+ dataValueSetStore.writeDataValueSetXml( ds, null, null, null, pe, ou, out, idSchemes );
}
@Override
- public void writeDataValueSetJson( String dataSet, String period, String orgUnit, OutputStream outputStream, ExportOptions exportOptions )
+ public void writeDataValueSetJson( String dataSet, String period, String orgUnit, OutputStream outputStream, IdSchemes idSchemes )
{
DataSet dataSet_ = dataSetService.getDataSet( dataSet );
Period period_ = PeriodType.getPeriodFromIsoString( period );
@@ -254,18 +253,18 @@
period_ = periodService.reloadPeriod( period_ );
dataValueSetStore.writeDataValueSetJson( newHashSet( dataSet_ ), completeDate, period_, orgUnit_, wrap( period_ ),
- wrap( orgUnit_ ), outputStream, exportOptions );
+ wrap( orgUnit_ ), outputStream, idSchemes );
}
@Override
- public void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, ExportOptions exportOptions )
+ public void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, IdSchemes idSchemes )
{
- dataValueSetStore.writeDataValueSetJson( lastUpdated, outputStream, exportOptions );
+ dataValueSetStore.writeDataValueSetJson( lastUpdated, outputStream, idSchemes );
}
@Override
public void writeDataValueSetJson( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits,
- boolean includeChildren, OutputStream outputStream, ExportOptions exportOptions )
+ boolean includeChildren, OutputStream outputStream, IdSchemes idSchemes )
{
Set<DataSet> ds = new HashSet<>( dataSetService.getDataSetsByUid( dataSets ) );
Set<Period> pe = new HashSet<>( periodService.getPeriodsBetweenDates( startDate, endDate ) );
@@ -275,27 +274,27 @@
{
throw new IllegalArgumentException( "At least one data set must be specified" );
}
-
+
if ( pe.isEmpty() )
{
throw new IllegalArgumentException( "At least one period must be specified" );
}
-
+
if ( ou.isEmpty() )
{
throw new IllegalArgumentException( "At least one organisation unit must be specified" );
}
-
+
if ( includeChildren )
{
ou = new HashSet<>( organisationUnitService.getOrganisationUnitsWithChildren( IdentifiableObjectUtils.getUids( ou ) ) );
}
- dataValueSetStore.writeDataValueSetJson( ds, null, null, null, pe, ou, outputStream, exportOptions );
+ dataValueSetStore.writeDataValueSetJson( ds, null, null, null, pe, ou, outputStream, idSchemes );
}
@Override
- public void writeDataValueSetCsv( String dataSet, String period, String orgUnit, Writer writer, ExportOptions exportOptions )
+ public void writeDataValueSetCsv( String dataSet, String period, String orgUnit, Writer writer, IdSchemes idSchemes )
{
DataSet dataSet_ = dataSetService.getDataSet( dataSet );
Period period_ = PeriodType.getPeriodFromIsoString( period );
@@ -326,12 +325,12 @@
period_ = periodService.reloadPeriod( period_ );
dataValueSetStore.writeDataValueSetCsv( newHashSet( dataSet_ ), completeDate, period_, orgUnit_, wrap( period_ ),
- wrap( orgUnit_ ), writer, exportOptions );
+ wrap( orgUnit_ ), writer, idSchemes );
}
@Override
public void writeDataValueSetCsv( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits,
- boolean includeChildren, Writer writer, ExportOptions exportOptions )
+ boolean includeChildren, Writer writer, IdSchemes idSchemes )
{
Set<DataSet> ds = new HashSet<>( dataSetService.getDataSetsByUid( dataSets ) );
Set<Period> pe = new HashSet<>( periodService.getPeriodsBetweenDates( startDate, endDate ) );
@@ -341,23 +340,23 @@
{
throw new IllegalArgumentException( "At least one data set must be specified" );
}
-
+
if ( pe.isEmpty() )
{
throw new IllegalArgumentException( "At least one period must be specified" );
}
-
+
if ( ou.isEmpty() )
{
throw new IllegalArgumentException( "At least one organisation unit must be specified" );
}
-
+
if ( includeChildren )
{
ou = new HashSet<>( organisationUnitService.getOrganisationUnitsWithChildren( IdentifiableObjectUtils.getUids( ou ) ) );
}
- dataValueSetStore.writeDataValueSetCsv( ds, null, null, null, pe, ou, writer, exportOptions );
+ dataValueSetStore.writeDataValueSetCsv( ds, null, null, null, pe, ou, writer, idSchemes );
}
@Override
@@ -567,18 +566,18 @@
* a generic id scheme for all objects. The specific id schemes will take
* precedence over the generic id scheme. The generic id scheme also applies
* to data set and category option combo.
- *
+ * <p/>
* The id schemes uses the following order of precedence:
- *
+ * <p/>
* <ul>
* <li>Id scheme from the data value set</li>
* <li>Id scheme from the import options</li>
* <li>Default id scheme which is UID</li>
* <ul>
- *
+ * <p/>
* If id scheme is specific in the data value set, any id schemes in the import
* options will be ignored.
- *
+ *
* @param importOptions
* @param id
* @param dataValueSet
@@ -590,13 +589,13 @@
notifier.clear( id ).notify( id, "Process started" );
ImportSummary summary = new ImportSummary();
-
+
I18n i18n = i18nManager.getI18n();
importOptions = importOptions != null ? importOptions : ImportOptions.getDefaultImportOptions();
log.info( "Import options: " + importOptions );
-
+
//----------------------------------------------------------------------
// Get id scheme
//----------------------------------------------------------------------
@@ -604,21 +603,21 @@
IdentifiableProperty dvSetIdScheme = dataValueSet.getIdSchemeProperty();
IdentifiableProperty dvSetDataElementIdScheme = dataValueSet.getDataElementIdSchemeProperty();
IdentifiableProperty dvSetOrgUnitIdScheme = dataValueSet.getOrgUnitIdSchemeProperty();
-
+
log.info( "Data value set scheme: " + dvSetIdScheme + ", data element scheme: " + dvSetDataElementIdScheme + ", org unit scheme: " + dvSetOrgUnitIdScheme );
-
- IdentifiableProperty idScheme = dvSetIdScheme != null ? dvSetIdScheme : importOptions.getIdScheme();
+
+ IdentifiableProperty idScheme = dvSetIdScheme != null ? dvSetIdScheme : importOptions.getIdScheme();
IdentifiableProperty dataElementIdScheme = dvSetDataElementIdScheme != null ? dvSetDataElementIdScheme : importOptions.getDataElementIdScheme();
IdentifiableProperty orgUnitIdScheme = dvSetOrgUnitIdScheme != null ? dvSetOrgUnitIdScheme : importOptions.getOrgUnitIdScheme();
-
+
log.info( "Scheme: " + idScheme + ", data element scheme: " + dataElementIdScheme + ", org unit scheme: " + orgUnitIdScheme );
-
+
boolean dryRun = dataValueSet.getDryRun() != null ? dataValueSet.getDryRun() : importOptions.isDryRun();
-
+
ImportStrategy strategy = dataValueSet.getStrategy() != null ?
ImportStrategy.valueOf( dataValueSet.getStrategy() ) :
importOptions.getImportStrategy();
-
+
boolean skipExistingCheck = importOptions.isSkipExistingCheck();
//----------------------------------------------------------------------
@@ -660,19 +659,19 @@
summary.getConflicts().add( new ImportConflict( dataValueSet.getDataSet(), "Data set not found or not accessible" ) );
summary.setStatus( ImportStatus.ERROR );
}
-
+
if ( outerOrgUnit == null && trimToNull( dataValueSet.getOrgUnit() ) != null )
{
summary.getConflicts().add( new ImportConflict( dataValueSet.getDataSet(), "Org unit not found or not accessible" ) );
- summary.setStatus( ImportStatus.ERROR );
+ summary.setStatus( ImportStatus.ERROR );
}
-
+
if ( outerAttrOptionCombo == null && trimToNull( dataValueSet.getAttributeOptionCombo() ) != null )
{
summary.getConflicts().add( new ImportConflict( dataValueSet.getDataSet(), "Attribute option combo not found or not accessible" ) );
- summary.setStatus( ImportStatus.ERROR );
+ summary.setStatus( ImportStatus.ERROR );
}
-
+
if ( ImportStatus.ERROR.equals( summary.getStatus() ) )
{
summary.setDescription( "Import process was aborted" );
@@ -680,7 +679,7 @@
dataValueSet.close();
return summary;
}
-
+
if ( dataSet != null && completeDate != null )
{
notifier.notify( id, "Completing data set" );
@@ -704,7 +703,7 @@
// ---------------------------------------------------------------------
Date now = new Date();
-
+
notifier.notify( id, "Importing data values" );
log.info( "Importing data values" );
@@ -720,7 +719,7 @@
Period period = outerPeriod != null ? outerPeriod : PeriodType.getPeriodFromIsoString( trimToNull( dataValue.getPeriod() ) );
OrganisationUnit orgUnit = outerOrgUnit != null ? outerOrgUnit : orgUnitMap.get( trimToNull( dataValue.getOrgUnit() ) );
DataElementCategoryOptionCombo categoryOptionCombo = categoryOptionComboMap.get( trimToNull( dataValue.getCategoryOptionCombo() ) );
- DataElementCategoryOptionCombo attrOptionCombo = outerAttrOptionCombo != null ? outerAttrOptionCombo :
+ DataElementCategoryOptionCombo attrOptionCombo = outerAttrOptionCombo != null ? outerAttrOptionCombo :
categoryOptionComboMap.get( trimToNull( dataValue.getAttributeOptionCombo() ) );
// -----------------------------------------------------------------
@@ -750,13 +749,13 @@
summary.getConflicts().add( new ImportConflict( dataValue.getCategoryOptionCombo(), "Category option combo not found or not accessible" ) );
continue;
}
-
+
if ( attrOptionCombo == null && trimToNull( dataValue.getAttributeOptionCombo() ) != null )
{
summary.getConflicts().add( new ImportConflict( dataValue.getAttributeOptionCombo(), "Attribute option combo not found or not accessible" ) );
continue;
}
-
+
if ( categoryOptionCombo == null )
{
categoryOptionCombo = fallbackCategoryOptionCombo;
@@ -806,7 +805,7 @@
internalValue.setValue( trimToNull( dataValue.getValue() ) );
String storedByValid = ValidationUtils.storedByIsValid( dataValue.getStoredBy() );
-
+
if ( dataValue.getStoredBy() == null || dataValue.getStoredBy().trim().isEmpty() )
{
internalValue.setStoredBy( currentUser );
@@ -827,7 +826,7 @@
internalValue.setFollowup( dataValue.getFollowup() );
boolean zeroInsignificant = ValidationUtils.dataValueIsZeroAndInsignificant( internalValue.getValue(), dataElement );
-
+
if ( zeroInsignificant )
{
summary.getConflicts().add( new ImportConflict( internalValue.getValue(), "Value is zero and not significant" ) );
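The javadoc hunk above spells out the import-side precedence: an id scheme
declared in the data value set itself wins over the import options, with UID
as the final default (the default import options presumably supply UID, per
the javadoc's "Default id scheme which is UID"). The same rule, restated as
a hypothetical helper, not part of the commit, to make the inline ternaries
easier to follow:

    // Hypothetical helper equivalent to the ternaries in saveDataValueSet():
    // the data value set's own scheme takes precedence and the import
    // options supply the fallback.
    private IdentifiableProperty resolveIdScheme( IdentifiableProperty fromDataValueSet,
        IdentifiableProperty fromImportOptions )
    {
        return fromDataValueSet != null ? fromDataValueSet : fromImportOptions;
    }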
=== modified file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java 2015-01-17 07:41:26 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java 2015-02-16 09:43:40 +0000
@@ -28,25 +28,13 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-import static org.hisp.dhis.common.IdentifiableObjectUtils.getIdentifiers;
-import static org.hisp.dhis.system.util.DateUtils.getLongDateString;
-import static org.hisp.dhis.system.util.TextUtils.getCommaDelimitedString;
-
-import java.io.OutputStream;
-import java.io.Writer;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.Set;
-
+import com.csvreader.CsvWriter;
import org.amplecode.staxwax.factory.XMLFactory;
import org.hisp.dhis.calendar.Calendar;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dxf2.datavalue.DataValue;
-import org.hisp.dhis.dxf2.metadata.ExportOptions;
+import org.hisp.dhis.dxf2.utils.IdSchemes;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
@@ -56,7 +44,18 @@
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowCallbackHandler;
-import com.csvreader.CsvWriter;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.hisp.dhis.common.IdentifiableObjectUtils.getIdentifiers;
+import static org.hisp.dhis.system.util.DateUtils.getLongDateString;
+import static org.hisp.dhis.system.util.TextUtils.getCommaDelimitedString;
/**
* @author Lars Helge Overland
@@ -68,65 +67,65 @@
@Autowired
private JdbcTemplate jdbcTemplate;
-
+
//--------------------------------------------------------------------------
// DataValueSetStore implementation
//--------------------------------------------------------------------------
@Override
public void writeDataValueSetXml( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
- Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream out, ExportOptions exportOptions )
+ Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream out, IdSchemes idSchemes )
{
DataValueSet dataValueSet = new StreamingDataValueSet( XMLFactory.getXMLWriter( out ) );
- writeDataValueSet( getDataValueSql( dataSets, periods, orgUnits, exportOptions ), dataSets, completeDate, period, orgUnit, dataValueSet );
+ writeDataValueSet( getDataValueSql( dataSets, periods, orgUnits, idSchemes ), dataSets, completeDate, period, orgUnit, dataValueSet );
StreamUtils.closeOutputStream( out );
}
@Override
- public void writeDataValueSetJson( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
- Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream outputStream, ExportOptions exportOptions )
+ public void writeDataValueSetJson( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
+ Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream outputStream, IdSchemes idSchemes )
{
DataValueSet dataValueSet = new StreamingJsonDataValueSet( outputStream );
- writeDataValueSet( getDataValueSql( dataSets, periods, orgUnits, exportOptions ), dataSets, completeDate, period, orgUnit, dataValueSet );
+ writeDataValueSet( getDataValueSql( dataSets, periods, orgUnits, idSchemes ), dataSets, completeDate, period, orgUnit, dataValueSet );
StreamUtils.closeOutputStream( outputStream );
}
@Override
- public void writeDataValueSetCsv( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
- Set<Period> periods, Set<OrganisationUnit> orgUnits, Writer writer, ExportOptions exportOptions )
+ public void writeDataValueSetCsv( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
+ Set<Period> periods, Set<OrganisationUnit> orgUnits, Writer writer, IdSchemes idSchemes )
{
DataValueSet dataValueSet = new StreamingCsvDataValueSet( new CsvWriter( writer, CSV_DELIM ) );
- writeDataValueSet( getDataValueSql( dataSets, periods, orgUnits, exportOptions ), dataSets, completeDate, period, orgUnit, dataValueSet );
-
+ writeDataValueSet( getDataValueSql( dataSets, periods, orgUnits, idSchemes ), dataSets, completeDate, period, orgUnit, dataValueSet );
+
StreamUtils.closeWriter( writer );
}
@Override
- public void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, ExportOptions exportOptions )
+ public void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, IdSchemes idSchemes )
{
- String deScheme = exportOptions.getDataElementIdSchemeFallback().toString().toLowerCase();
- String ouScheme = exportOptions.getOrgUnitIdSchemeFallback().toString().toLowerCase();
- String ocScheme = exportOptions.getCategoryOptionComboIdSchemeFallback().toString().toLowerCase();
-
+ String deScheme = idSchemes.getDataElementIdScheme().toString().toLowerCase();
+ String ouScheme = idSchemes.getOrgUnitIdScheme().toString().toLowerCase();
+ String ocScheme = idSchemes.getCategoryOptionComboIdScheme().toString().toLowerCase();
+
DataValueSet dataValueSet = new StreamingJsonDataValueSet( outputStream );
final String sql =
"select de." + deScheme + " as deid, pe.startdate as pestart, pt.name as ptname, ou." + ouScheme + " as ouid, " +
- "coc." + ocScheme + " as cocid, aoc." + ocScheme + " as aocid, " +
- "dv.value, dv.storedby, dv.created, dv.lastupdated, dv.comment, dv.followup " +
- "from datavalue dv " +
- "join dataelement de on (dv.dataelementid=de.dataelementid) " +
- "join period pe on (dv.periodid=pe.periodid) " +
- "join periodtype pt on (pe.periodtypeid=pt.periodtypeid) " +
- "join organisationunit ou on (dv.sourceid=ou.organisationunitid) " +
- "join categoryoptioncombo coc on (dv.categoryoptioncomboid=coc.categoryoptioncomboid) " +
- "join categoryoptioncombo aoc on (dv.attributeoptioncomboid=aoc.categoryoptioncomboid) " +
- "where dv.lastupdated >= '" + DateUtils.getLongDateString( lastUpdated ) + "'";
+ "coc." + ocScheme + " as cocid, aoc." + ocScheme + " as aocid, " +
+ "dv.value, dv.storedby, dv.created, dv.lastupdated, dv.comment, dv.followup " +
+ "from datavalue dv " +
+ "join dataelement de on (dv.dataelementid=de.dataelementid) " +
+ "join period pe on (dv.periodid=pe.periodid) " +
+ "join periodtype pt on (pe.periodtypeid=pt.periodtypeid) " +
+ "join organisationunit ou on (dv.sourceid=ou.organisationunitid) " +
+ "join categoryoptioncombo coc on (dv.categoryoptioncomboid=coc.categoryoptioncomboid) " +
+ "join categoryoptioncombo aoc on (dv.attributeoptioncomboid=aoc.categoryoptioncomboid) " +
+ "where dv.lastupdated >= '" + DateUtils.getLongDateString( lastUpdated ) + "'";
writeDataValueSet( sql, null, null, null, null, dataValueSet );
}
@@ -134,13 +133,13 @@
private void writeDataValueSet( String sql, Set<DataSet> dataSets, Date completeDate, Period period,
OrganisationUnit orgUnit, final DataValueSet dataValueSet )
{
- dataValueSet.setDataSet( ( dataSets != null && dataSets.size() == 1 ) ? dataSets.iterator().next().getUid() : null );
+ dataValueSet.setDataSet( (dataSets != null && dataSets.size() == 1) ? dataSets.iterator().next().getUid() : null );
dataValueSet.setCompleteDate( getLongDateString( completeDate ) );
dataValueSet.setPeriod( period != null ? period.getIsoDate() : null );
dataValueSet.setOrgUnit( orgUnit != null ? orgUnit.getUid() : null );
final Calendar calendar = PeriodType.getCalendar();
-
+
jdbcTemplate.query( sql, new RowCallbackHandler()
{
@Override
@@ -161,49 +160,49 @@
dataValue.setComment( rs.getString( "comment" ) );
dataValue.setFollowup( rs.getBoolean( "followup" ) );
dataValue.close();
- }
+ }
} );
-
+
dataValueSet.close();
}
-
+
//--------------------------------------------------------------------------
// DataValueSetStore implementation
//--------------------------------------------------------------------------
- private String getDataValueSql( Set<DataSet> dataSets, Collection<Period> periods, Collection<OrganisationUnit> orgUnits, ExportOptions exportOptions )
+ private String getDataValueSql( Set<DataSet> dataSets, Collection<Period> periods, Collection<OrganisationUnit> orgUnits, IdSchemes idSchemes )
{
- exportOptions = exportOptions != null ? exportOptions : new ExportOptions();
-
- String deScheme = exportOptions.getDataElementIdSchemeFallback().toString().toLowerCase();
- String ouScheme = exportOptions.getOrgUnitIdSchemeFallback().toString().toLowerCase();
- String ocScheme = exportOptions.getCategoryOptionComboIdSchemeFallback().toString().toLowerCase();
-
+ idSchemes = idSchemes != null ? idSchemes : new IdSchemes();
+
+ String deScheme = idSchemes.getDataElementIdScheme().toString().toLowerCase();
+ String ouScheme = idSchemes.getOrgUnitIdScheme().toString().toLowerCase();
+ String ocScheme = idSchemes.getCategoryOptionComboIdScheme().toString().toLowerCase();
+
return
"select de." + deScheme + " as deid, pe.startdate as pestart, pt.name as ptname, ou." + ouScheme + " as ouid, " +
- "coc." + ocScheme + " as cocid, aoc." + ocScheme + " as aocid, " +
- "dv.value, dv.storedby, dv.created, dv.lastupdated, dv.comment, dv.followup " +
- "from datavalue dv " +
- "join dataelement de on (dv.dataelementid=de.dataelementid) " +
- "join period pe on (dv.periodid=pe.periodid) " +
- "join periodtype pt on (pe.periodtypeid=pt.periodtypeid) " +
- "join organisationunit ou on (dv.sourceid=ou.organisationunitid) " +
- "join categoryoptioncombo coc on (dv.categoryoptioncomboid=coc.categoryoptioncomboid) " +
- "join categoryoptioncombo aoc on (dv.attributeoptioncomboid=aoc.categoryoptioncomboid) " +
- "where de.dataelementid in (" + getCommaDelimitedString( getIdentifiers( getDataElements( dataSets ) ) ) + ") " +
- "and dv.periodid in (" + getCommaDelimitedString( getIdentifiers( periods ) ) + ") " +
- "and dv.sourceid in (" + getCommaDelimitedString( getIdentifiers( orgUnits ) ) + ")";
+ "coc." + ocScheme + " as cocid, aoc." + ocScheme + " as aocid, " +
+ "dv.value, dv.storedby, dv.created, dv.lastupdated, dv.comment, dv.followup " +
+ "from datavalue dv " +
+ "join dataelement de on (dv.dataelementid=de.dataelementid) " +
+ "join period pe on (dv.periodid=pe.periodid) " +
+ "join periodtype pt on (pe.periodtypeid=pt.periodtypeid) " +
+ "join organisationunit ou on (dv.sourceid=ou.organisationunitid) " +
+ "join categoryoptioncombo coc on (dv.categoryoptioncomboid=coc.categoryoptioncomboid) " +
+ "join categoryoptioncombo aoc on (dv.attributeoptioncomboid=aoc.categoryoptioncomboid) " +
+ "where de.dataelementid in (" + getCommaDelimitedString( getIdentifiers( getDataElements( dataSets ) ) ) + ") " +
+ "and dv.periodid in (" + getCommaDelimitedString( getIdentifiers( periods ) ) + ") " +
+ "and dv.sourceid in (" + getCommaDelimitedString( getIdentifiers( orgUnits ) ) + ")";
}
-
+
private Set<DataElement> getDataElements( Set<DataSet> dataSets )
{
Set<DataElement> elements = new HashSet<>();
-
+
for ( DataSet dataSet : dataSets )
{
elements.addAll( dataSet.getDataElements() );
}
-
+
return elements;
}
}
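Note how getDataValueSql() lowercases the scheme name and interpolates it
directly into the select list as a column name, so an IdSchemes value must
correspond to an actual column on dataelement, organisationunit and
categoryoptioncombo. For illustration, with the defaults:

    // Illustration only: with default (UID) schemes the generated select
    // list reads "select de.uid as deid, ... ou.uid as ouid, coc.uid as
    // cocid ..."; a CODE scheme would swap in "de.code", and so on.
    String deScheme = idSchemes.getDataElementIdScheme().toString().toLowerCase(); // "uid"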
=== removed file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/metadata/ExportOptions.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/metadata/ExportOptions.java 2015-01-17 07:41:26 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/metadata/ExportOptions.java 1970-01-01 00:00:00 +0000
@@ -1,122 +0,0 @@
-package org.hisp.dhis.dxf2.metadata;
-
-/*
- * Copyright (c) 2004-2015, University of Oslo
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- * Redistributions of source code must retain the above copyright notice, this
- * list of conditions and the following disclaimer.
- *
- * Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * Neither the name of the HISP project nor the names of its contributors may
- * be used to endorse or promote products derived from this software without
- * specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
- * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
- * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
- * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
- * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
- * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
- * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-import org.hisp.dhis.common.IdentifiableProperty;
-
-/**
- * @author Lars Helge Overland
- */
-public class ExportOptions
-{
- private IdentifiableProperty dataElementIdScheme;
-
- private IdentifiableProperty orgUnitIdScheme;
-
- private IdentifiableProperty categoryOptionComboIdScheme;
-
- // -------------------------------------------------------------------------
- // Constructors
- // -------------------------------------------------------------------------
-
- public ExportOptions()
- {
- }
-
- public ExportOptions( IdentifiableProperty dataElementIdScheme, IdentifiableProperty orgUnitIdScheme, IdentifiableProperty categoryOptionComboIdScheme )
- {
- this.dataElementIdScheme = dataElementIdScheme;
- this.orgUnitIdScheme = orgUnitIdScheme;
- this.categoryOptionComboIdScheme = categoryOptionComboIdScheme;
- }
-
- // -------------------------------------------------------------------------
- // toString
- // -------------------------------------------------------------------------
-
- public String toString()
- {
- return "[Data element id scheme: " + dataElementIdScheme +
- ", org unit id scheme: " + orgUnitIdScheme +
- ", category option combo id scheme: " + categoryOptionComboIdScheme + "]";
- }
-
- // -------------------------------------------------------------------------
- // Logic
- // -------------------------------------------------------------------------
-
- public IdentifiableProperty getDataElementIdSchemeFallback()
- {
- return dataElementIdScheme != null ? dataElementIdScheme : IdentifiableProperty.UID;
- }
-
- public IdentifiableProperty getOrgUnitIdSchemeFallback()
- {
- return orgUnitIdScheme != null ? orgUnitIdScheme : IdentifiableProperty.UID;
- }
-
- public IdentifiableProperty getCategoryOptionComboIdSchemeFallback()
- {
- return categoryOptionComboIdScheme != null ? categoryOptionComboIdScheme : IdentifiableProperty.UID;
- }
-
- // -------------------------------------------------------------------------
- // Getters and setters
- // -------------------------------------------------------------------------
-
- public IdentifiableProperty getDataElementIdScheme()
- {
- return dataElementIdScheme;
- }
-
- public void setDataElementIdScheme( IdentifiableProperty dataElementIdScheme )
- {
- this.dataElementIdScheme = dataElementIdScheme;
- }
-
- public IdentifiableProperty getOrgUnitIdScheme()
- {
- return orgUnitIdScheme;
- }
-
- public void setOrgUnitIdScheme( IdentifiableProperty orgUnitIdScheme )
- {
- this.orgUnitIdScheme = orgUnitIdScheme;
- }
-
- public IdentifiableProperty getCategoryOptionComboIdScheme()
- {
- return categoryOptionComboIdScheme;
- }
-
- public void setCategoryOptionComboIdScheme( IdentifiableProperty categoryOptionComboIdScheme )
- {
- this.categoryOptionComboIdScheme = categoryOptionComboIdScheme;
- }
-}
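The removed class carried explicit get...Fallback() methods that defaulted
to UID. The new call sites in SpringDataValueSetStore call
idSchemes.getDataElementIdScheme() and friends directly, which suggests
IdSchemes (not itself shown in this diff) folds that UID fallback into its
plain getters. An assumed sketch of that shape:

    // Assumed, since IdSchemes is not part of this diff: the UID fallback
    // moves from ExportOptions' get...Fallback() methods into the plain
    // getter.
    public IdentifiableProperty getDataElementIdScheme()
    {
        return dataElementIdScheme != null ? dataElementIdScheme : IdentifiableProperty.UID;
    }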
=== modified file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/synch/DefaultSynchronizationManager.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/synch/DefaultSynchronizationManager.java 2015-01-17 07:41:26 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/synch/DefaultSynchronizationManager.java 2015-02-16 09:43:40 +0000
@@ -28,12 +28,6 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-import static org.apache.commons.lang.StringUtils.trimToNull;
-import static org.hisp.dhis.setting.SystemSettingManager.KEY_LAST_SUCCESSFUL_DATA_SYNC;
-
-import java.io.IOException;
-import java.util.Date;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.configuration.Configuration;
@@ -42,7 +36,7 @@
import org.hisp.dhis.dxf2.datavalueset.DataValueSetService;
import org.hisp.dhis.dxf2.importsummary.ImportStatus;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
-import org.hisp.dhis.dxf2.metadata.ExportOptions;
+import org.hisp.dhis.dxf2.utils.IdSchemes;
import org.hisp.dhis.dxf2.utils.ImportSummaryResponseExtractor;
import org.hisp.dhis.setting.SystemSettingManager;
import org.hisp.dhis.system.util.CodecUtils;
@@ -63,6 +57,12 @@
import org.springframework.web.client.ResponseExtractor;
import org.springframework.web.client.RestTemplate;
+import java.io.IOException;
+import java.util.Date;
+
+import static org.apache.commons.lang.StringUtils.trimToNull;
+import static org.hisp.dhis.setting.SystemSettingManager.KEY_LAST_SUCCESSFUL_DATA_SYNC;
+
/**
* @author Lars Helge Overland
*/
@@ -70,25 +70,25 @@
implements SynchronizationManager
{
private static final Log log = LogFactory.getLog( DefaultSynchronizationManager.class );
-
+
private static final String PING_PATH = "/api/system/ping";
private static final String HEADER_AUTHORIZATION = "Authorization";
-
+
@Autowired
private DataValueSetService dataValueSetService;
-
+
@Autowired
private DataValueService dataValueService;
-
+
@Autowired
private ConfigurationService configurationService;
-
+
@Autowired
private SystemSettingManager systemSettingManager;
-
+
@Autowired
private RestTemplate restTemplate;
-
+
@Autowired
private TaskScheduler taskScheduler;
@@ -100,23 +100,23 @@
public AvailabilityStatus isRemoteServerAvailable()
{
Configuration config = configurationService.getConfiguration();
-
+
if ( !isRemoteServerConfigured( config ) )
{
return new AvailabilityStatus( false, "Remote server is not configured" );
- }
-
+ }
+
String url = config.getRemoteServerUrl() + PING_PATH;
-
+
log.info( "Remote server ping URL: " + url + ", username: " + config.getRemoteServerUsername() );
-
+
HttpEntity<String> request = getBasicAuthRequestEntity( config.getRemoteServerUsername(), config.getRemoteServerPassword() );
-
+
ResponseEntity<String> response = null;
HttpStatus sc = null;
String st = null;
AvailabilityStatus status = null;
-
+
try
{
response = restTemplate.exchange( url, HttpMethod.GET, request, String.class );
@@ -132,13 +132,13 @@
sc = ex.getStatusCode();
st = ex.getStatusText();
}
- catch( ResourceAccessException ex )
+ catch ( ResourceAccessException ex )
{
return new AvailabilityStatus( false, "Network is unreachable" );
}
-
+
log.info( "Response: " + response + ", status code: " + sc );
-
+
if ( HttpStatus.FOUND.equals( sc ) )
{
status = new AvailabilityStatus( false, "Server is available but no authentication was provided, status code: " + sc );
@@ -150,7 +150,7 @@
else if ( HttpStatus.INTERNAL_SERVER_ERROR.equals( sc ) )
{
status = new AvailabilityStatus( false, "Server is available but experienced an internal error, status code: " + sc );
- }
+ }
else if ( HttpStatus.OK.equals( sc ) )
{
status = new AvailabilityStatus( true, "Server is available and authentication was successful" );
@@ -159,18 +159,18 @@
{
status = new AvailabilityStatus( false, "Server is not available, status code: " + sc + ", text: " + st );
}
-
+
log.info( status );
-
+
return status;
}
-
+
@Override
public ImportSummary executeDataSynch()
throws HttpServerErrorException
{
AvailabilityStatus availability = isRemoteServerAvailable();
-
+
if ( !availability.isAvailable() )
{
log.info( "Aborting synch, server not available" );
@@ -184,11 +184,11 @@
final Date startTime = new Date();
final Date lastSuccessTime = getLastSynchSuccessFallback();
-
+
int lastUpdatedCount = dataValueService.getDataValueCountLastUpdatedAfter( lastSuccessTime );
-
+
log.info( "Values: " + lastUpdatedCount + " since last synch success: " + lastSuccessTime );
-
+
if ( lastUpdatedCount == 0 )
{
log.info( "Skipping synch, no new or updated data values" );
@@ -196,43 +196,44 @@
}
final Configuration config = configurationService.getConfiguration();
-
+
String url = config.getRemoteServerUrl() + "/api/dataValueSets";
-
+
log.info( "Remote server POST URL: " + url );
-
- final RequestCallback requestCallback = new RequestCallback() {
-
+
+ final RequestCallback requestCallback = new RequestCallback()
+ {
+
@Override
public void doWithRequest( ClientHttpRequest request ) throws IOException
{
request.getHeaders().setContentType( MediaType.APPLICATION_JSON );
request.getHeaders().add( HEADER_AUTHORIZATION, CodecUtils.getBasicAuthString( config.getRemoteServerUsername(), config.getRemoteServerPassword() ) );
- dataValueSetService.writeDataValueSetJson( lastSuccessTime, request.getBody(), new ExportOptions() );
- }
+ dataValueSetService.writeDataValueSetJson( lastSuccessTime, request.getBody(), new IdSchemes() );
+ }
};
-
+
ResponseExtractor<ImportSummary> responseExtractor = new ImportSummaryResponseExtractor();
-
+
ImportSummary summary = restTemplate.execute( url, HttpMethod.POST, requestCallback, responseExtractor );
-
+
log.info( "Synch summary: " + summary );
-
+
if ( summary != null && ImportStatus.SUCCESS.equals( summary.getStatus() ) )
{
- setLastSynchSuccess( startTime );
- log.info( "Synch successful, setting last success time: " + startTime );
+ setLastSynchSuccess( startTime );
+ log.info( "Synch successful, setting last success time: " + startTime );
}
-
+
return summary;
}
-
+
@Override
public Date getLastSynchSuccess()
{
return (Date) systemSettingManager.getSystemSetting( KEY_LAST_SUCCESSFUL_DATA_SYNC );
}
-
+
// -------------------------------------------------------------------------
// Supportive methods
// -------------------------------------------------------------------------
@@ -244,7 +245,7 @@
private Date getLastSynchSuccessFallback()
{
Date fallback = new DateTime().minusDays( 3 ).toDate();
-
+
return (Date) systemSettingManager.getSystemSetting( KEY_LAST_SUCCESSFUL_DATA_SYNC, fallback );
}
@@ -266,24 +267,24 @@
log.info( "Remote server URL not set" );
return false;
}
-
+
if ( trimToNull( config.getRemoteServerUsername() ) == null || trimToNull( config.getRemoteServerPassword() ) == null )
{
log.info( "Remote server username or password not set" );
return false;
}
-
+
return true;
}
-
+
/**
- * Creates an HTTP entity for requests with appropriate header for basic
+ * Creates an HTTP entity for requests with appropriate header for basic
* authentication.
*/
private <T> HttpEntity<T> getBasicAuthRequestEntity( String username, String password )
{
HttpHeaders headers = new HttpHeaders();
- headers.set( HEADER_AUTHORIZATION, CodecUtils.getBasicAuthString( username, password ) );
+ headers.set( HEADER_AUTHORIZATION, CodecUtils.getBasicAuthString( username, password ) );
return new HttpEntity<>( headers );
}
}
=== modified file 'dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/DataValueSetController.java'
--- dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/DataValueSetController.java 2015-01-17 07:41:26 +0000
+++ dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/DataValueSetController.java 2015-02-16 09:43:40 +0000
@@ -32,8 +32,8 @@
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.dxf2.datavalueset.DataValueSetService;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
-import org.hisp.dhis.dxf2.metadata.ExportOptions;
import org.hisp.dhis.dxf2.metadata.ImportOptions;
+import org.hisp.dhis.dxf2.utils.IdSchemes;
import org.hisp.dhis.dxf2.utils.JacksonUtils;
import org.hisp.dhis.webapi.utils.ContextUtils;
import org.springframework.beans.factory.annotation.Autowired;
@@ -77,8 +77,7 @@
@RequestParam( required = false ) @DateTimeFormat( pattern = "yyyy-MM-dd" ) Date endDate,
@RequestParam Set<String> orgUnit,
@RequestParam( required = false ) boolean children,
- ExportOptions exportOptions,
- HttpServletResponse response ) throws IOException
+ IdSchemes idSchemes, HttpServletResponse response ) throws IOException
{
response.setContentType( CONTENT_TYPE_XML );
@@ -91,13 +90,13 @@
log.debug( "Get XML data value set for data set: " + ds + ", period: " + period + ", org unit: " + ou );
- dataValueSetService.writeDataValueSetXml( ds, period, ou, response.getOutputStream(), exportOptions );
+ dataValueSetService.writeDataValueSetXml( ds, period, ou, response.getOutputStream(), idSchemes );
}
else
{
log.debug( "Get XML bulk data value set for start date: " + startDate + ", end date: " + endDate );
- dataValueSetService.writeDataValueSetXml( dataSet, startDate, endDate, orgUnit, children, response.getOutputStream(), exportOptions );
+ dataValueSetService.writeDataValueSetXml( dataSet, startDate, endDate, orgUnit, children, response.getOutputStream(), idSchemes );
}
}
@@ -109,8 +108,7 @@
@RequestParam( required = false ) @DateTimeFormat( pattern = "yyyy-MM-dd" ) Date endDate,
@RequestParam Set<String> orgUnit,
@RequestParam( required = false ) boolean children,
- ExportOptions exportOptions,
- HttpServletResponse response ) throws IOException
+ IdSchemes idSchemes, HttpServletResponse response ) throws IOException
{
response.setContentType( CONTENT_TYPE_JSON );
@@ -123,13 +121,13 @@
log.debug( "Get JSON data value set for data set: " + ds + ", period: " + period + ", org unit: " + ou );
- dataValueSetService.writeDataValueSetJson( ds, period, ou, response.getOutputStream(), exportOptions );
+ dataValueSetService.writeDataValueSetJson( ds, period, ou, response.getOutputStream(), idSchemes );
}
else
{
log.debug( "Get JSON bulk data value set for start date: " + startDate + ", end date: " + endDate );
- dataValueSetService.writeDataValueSetJson( dataSet, startDate, endDate, orgUnit, children, response.getOutputStream(), exportOptions );
+ dataValueSetService.writeDataValueSetJson( dataSet, startDate, endDate, orgUnit, children, response.getOutputStream(), idSchemes );
}
}
@@ -141,7 +139,7 @@
@RequestParam( required = false ) @DateTimeFormat( pattern = "yyyy-MM-dd" ) Date endDate,
@RequestParam Set<String> orgUnit,
@RequestParam( required = false ) boolean children,
- ExportOptions exportOptions,
+ IdSchemes idSchemes,
HttpServletResponse response ) throws IOException
{
response.setContentType( CONTENT_TYPE_CSV );
@@ -155,13 +153,13 @@
log.debug( "Get CSV data value set for data set: " + ds + ", period: " + period + ", org unit: " + ou );
- dataValueSetService.writeDataValueSetCsv( ds, period, ou, response.getWriter(), exportOptions );
+ dataValueSetService.writeDataValueSetCsv( ds, period, ou, response.getWriter(), idSchemes );
}
else
{
log.debug( "Get CSV bulk data value set for start date: " + startDate + ", end date: " + endDate );
- dataValueSetService.writeDataValueSetCsv( dataSet, startDate, endDate, orgUnit, children, response.getWriter(), exportOptions );
+ dataValueSetService.writeDataValueSetCsv( dataSet, startDate, endDate, orgUnit, children, response.getWriter(), idSchemes );
}
}
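In the controller, IdSchemes takes ExportOptions' place as an unannotated
handler method parameter, so Spring MVC data binding populates it from query
parameters matching its setters. A hypothetical request, with parameter
names assumed to mirror the setters:

    GET /api/dataValueSets.xml?dataSet=<uid>&period=<iso-period>&orgUnit=<uid>&dataElementIdScheme=CODE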
=== modified file 'dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ExportDataValueAction.java'
--- dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ExportDataValueAction.java 2015-01-17 07:41:26 +0000
+++ dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ExportDataValueAction.java 2015-02-16 09:43:40 +0000
@@ -28,31 +28,26 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-import static org.hisp.dhis.system.util.CodecUtils.filenameEncode;
-import static org.hisp.dhis.system.util.DateUtils.getMediumDate;
-import static org.hisp.dhis.util.ContextUtils.CONTENT_TYPE_CSV;
-import static org.hisp.dhis.util.ContextUtils.CONTENT_TYPE_XML;
-import static org.hisp.dhis.util.ContextUtils.CONTENT_TYPE_JSON;
-import static org.hisp.dhis.util.ContextUtils.getZipOut;
-
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.util.HashSet;
-import java.util.Set;
-
-import javax.servlet.http.HttpServletResponse;
-
+import com.opensymphony.xwork2.Action;
import org.apache.struts2.ServletActionContext;
+import org.hisp.dhis.common.IdentifiableObjectUtils;
import org.hisp.dhis.common.IdentifiableProperty;
-import org.hisp.dhis.common.IdentifiableObjectUtils;
import org.hisp.dhis.dxf2.datavalueset.DataValueSetService;
-import org.hisp.dhis.dxf2.metadata.ExportOptions;
+import org.hisp.dhis.dxf2.utils.IdSchemes;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.oust.manager.SelectionTreeManager;
import org.hisp.dhis.util.ContextUtils;
import org.springframework.beans.factory.annotation.Autowired;
-import com.opensymphony.xwork2.Action;
+import javax.servlet.http.HttpServletResponse;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.hisp.dhis.system.util.CodecUtils.filenameEncode;
+import static org.hisp.dhis.system.util.DateUtils.getMediumDate;
+import static org.hisp.dhis.util.ContextUtils.*;
/**
* @author Lars Helge Overland
@@ -76,7 +71,7 @@
@Autowired
private OrganisationUnitService organisationUnitService;
-
+
@Autowired
private DataValueSetService dataValueSetService;
@@ -142,37 +137,40 @@
throws Exception
{
//TODO reimplement to use web api
-
- ExportOptions exportOptions = new ExportOptions( dataElementIdScheme, orgUnitIdScheme, categoryOptionComboIdScheme );
-
+
+ IdSchemes idSchemes = new IdSchemes();
+ idSchemes.setDataElementIdScheme( dataElementIdScheme );
+ idSchemes.setOrgUnitIdScheme( orgUnitIdScheme );
+ idSchemes.setCategoryOptionComboIdScheme( categoryOptionComboIdScheme );
+
Set<String> orgUnits = new HashSet<>( IdentifiableObjectUtils.getUids( selectionTreeManager.getSelectedOrganisationUnits() ) );
-
+
HttpServletResponse response = ServletActionContext.getResponse();
-
+
if ( FORMAT_CSV.equals( exportFormat ) )
{
ContextUtils.configureResponse( response, CONTENT_TYPE_CSV, true, getFileName( EXTENSION_CSV_ZIP ), true );
-
+
Writer writer = new OutputStreamWriter( getZipOut( response, getFileName( EXTENSION_CSV ) ) );
-
- dataValueSetService.writeDataValueSetCsv( selectedDataSets, getMediumDate( startDate ),
- getMediumDate( endDate ), orgUnits, true, writer, exportOptions );
+
+ dataValueSetService.writeDataValueSetCsv( selectedDataSets, getMediumDate( startDate ),
+ getMediumDate( endDate ), orgUnits, true, writer, idSchemes );
}
else if ( FORMAT_JSON.equals( exportFormat ) )
{
ContextUtils.configureResponse( response, CONTENT_TYPE_JSON, true, getFileName( EXTENSION_JSON_ZIP ), true );
-
- dataValueSetService.writeDataValueSetJson( selectedDataSets, getMediumDate( startDate ),
- getMediumDate( endDate ), orgUnits, true, getZipOut( response, getFileName( EXTENSION_JSON ) ), exportOptions );
+
+ dataValueSetService.writeDataValueSetJson( selectedDataSets, getMediumDate( startDate ),
+ getMediumDate( endDate ), orgUnits, true, getZipOut( response, getFileName( EXTENSION_JSON ) ), idSchemes );
}
else
{
ContextUtils.configureResponse( response, CONTENT_TYPE_XML, true, getFileName( EXTENSION_XML_ZIP ), true );
-
- dataValueSetService.writeDataValueSetXml( selectedDataSets, getMediumDate( startDate ),
- getMediumDate( endDate ), orgUnits, true, getZipOut( response, getFileName( EXTENSION_XML ) ), exportOptions );
+
+ dataValueSetService.writeDataValueSetXml( selectedDataSets, getMediumDate( startDate ),
+ getMediumDate( endDate ), orgUnits, true, getZipOut( response, getFileName( EXTENSION_XML ) ), idSchemes );
}
-
+
return SUCCESS;
}
@@ -183,12 +181,12 @@
private String getFileName( String extension )
{
String fileName = FILE_PREFIX + FILE_SEPARATOR + startDate + FILE_SEPARATOR + endDate;
-
+
if ( selectionTreeManager.getSelectedOrganisationUnits().size() == 1 )
{
fileName += FILE_SEPARATOR + filenameEncode( selectionTreeManager.getSelectedOrganisationUnits().iterator().next().getShortName() );
}
-
+
return fileName + extension;
}
}