dhis2-devs team mailing list archive
Message #37673
[Branch ~dhis2-devs-core/dhis2/trunk] Rev 19233: Data export. Refactored and centralized parameter handling and validation. Step 2, done.
Merge authors:
Lars Helge Øverland (larshelge)
------------------------------------------------------------
revno: 19233 [merge]
committer: Lars Helge Overland <larshelge@xxxxxxxxx>
branch nick: dhis2
timestamp: Tue 2015-06-02 01:45:15 +0200
message:
Data export. Refactored and centralized parameter handling and validation. Step 2, done.
modified:
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataExportParams.java
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetStore.java
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java
dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/DataValueSetController.java
dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ExportDataValueAction.java
--
lp:dhis2
https://code.launchpad.net/~dhis2-devs-core/dhis2/trunk
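In short, the refactoring collapses the export entry points onto a single DataExportParams object which the service resolves and validates centrally. A minimal caller-side sketch, assuming the getFromUrl and write signatures shown in the diff below (the UIDs, ISO periods and output stream are placeholders; Sets is Guava's com.google.common.collect.Sets):

    // Resolve and validate parameters once, then hand them to the format-specific writer.
    DataExportParams params = dataValueSetService.getFromUrl(
        Sets.newHashSet( "<dataSetUid>" ),       // data set UIDs
        Sets.newHashSet( "201501", "201502" ),   // ISO periods, or null to fall back to start/end dates
        null, null,                              // startDate, endDate
        Sets.newHashSet( "<orgUnitUid>" ),       // organisation unit UIDs
        false,                                   // includeChildren
        new IdSchemes() );

    dataValueSetService.writeDataValueSetXml( params, out ); // out is any OutputStream

With exactly one data set, period and organisation unit, params.isSingleDataValueSet() is true, and the store then also writes the complete date and the data set, period and org unit headers on the data value set.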
=== modified file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataExportParams.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataExportParams.java 2015-06-01 22:07:39 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataExportParams.java 2015-06-01 22:46:00 +0000
@@ -28,7 +28,6 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-import java.util.Date;
import java.util.HashSet;
import java.util.Set;
@@ -37,6 +36,8 @@
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;
+import com.google.common.base.MoreObjects;
+
/**
* @author Lars Helge Overland
*/
@@ -46,10 +47,6 @@
private Set<Period> periods = new HashSet<>();
- private Date startDate;
-
- private Date endDate;
-
private Set<OrganisationUnit> organisationUnits = new HashSet<>();
private boolean includeChildren;
@@ -83,6 +80,26 @@
return organisationUnits != null && !organisationUnits.isEmpty() ? organisationUnits.iterator().next() : null;
}
+ /**
+ * Indicates whether these parameters represent a single data value set, implying
+ * that they contain exactly one data set, period and organisation unit.
+ */
+ public boolean isSingleDataValueSet()
+ {
+ return dataSets.size() == 1 && periods.size() == 1 && organisationUnits.size() == 1;
+ }
+
+ @Override
+ public String toString()
+ {
+ return MoreObjects.toStringHelper( this ).
+ add( "data sets", dataSets ).
+ add( "periods", periods ).
+ add( "org units", organisationUnits ).
+ add( "children", includeChildren ).
+ add( "id schemes", idSchemes ).toString();
+ }
+
// -------------------------------------------------------------------------
// Get and set methods
// -------------------------------------------------------------------------
@@ -107,26 +124,6 @@
this.periods = periods;
}
- public Date getStartDate()
- {
- return startDate;
- }
-
- public void setStartDate( Date startDate )
- {
- this.startDate = startDate;
- }
-
- public Date getEndDate()
- {
- return endDate;
- }
-
- public void setEndDate( Date endDate )
- {
- this.endDate = endDate;
- }
-
public Set<OrganisationUnit> getOrganisationUnits()
{
return organisationUnits;
=== modified file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java 2015-06-01 22:07:39 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java 2015-06-01 23:12:38 +0000
@@ -55,18 +55,12 @@
void writeDataValueSetXml( DataExportParams params, OutputStream out );
- void writeDataValueSetXml( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits, boolean includeChildren, OutputStream out, IdSchemes idSchemes );
-
void writeDataValueSetJson( DataExportParams params, OutputStream out );
- void writeDataValueSetJson( Set<String> dataSet, Date startDate, Date endDate, Set<String> ous, boolean includeChildren, OutputStream outputStream, IdSchemes idSchemes );
-
void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, IdSchemes idSchemes );
void writeDataValueSetCsv( DataExportParams params, Writer writer );
- void writeDataValueSetCsv( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits, boolean includeChildren, Writer writer, IdSchemes idSchemes );
-
RootNode getDataValueSetTemplate( DataSet dataSet, Period period, List<String> orgUnits, boolean writeComments, String ouScheme, String deScheme );
ImportSummary saveDataValueSet( InputStream in );
=== modified file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetStore.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetStore.java 2015-02-17 06:00:52 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetStore.java 2015-06-01 23:12:38 +0000
@@ -28,29 +28,22 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-import org.hisp.dhis.dataset.DataSet;
-import org.hisp.dhis.dxf2.common.IdSchemes;
-import org.hisp.dhis.organisationunit.OrganisationUnit;
-import org.hisp.dhis.period.Period;
-
import java.io.OutputStream;
import java.io.Writer;
import java.util.Date;
-import java.util.Set;
+
+import org.hisp.dhis.dxf2.common.IdSchemes;
/**
* @author Lars Helge Overland
*/
public interface DataValueSetStore
{
- public void writeDataValueSetXml( Set<DataSet> dataSets, Date completeDate, Period period,
- OrganisationUnit orgUnit, Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream out, IdSchemes idSchemes );
-
- public void writeDataValueSetJson( Set<DataSet> dataSets, Date completeDate, Period period,
- OrganisationUnit orgUnit, Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream out, IdSchemes idSchemes );
-
- public void writeDataValueSetCsv( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
- Set<Period> periods, Set<OrganisationUnit> orgUnits, Writer writer, IdSchemes idSchemes );
+ public void writeDataValueSetXml( DataExportParams params, Date completeDate, OutputStream out );
+
+ public void writeDataValueSetJson( DataExportParams params, Date completeDate, OutputStream out );
+
+ public void writeDataValueSetCsv( DataExportParams params, Date completeDate, Writer writer );
void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, IdSchemes idSchemes );
}
=== modified file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java 2015-06-01 22:07:39 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java 2015-06-01 23:45:15 +0000
@@ -55,7 +55,6 @@
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.common.IdentifiableObjectManager;
-import org.hisp.dhis.common.IdentifiableObjectUtils;
import org.hisp.dhis.common.IdentifiableProperty;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
@@ -169,10 +168,15 @@
params.getDataSets().addAll( identifiableObjectManager.getByUid( DataSet.class, dataSets ) );
}
- if ( periods != null )
+ if ( periods != null && !periods.isEmpty() )
{
params.getPeriods().addAll( periodService.reloadIsoPeriods( new ArrayList<String>( periods ) ) );
}
+ else if ( startDate != null && endDate != null )
+ {
+ List<Period> pes = new ArrayList<Period>( periodService.getPeriodsBetweenDates( startDate, endDate ) );
+ params.getPeriods().addAll( periodService.reloadPeriods( pes ) );
+ }
if ( organisationUnits != null )
{
@@ -185,8 +189,6 @@
}
}
- params.setStartDate( startDate );
- params.setEndDate( endDate );
params.setIncludeChildren( includeChildren );
params.setIdSchemes( idSchemes );
@@ -208,7 +210,7 @@
violation = "At least one valid data set must be specified";
}
- if ( params.getPeriods().isEmpty() && ( params.getStartDate() == null || params.getEndDate() == null ) )
+ if ( params.getPeriods().isEmpty() )
{
violation = "At least one valid period or start/end dates must be specified";
}
@@ -243,146 +245,46 @@
{
validate( params );
- DataElementCategoryOptionCombo optionCombo = categoryService.getDefaultDataElementCategoryOptionCombo(); //TODO
-
- CompleteDataSetRegistration registration = registrationService
- .getCompleteDataSetRegistration( params.getFirstDataSet(), params.getFirstPeriod(), params.getFirstOrganisationUnit(), optionCombo );
-
- Date completeDate = registration != null ? registration.getDate() : null;
-
- dataValueSetStore.writeDataValueSetXml( params.getDataSets(), completeDate, params.getFirstPeriod(), params.getFirstOrganisationUnit(),
- params.getPeriods(), params.getOrganisationUnits(), out, params.getIdSchemes() );
- }
-
- @Override
- public void writeDataValueSetXml( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits,
- boolean includeChildren, OutputStream out, IdSchemes idSchemes )
- {
- Set<DataSet> ds = new HashSet<>( dataSetService.getDataSetsByUid( dataSets ) );
- Set<Period> pe = new HashSet<>( periodService.getPeriodsBetweenDates( startDate, endDate ) );
- Set<OrganisationUnit> ou = new HashSet<>( organisationUnitService.getOrganisationUnitsByUid( orgUnits ) );
-
- if ( ds.isEmpty() )
- {
- throw new IllegalArgumentException( "At least one data set must be specified" );
- }
-
- if ( pe.isEmpty() )
- {
- throw new IllegalArgumentException( "At least one period must be specified" );
- }
-
- if ( ou.isEmpty() )
- {
- throw new IllegalArgumentException( "At least one organisation unit must be specified" );
- }
-
- if ( includeChildren )
- {
- ou = new HashSet<>( organisationUnitService.getOrganisationUnitsWithChildren( IdentifiableObjectUtils.getUids( ou ) ) );
- }
-
- dataValueSetStore.writeDataValueSetXml( ds, null, null, null, pe, ou, out, idSchemes );
- }
-
+ dataValueSetStore.writeDataValueSetXml( params, getCompleteDate( params ), out );
+ }
+
@Override
public void writeDataValueSetJson( DataExportParams params, OutputStream out )
{
validate( params );
- DataElementCategoryOptionCombo optionCombo = categoryService.getDefaultDataElementCategoryOptionCombo(); //TODO
-
- CompleteDataSetRegistration registration = registrationService
- .getCompleteDataSetRegistration( params.getFirstDataSet(), params.getFirstPeriod(), params.getFirstOrganisationUnit(), optionCombo );
-
- Date completeDate = registration != null ? registration.getDate() : null;
-
- dataValueSetStore.writeDataValueSetJson( params.getDataSets(), completeDate, params.getFirstPeriod(), params.getFirstOrganisationUnit(),
- params.getPeriods(), params.getOrganisationUnits(), out, params.getIdSchemes() );
+ dataValueSetStore.writeDataValueSetJson( params, getCompleteDate( params ), out );
}
@Override
public void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, IdSchemes idSchemes )
{
+ //TODO validate
+
dataValueSetStore.writeDataValueSetJson( lastUpdated, outputStream, idSchemes );
}
@Override
- public void writeDataValueSetJson( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits,
- boolean includeChildren, OutputStream outputStream, IdSchemes idSchemes )
- {
- Set<DataSet> ds = new HashSet<>( dataSetService.getDataSetsByUid( dataSets ) );
- Set<Period> pe = new HashSet<>( periodService.getPeriodsBetweenDates( startDate, endDate ) );
- Set<OrganisationUnit> ou = new HashSet<>( organisationUnitService.getOrganisationUnitsByUid( orgUnits ) );
-
- if ( ds.isEmpty() )
- {
- throw new IllegalArgumentException( "At least one data set must be specified" );
- }
-
- if ( pe.isEmpty() )
- {
- throw new IllegalArgumentException( "At least one period must be specified" );
- }
-
- if ( ou.isEmpty() )
- {
- throw new IllegalArgumentException( "At least one organisation unit must be specified" );
- }
-
- if ( includeChildren )
- {
- ou = new HashSet<>( organisationUnitService.getOrganisationUnitsWithChildren( IdentifiableObjectUtils.getUids( ou ) ) );
- }
-
- dataValueSetStore.writeDataValueSetJson( ds, null, null, null, pe, ou, outputStream, idSchemes );
- }
-
- @Override
public void writeDataValueSetCsv( DataExportParams params, Writer writer )
{
validate( params );
- DataElementCategoryOptionCombo optionCombo = categoryService.getDefaultDataElementCategoryOptionCombo(); //TODO
-
- CompleteDataSetRegistration registration = registrationService
- .getCompleteDataSetRegistration( params.getFirstDataSet(), params.getFirstPeriod(), params.getFirstOrganisationUnit(), optionCombo );
-
- Date completeDate = registration != null ? registration.getDate() : null;
-
- dataValueSetStore.writeDataValueSetCsv( params.getDataSets(), completeDate, params.getFirstPeriod(), params.getFirstOrganisationUnit(),
- params.getPeriods(), params.getOrganisationUnits(), writer, params.getIdSchemes() );
+ dataValueSetStore.writeDataValueSetCsv( params, getCompleteDate( params ), writer );
}
- @Override
- public void writeDataValueSetCsv( Set<String> dataSets, Date startDate, Date endDate, Set<String> orgUnits,
- boolean includeChildren, Writer writer, IdSchemes idSchemes )
+ private Date getCompleteDate( DataExportParams params )
{
- Set<DataSet> ds = new HashSet<>( dataSetService.getDataSetsByUid( dataSets ) );
- Set<Period> pe = new HashSet<>( periodService.getPeriodsBetweenDates( startDate, endDate ) );
- Set<OrganisationUnit> ou = new HashSet<>( organisationUnitService.getOrganisationUnitsByUid( orgUnits ) );
-
- if ( ds.isEmpty() )
- {
- throw new IllegalArgumentException( "At least one data set must be specified" );
- }
-
- if ( pe.isEmpty() )
- {
- throw new IllegalArgumentException( "At least one period must be specified" );
- }
-
- if ( ou.isEmpty() )
- {
- throw new IllegalArgumentException( "At least one organisation unit must be specified" );
- }
-
- if ( includeChildren )
- {
- ou = new HashSet<>( organisationUnitService.getOrganisationUnitsWithChildren( IdentifiableObjectUtils.getUids( ou ) ) );
- }
-
- dataValueSetStore.writeDataValueSetCsv( ds, null, null, null, pe, ou, writer, idSchemes );
+ if ( params.isSingleDataValueSet() )
+ {
+ DataElementCategoryOptionCombo optionCombo = categoryService.getDefaultDataElementCategoryOptionCombo(); //TODO
+
+ CompleteDataSetRegistration registration = registrationService
+ .getCompleteDataSetRegistration( params.getFirstDataSet(), params.getFirstPeriod(), params.getFirstOrganisationUnit(), optionCombo );
+
+ return registration != null ? registration.getDate() : null;
+ }
+
+ return null;
}
//--------------------------------------------------------------------------
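The period handling in getFromUrl is the main behavioural point in this file: explicit ISO periods take precedence, and a start/end date range is only expanded into periods when no periods are given, so validate() can simply check params.getPeriods().isEmpty(). A rough illustration, assuming dataSets, orgUnits and idSchemes are already populated and the dates and periods are placeholders:

    // Explicit ISO periods: reloaded via reloadIsoPeriods.
    DataExportParams a = dataValueSetService.getFromUrl(
        dataSets, Sets.newHashSet( "201501", "201502" ), null, null, orgUnits, false, idSchemes );

    // No periods given: the date range is expanded via getPeriodsBetweenDates instead.
    DataExportParams b = dataValueSetService.getFromUrl(
        dataSets, null, getMediumDate( "2015-01-01" ), getMediumDate( "2015-02-28" ), orgUnits, false, idSchemes );

Either way the resulting params carry Period objects, which is why the startDate/endDate fields could be removed from DataExportParams.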
=== modified file 'dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java'
--- dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java 2015-06-01 22:07:39 +0000
+++ dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java 2015-06-01 23:12:38 +0000
@@ -28,15 +28,24 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-import com.csvreader.CsvWriter;
+import static org.hisp.dhis.common.IdentifiableObjectUtils.getIdentifiers;
+import static org.hisp.dhis.system.util.DateUtils.getLongGmtDateString;
+import static org.hisp.dhis.util.TextUtils.getCommaDelimitedString;
+
+import java.io.OutputStream;
+import java.io.Writer;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+
import org.amplecode.staxwax.factory.XMLFactory;
import org.hisp.dhis.calendar.Calendar;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataset.DataSet;
+import org.hisp.dhis.dxf2.common.IdSchemes;
import org.hisp.dhis.dxf2.datavalue.DataValue;
-import org.hisp.dhis.dxf2.common.IdSchemes;
-import org.hisp.dhis.organisationunit.OrganisationUnit;
-import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.system.util.DateUtils;
import org.hisp.dhis.util.StreamUtils;
@@ -44,18 +53,7 @@
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowCallbackHandler;
-import java.io.OutputStream;
-import java.io.Writer;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.Set;
-
-import static org.hisp.dhis.common.IdentifiableObjectUtils.getIdentifiers;
-import static org.hisp.dhis.system.util.DateUtils.getLongGmtDateString;
-import static org.hisp.dhis.util.TextUtils.getCommaDelimitedString;
+import com.csvreader.CsvWriter;
/**
* @author Lars Helge Overland
@@ -73,34 +71,31 @@
//--------------------------------------------------------------------------
@Override
- public void writeDataValueSetXml( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
- Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream out, IdSchemes idSchemes )
+ public void writeDataValueSetXml( DataExportParams params, Date completeDate, OutputStream out )
{
DataValueSet dataValueSet = new StreamingDataValueSet( XMLFactory.getXMLWriter( out ) );
- writeDataValueSet( getDataValueSql( dataSets, periods, orgUnits, idSchemes ), dataSets, completeDate, period, orgUnit, dataValueSet );
+ writeDataValueSet( getDataValueSql( params ), params, completeDate, dataValueSet );
StreamUtils.closeOutputStream( out );
}
@Override
- public void writeDataValueSetJson( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
- Set<Period> periods, Set<OrganisationUnit> orgUnits, OutputStream outputStream, IdSchemes idSchemes )
+ public void writeDataValueSetJson( DataExportParams params, Date completeDate, OutputStream out )
{
- DataValueSet dataValueSet = new StreamingJsonDataValueSet( outputStream );
-
- writeDataValueSet( getDataValueSql( dataSets, periods, orgUnits, idSchemes ), dataSets, completeDate, period, orgUnit, dataValueSet );
-
- StreamUtils.closeOutputStream( outputStream );
+ DataValueSet dataValueSet = new StreamingJsonDataValueSet( out );
+
+ writeDataValueSet( getDataValueSql( params ), params, completeDate, dataValueSet );
+
+ StreamUtils.closeOutputStream( out );
}
@Override
- public void writeDataValueSetCsv( Set<DataSet> dataSets, Date completeDate, Period period, OrganisationUnit orgUnit,
- Set<Period> periods, Set<OrganisationUnit> orgUnits, Writer writer, IdSchemes idSchemes )
+ public void writeDataValueSetCsv( DataExportParams params, Date completeDate, Writer writer )
{
DataValueSet dataValueSet = new StreamingCsvDataValueSet( new CsvWriter( writer, CSV_DELIM ) );
- writeDataValueSet( getDataValueSql( dataSets, periods, orgUnits, idSchemes ), dataSets, completeDate, period, orgUnit, dataValueSet );
+ writeDataValueSet( getDataValueSql( params ), params, completeDate, dataValueSet );
StreamUtils.closeWriter( writer );
}
@@ -116,27 +111,29 @@
final String sql =
"select de." + deScheme + " as deid, pe.startdate as pestart, pt.name as ptname, ou." + ouScheme + " as ouid, " +
- "coc." + ocScheme + " as cocid, aoc." + ocScheme + " as aocid, " +
- "dv.value, dv.storedby, dv.created, dv.lastupdated, dv.comment, dv.followup " +
- "from datavalue dv " +
- "join dataelement de on (dv.dataelementid=de.dataelementid) " +
- "join period pe on (dv.periodid=pe.periodid) " +
- "join periodtype pt on (pe.periodtypeid=pt.periodtypeid) " +
- "join organisationunit ou on (dv.sourceid=ou.organisationunitid) " +
- "join categoryoptioncombo coc on (dv.categoryoptioncomboid=coc.categoryoptioncomboid) " +
- "join categoryoptioncombo aoc on (dv.attributeoptioncomboid=aoc.categoryoptioncomboid) " +
- "where dv.lastupdated >= '" + DateUtils.getLongDateString( lastUpdated ) + "'";
+ "coc." + ocScheme + " as cocid, aoc." + ocScheme + " as aocid, " +
+ "dv.value, dv.storedby, dv.created, dv.lastupdated, dv.comment, dv.followup " +
+ "from datavalue dv " +
+ "join dataelement de on (dv.dataelementid=de.dataelementid) " +
+ "join period pe on (dv.periodid=pe.periodid) " +
+ "join periodtype pt on (pe.periodtypeid=pt.periodtypeid) " +
+ "join organisationunit ou on (dv.sourceid=ou.organisationunitid) " +
+ "join categoryoptioncombo coc on (dv.categoryoptioncomboid=coc.categoryoptioncomboid) " +
+ "join categoryoptioncombo aoc on (dv.attributeoptioncomboid=aoc.categoryoptioncomboid) " +
+ "where dv.lastupdated >= '" + DateUtils.getLongDateString( lastUpdated ) + "'";
- writeDataValueSet( sql, null, null, null, null, dataValueSet );
+ writeDataValueSet( sql, new DataExportParams(), null, dataValueSet );
}
- private void writeDataValueSet( String sql, Set<DataSet> dataSets, Date completeDate, Period period,
- OrganisationUnit orgUnit, final DataValueSet dataValueSet )
+ private void writeDataValueSet( String sql, DataExportParams params, Date completeDate, final DataValueSet dataValueSet )
{
- dataValueSet.setDataSet( ( dataSets != null && dataSets.size() == 1 ) ? dataSets.iterator().next().getUid() : null );
- dataValueSet.setCompleteDate( getLongGmtDateString( completeDate ) );
- dataValueSet.setPeriod( period != null ? period.getIsoDate() : null );
- dataValueSet.setOrgUnit( orgUnit != null ? orgUnit.getUid() : null );
+ if ( params.isSingleDataValueSet() )
+ {
+ dataValueSet.setDataSet( params.getFirstDataSet().getUid() ); //TODO id scheme
+ dataValueSet.setCompleteDate( getLongGmtDateString( completeDate ) );
+ dataValueSet.setPeriod( params.getFirstPeriod().getIsoDate() );
+ dataValueSet.setOrgUnit( params.getFirstOrganisationUnit().getUid() );
+ }
final Calendar calendar = PeriodType.getCalendar();
@@ -167,12 +164,12 @@
}
//--------------------------------------------------------------------------
- // DataValueSetStore implementation
+ // Supportive methods
//--------------------------------------------------------------------------
- private String getDataValueSql( Set<DataSet> dataSets, Collection<Period> periods, Collection<OrganisationUnit> orgUnits, IdSchemes idSchemes )
+ private String getDataValueSql( DataExportParams params )
{
- idSchemes = idSchemes != null ? idSchemes : new IdSchemes();
+ IdSchemes idSchemes = params.getIdSchemes() != null ? params.getIdSchemes() : new IdSchemes();
String deScheme = idSchemes.getDataElementIdScheme().toString().toLowerCase();
String ouScheme = idSchemes.getOrgUnitIdScheme().toString().toLowerCase();
@@ -180,18 +177,18 @@
return
"select de." + deScheme + " as deid, pe.startdate as pestart, pt.name as ptname, ou." + ouScheme + " as ouid, " +
- "coc." + ocScheme + " as cocid, aoc." + ocScheme + " as aocid, " +
- "dv.value, dv.storedby, dv.created, dv.lastupdated, dv.comment, dv.followup " +
- "from datavalue dv " +
- "join dataelement de on (dv.dataelementid=de.dataelementid) " +
- "join period pe on (dv.periodid=pe.periodid) " +
- "join periodtype pt on (pe.periodtypeid=pt.periodtypeid) " +
- "join organisationunit ou on (dv.sourceid=ou.organisationunitid) " +
- "join categoryoptioncombo coc on (dv.categoryoptioncomboid=coc.categoryoptioncomboid) " +
- "join categoryoptioncombo aoc on (dv.attributeoptioncomboid=aoc.categoryoptioncomboid) " +
- "where de.dataelementid in (" + getCommaDelimitedString( getIdentifiers( getDataElements( dataSets ) ) ) + ") " +
- "and dv.periodid in (" + getCommaDelimitedString( getIdentifiers( periods ) ) + ") " +
- "and dv.sourceid in (" + getCommaDelimitedString( getIdentifiers( orgUnits ) ) + ")";
+ "coc." + ocScheme + " as cocid, aoc." + ocScheme + " as aocid, " +
+ "dv.value, dv.storedby, dv.created, dv.lastupdated, dv.comment, dv.followup " +
+ "from datavalue dv " +
+ "join dataelement de on (dv.dataelementid=de.dataelementid) " +
+ "join period pe on (dv.periodid=pe.periodid) " +
+ "join periodtype pt on (pe.periodtypeid=pt.periodtypeid) " +
+ "join organisationunit ou on (dv.sourceid=ou.organisationunitid) " +
+ "join categoryoptioncombo coc on (dv.categoryoptioncomboid=coc.categoryoptioncomboid) " +
+ "join categoryoptioncombo aoc on (dv.attributeoptioncomboid=aoc.categoryoptioncomboid) " +
+ "where de.dataelementid in (" + getCommaDelimitedString( getIdentifiers( getDataElements( params.getDataSets() ) ) ) + ") " +
+ "and dv.periodid in (" + getCommaDelimitedString( getIdentifiers( params.getPeriods() ) ) + ") " +
+ "and dv.sourceid in (" + getCommaDelimitedString( getIdentifiers( params.getOrganisationUnits() ) ) + ")";
}
private Set<DataElement> getDataElements( Set<DataSet> dataSets )
=== modified file 'dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/DataValueSetController.java'
--- dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/DataValueSetController.java 2015-06-01 22:07:39 +0000
+++ dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/DataValueSetController.java 2015-06-01 23:45:15 +0000
@@ -28,8 +28,17 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import static org.hisp.dhis.webapi.utils.ContextUtils.CONTENT_TYPE_CSV;
+import static org.hisp.dhis.webapi.utils.ContextUtils.CONTENT_TYPE_JSON;
+import static org.hisp.dhis.webapi.utils.ContextUtils.CONTENT_TYPE_XML;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Date;
+import java.util.Set;
+
+import javax.servlet.http.HttpServletResponse;
+
import org.hisp.dhis.dxf2.common.IdSchemes;
import org.hisp.dhis.dxf2.common.ImportOptions;
import org.hisp.dhis.dxf2.common.JacksonUtils;
@@ -46,17 +55,6 @@
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
-import com.google.common.collect.Sets;
-
-import javax.servlet.http.HttpServletResponse;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Date;
-import java.util.Set;
-
-import static org.hisp.dhis.webapi.utils.ContextUtils.*;
-
/**
* @author Lars Helge Overland
*/
@@ -66,8 +64,6 @@
{
public static final String RESOURCE_PATH = "/dataValueSets";
- private static final Log log = LogFactory.getLog( DataValueSetController.class );
-
@Autowired
private DataValueSetService dataValueSetService;
@@ -78,7 +74,7 @@
@RequestMapping( method = RequestMethod.GET, produces = CONTENT_TYPE_XML )
public void getDataValueSetXml(
@RequestParam Set<String> dataSet,
- @RequestParam( required = false ) String period,
+ @RequestParam( required = false ) Set<String> period,
@RequestParam( required = false ) Date startDate,
@RequestParam( required = false ) Date endDate,
@RequestParam Set<String> orgUnit,
@@ -87,32 +83,16 @@
{
response.setContentType( CONTENT_TYPE_XML );
- DataExportParams params = dataValueSetService.getFromUrl( dataSet, Sets.newHashSet( period ),
+ DataExportParams params = dataValueSetService.getFromUrl( dataSet, period,
startDate, endDate, orgUnit, children, idSchemes );
- boolean isSingleDataValueSet = dataSet.size() == 1 && period != null && orgUnit.size() == 1;
-
- if ( isSingleDataValueSet )
- {
- String ds = dataSet.iterator().next();
- String ou = orgUnit.iterator().next();
-
- log.debug( "Get XML data value set for data set: " + ds + ", period: " + period + ", org unit: " + ou );
-
- dataValueSetService.writeDataValueSetXml( params, response.getOutputStream() );
- }
- else
- {
- log.debug( "Get XML bulk data value set for start date: " + startDate + ", end date: " + endDate );
-
- dataValueSetService.writeDataValueSetXml( dataSet, startDate, endDate, orgUnit, children, response.getOutputStream(), idSchemes );
- }
+ dataValueSetService.writeDataValueSetXml( params, response.getOutputStream() );
}
@RequestMapping( method = RequestMethod.GET, produces = CONTENT_TYPE_JSON )
public void getDataValueSetJson(
@RequestParam Set<String> dataSet,
- @RequestParam( required = false ) String period,
+ @RequestParam( required = false ) Set<String> period,
@RequestParam( required = false ) Date startDate,
@RequestParam( required = false ) Date endDate,
@RequestParam Set<String> orgUnit,
@@ -121,32 +101,16 @@
{
response.setContentType( CONTENT_TYPE_JSON );
- DataExportParams params = dataValueSetService.getFromUrl( dataSet, Sets.newHashSet( period ),
+ DataExportParams params = dataValueSetService.getFromUrl( dataSet, period,
startDate, endDate, orgUnit, children, idSchemes );
- boolean isSingleDataValueSet = dataSet.size() == 1 && period != null && orgUnit.size() == 1;
-
- if ( isSingleDataValueSet )
- {
- String ds = dataSet.iterator().next();
- String ou = orgUnit.iterator().next();
-
- log.debug( "Get JSON data value set for data set: " + ds + ", period: " + period + ", org unit: " + ou );
-
- dataValueSetService.writeDataValueSetJson( params, response.getOutputStream() );
- }
- else
- {
- log.debug( "Get JSON bulk data value set for start date: " + startDate + ", end date: " + endDate );
-
- dataValueSetService.writeDataValueSetJson( dataSet, startDate, endDate, orgUnit, children, response.getOutputStream(), idSchemes );
- }
+ dataValueSetService.writeDataValueSetJson( params, response.getOutputStream() );
}
@RequestMapping( method = RequestMethod.GET, produces = CONTENT_TYPE_CSV )
public void getDataValueSetCsv(
@RequestParam Set<String> dataSet,
- @RequestParam( required = false ) String period,
+ @RequestParam( required = false ) Set<String> period,
@RequestParam( required = false ) Date startDate,
@RequestParam( required = false ) Date endDate,
@RequestParam Set<String> orgUnit,
@@ -156,26 +120,10 @@
{
response.setContentType( CONTENT_TYPE_CSV );
- DataExportParams params = dataValueSetService.getFromUrl( dataSet, Sets.newHashSet( period ),
+ DataExportParams params = dataValueSetService.getFromUrl( dataSet, period,
startDate, endDate, orgUnit, children, idSchemes );
- boolean isSingleDataValueSet = dataSet.size() == 1 && period != null && orgUnit.size() == 1;
-
- if ( isSingleDataValueSet )
- {
- String ds = dataSet.iterator().next();
- String ou = orgUnit.iterator().next();
-
- log.debug( "Get CSV data value set for data set: " + ds + ", period: " + period + ", org unit: " + ou );
-
- dataValueSetService.writeDataValueSetCsv( params, response.getWriter() );
- }
- else
- {
- log.debug( "Get CSV bulk data value set for start date: " + startDate + ", end date: " + endDate );
-
- dataValueSetService.writeDataValueSetCsv( dataSet, startDate, endDate, orgUnit, children, response.getWriter(), idSchemes );
- }
+ dataValueSetService.writeDataValueSetCsv( params, response.getWriter() );
}
// -------------------------------------------------------------------------
@@ -189,8 +137,6 @@
{
ImportSummary summary = dataValueSetService.saveDataValueSet( in, importOptions );
- log.debug( "Data values set saved" );
-
response.setContentType( CONTENT_TYPE_XML );
JacksonUtils.toXml( response.getOutputStream(), summary );
}
@@ -202,8 +148,6 @@
{
ImportSummary summary = dataValueSetService.saveDataValueSetJson( in, importOptions );
- log.debug( "Data values set saved" );
-
response.setContentType( CONTENT_TYPE_JSON );
JacksonUtils.toJson( response.getOutputStream(), summary );
}
@@ -215,8 +159,6 @@
{
ImportSummary summary = dataValueSetService.saveDataValueSetCsv( in, importOptions );
- log.debug( "Data values set saved" );
-
response.setContentType( CONTENT_TYPE_XML );
JacksonUtils.toXml( response.getOutputStream(), summary );
}
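For the web API this means period can now be supplied multiple times (it is bound to a Set<String>), while the old single-period and start/end date requests keep working through the same getFromUrl path. A hypothetical request, assuming the usual /api servlet mapping in front of RESOURCE_PATH and placeholder identifiers:

    GET /api/dataValueSets?dataSet=<dataSetUid>&period=201501&period=201502&orgUnit=<orgUnitUid>&children=true
    Accept: application/json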
=== modified file 'dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ExportDataValueAction.java'
--- dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ExportDataValueAction.java 2015-05-28 16:10:07 +0000
+++ dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ExportDataValueAction.java 2015-06-01 23:12:38 +0000
@@ -28,26 +28,31 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-import com.opensymphony.xwork2.Action;
+import static org.hisp.dhis.system.util.DateUtils.getMediumDate;
+import static org.hisp.dhis.util.CodecUtils.filenameEncode;
+import static org.hisp.dhis.util.ContextUtils.CONTENT_TYPE_CSV;
+import static org.hisp.dhis.util.ContextUtils.CONTENT_TYPE_JSON;
+import static org.hisp.dhis.util.ContextUtils.CONTENT_TYPE_XML;
+import static org.hisp.dhis.util.ContextUtils.getZipOut;
+
+import java.io.OutputStreamWriter;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.servlet.http.HttpServletResponse;
+
import org.apache.struts2.ServletActionContext;
import org.hisp.dhis.common.IdentifiableObjectUtils;
import org.hisp.dhis.common.IdentifiableProperty;
+import org.hisp.dhis.dxf2.common.IdSchemes;
+import org.hisp.dhis.dxf2.datavalueset.DataExportParams;
import org.hisp.dhis.dxf2.datavalueset.DataValueSetService;
-import org.hisp.dhis.dxf2.common.IdSchemes;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.oust.manager.SelectionTreeManager;
import org.hisp.dhis.util.ContextUtils;
import org.springframework.beans.factory.annotation.Autowired;
-import javax.servlet.http.HttpServletResponse;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.util.HashSet;
-import java.util.Set;
-
-import static org.hisp.dhis.util.CodecUtils.filenameEncode;
-import static org.hisp.dhis.system.util.DateUtils.getMediumDate;
-import static org.hisp.dhis.util.ContextUtils.*;
+import com.opensymphony.xwork2.Action;
/**
* @author Lars Helge Overland
@@ -136,7 +141,7 @@
public String execute()
throws Exception
{
- //TODO reimplement to use web api
+ //TODO re-implement using Web API
IdSchemes idSchemes = new IdSchemes();
idSchemes.setDataElementIdScheme( dataElementIdScheme );
@@ -147,28 +152,26 @@
HttpServletResponse response = ServletActionContext.getResponse();
+ DataExportParams params = dataValueSetService.getFromUrl( selectedDataSets, null,
+ getMediumDate( startDate ), getMediumDate( endDate ), orgUnits, true, idSchemes );
+
if ( FORMAT_CSV.equals( exportFormat ) )
{
ContextUtils.configureResponse( response, CONTENT_TYPE_CSV, true, getFileName( EXTENSION_CSV_ZIP ), true );
- Writer writer = new OutputStreamWriter( getZipOut( response, getFileName( EXTENSION_CSV ) ) );
-
- dataValueSetService.writeDataValueSetCsv( selectedDataSets, getMediumDate( startDate ),
- getMediumDate( endDate ), orgUnits, true, writer, idSchemes );
+ dataValueSetService.writeDataValueSetCsv( params, new OutputStreamWriter( getZipOut( response, getFileName( EXTENSION_CSV ) ) ) );
}
else if ( FORMAT_JSON.equals( exportFormat ) )
{
ContextUtils.configureResponse( response, CONTENT_TYPE_JSON, true, getFileName( EXTENSION_JSON_ZIP ), true );
- dataValueSetService.writeDataValueSetJson( selectedDataSets, getMediumDate( startDate ),
- getMediumDate( endDate ), orgUnits, true, getZipOut( response, getFileName( EXTENSION_JSON ) ), idSchemes );
+ dataValueSetService.writeDataValueSetJson( params, getZipOut( response, getFileName( EXTENSION_JSON ) ) );
}
else
{
ContextUtils.configureResponse( response, CONTENT_TYPE_XML, true, getFileName( EXTENSION_XML_ZIP ), true );
- dataValueSetService.writeDataValueSetXml( selectedDataSets, getMediumDate( startDate ),
- getMediumDate( endDate ), orgUnits, true, getZipOut( response, getFileName( EXTENSION_XML ) ), idSchemes );
+ dataValueSetService.writeDataValueSetXml( params, getZipOut( response, getFileName( EXTENSION_XML ) ) );
}
return SUCCESS;