dhis2-devs team mailing list archive
-
dhis2-devs team
-
Mailing list archive
-
Message #14481
[Branch ~dhis2-devs-core/dhis2/trunk] Rev 4935: Removed explicit flushing of BufferedWriter for pivot data export
------------------------------------------------------------
revno: 4935
committer: Lars Helge Overland <larshelge@xxxxxxxxx>
branch nick: dhis2
timestamp: Thu 2011-10-13 20:35:12 +0200
message:
Removed explicit flushing of BufferedWriter for pivot data export
modified:
dhis-2/dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/aggregation/jdbc/JdbcAggregatedDataValueStore.java
dhis-2/dhis-services/dhis-service-importexport/src/main/java/org/hisp/dhis/importexport/synchronous/ExportPivotViewService.java
dhis-2/dhis-web/dhis-web-reporting/src/main/java/org/hisp/dhis/reporting/exp/ExportDataMartAction.java
--
lp:dhis2
https://code.launchpad.net/~dhis2-devs-core/dhis2/trunk
Your team, DHIS 2 developers, is subscribed to branch lp:dhis2.
To unsubscribe from this branch go to https://code.launchpad.net/~dhis2-devs-core/dhis2/trunk/+edit-subscription
=== modified file 'dhis-2/dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/aggregation/jdbc/JdbcAggregatedDataValueStore.java'
--- dhis-2/dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/aggregation/jdbc/JdbcAggregatedDataValueStore.java 2011-10-13 16:28:18 +0000
+++ dhis-2/dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/aggregation/jdbc/JdbcAggregatedDataValueStore.java 2011-10-13 18:35:12 +0000
@@ -579,8 +579,6 @@
" AND ous.idlevel" + rootlevel + "=" + rootOrgunit.getId() +
" AND aiv.periodid IN (" + periodids + ") ";
- log.info("sql: " + sql);
-
Statement statement = holder.getStatement();
statement.setFetchSize( FETCH_SIZE );
@@ -597,7 +595,6 @@
finally
{
// don't close holder or we lose resultset - iterator must close
- // holder.close();
}
}
=== modified file 'dhis-2/dhis-services/dhis-service-importexport/src/main/java/org/hisp/dhis/importexport/synchronous/ExportPivotViewService.java'
--- dhis-2/dhis-services/dhis-service-importexport/src/main/java/org/hisp/dhis/importexport/synchronous/ExportPivotViewService.java 2011-10-12 10:32:57 +0000
+++ dhis-2/dhis-services/dhis-service-importexport/src/main/java/org/hisp/dhis/importexport/synchronous/ExportPivotViewService.java 2011-10-13 18:35:12 +0000
@@ -64,18 +64,11 @@
{
private static final Log log = LogFactory.getLog( ExportPivotViewService.class );
- // The number of values to buffer before forcing attempt to flush
- // Ideally this value should be set to a number of bytes say twice the size of the underlying buffer
- // 500 is just a conservative thumbsuck
- private static final int CHUNK = 500;
-
- // service can export either aggregated datavalues or aggregated indicator values
public enum RequestType
{
DATAVALUE, INDICATORVALUE
};
- // precision to use when formatting double values
public static int PRECISION = 5;
// -------------------------------------------------------------------------
@@ -198,8 +191,6 @@
writer.write( "# period, orgunit, dataelement, catoptcombo, value\n" );
- int values = 0;
-
while ( adv != null )
{
writer.write( "'" + periodIdIsoMap.get( adv.getPeriodId() ) + "'," );
@@ -208,14 +199,6 @@
writer.write( adv.getCategoryOptionComboId() + "," );
writer.write( adv.getValue() + "\n" );
- // defend against expanding the writer buffer uncontrollably
- values = ++values % CHUNK;
-
- if ( values == 0 )
- {
- writer.flush();
- }
-
adv = iterator.next();
}
}
@@ -241,8 +224,6 @@
writer.write( "# period, orgunit, indicator, factor, numerator, denominator\n" );
- int values = 0;
-
while ( aiv != null )
{
writer.write( "'" + periodIdIsoMap.get( aiv.getPeriodId() ) + "'," );
@@ -252,21 +233,13 @@
writer.write( MathUtils.roundToString( aiv.getNumeratorValue(), PRECISION ) + "," );
writer.write( MathUtils.roundToString( aiv.getDenominatorValue(), PRECISION ) + "\n" );
- // defend against expanding the writer buffer uncontrollably
- values = ++values % CHUNK;
-
- if ( values == 0 )
- {
- writer.flush();
- }
-
aiv = iterator.next();
}
}
catch ( IOException ex )
{
iterator.close();
- throw (ex);
+ throw ( ex );
}
writer.flush();
=== modified file 'dhis-2/dhis-web/dhis-web-reporting/src/main/java/org/hisp/dhis/reporting/exp/ExportDataMartAction.java'
--- dhis-2/dhis-web/dhis-web-reporting/src/main/java/org/hisp/dhis/reporting/exp/ExportDataMartAction.java 2011-10-12 08:34:07 +0000
+++ dhis-2/dhis-web/dhis-web-reporting/src/main/java/org/hisp/dhis/reporting/exp/ExportDataMartAction.java 2011-10-13 18:35:12 +0000
@@ -52,13 +52,12 @@
/**
* @author Bob Jolliffe
- *
- * This action is called to export a csv formatted selection of aggregated indicator or
- * data values from datamart.
- * It requires 4 parameters:
- * startdate and enddate: 8 character string representation of date - 20100624
- * root: id of root organization unit
- * level: level number to fetch aggregated values for
+ *
+ * This action is called to export a csv formatted selection of
+ * aggregated indicator or data values from datamart. It requires 4
+ * parameters: startdate and enddate: 8 character string representation
+ * of date - 20100624 root: id of root organization unit level: level
+ * number to fetch aggregated values for
*/
public class ExportDataMartAction
implements Action
@@ -69,19 +68,23 @@
private static final Log log = LogFactory.getLog( ExportDataMartAction.class );
private static final DateFormat dateFormat = new SimpleDateFormat( "yyyyMMdd" );
-
+
private static final String NO_STARTDATE = "The request is missing a startDate parameter";
+
private static final String NO_ENDDATE = "The request is missing an endDate parameter";
+
private static final String BAD_STARTDATE = "The request has a bad startDate parameter. Required format is YYYMMDD";
+
private static final String BAD_ENDDATE = "The request has a bad endDate parameter. Required format is YYYMMDD";
+
private static final String NO_ROOT = "The request is missing a non-zero dataSourceRoot parameter";
+
private static final String NO_LEVEL = "The request is missing a non-zero dataSourceLevel parameter";
- // http header result type
private static final String CLIENT_ERROR = "client-error";
private static final int HTTP_ERROR = 400;
-
+
// -------------------------------------------------------------------------
// Dependencies
// -------------------------------------------------------------------------
@@ -131,7 +134,7 @@
{
this.dataSourceRoot = dataSourceRoot;
}
-
+
private RequestType requestType;
public void setRequestType( RequestType requestType )
@@ -149,13 +152,14 @@
// -------------------------------------------------------------------------
// Action implementation
// -------------------------------------------------------------------------
-
- public String execute() throws IOException
+
+ public String execute()
+ throws IOException
{
HttpServletRequest request = ServletActionContext.getRequest();
- log.info( "DataMart export request from " + currentUserService.getCurrentUsername() +
- " @ " + request.getRemoteAddr() );
+ log.info( "DataMart export request from " + currentUserService.getCurrentUsername() + " @ "
+ + request.getRemoteAddr() );
HttpServletResponse response = ServletActionContext.getResponse();
@@ -205,7 +209,7 @@
{
paramError = BAD_ENDDATE;
}
- }
+ }
catch ( java.text.ParseException ex )
{
paramError = ex.getMessage();
@@ -221,16 +225,15 @@
// timestamp filename
SimpleDateFormat format = new SimpleDateFormat( "_yyyy_MM_dd_HHmm_ss" );
- String filename = requestType + format.format(Calendar.getInstance().getTime()) + ".csv.gz";
+ String filename = requestType + format.format( Calendar.getInstance().getTime() ) + ".csv.gz";
PeriodType pType = PeriodType.getPeriodTypeByName( periodType );
-
+
// prepare to write output
OutputStream out = null;
// how many rows do we expect
- int count = exportPivotViewService.count( requestType, pType, start, end,
- dataSourceLevel, dataSourceRoot);
+ int count = exportPivotViewService.count( requestType, pType, start, end, dataSourceLevel, dataSourceRoot );
ContextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_GZIP, true, filename, true );
@@ -239,9 +242,8 @@
try
{
- out = new GZIPOutputStream(response.getOutputStream(), GZIPBUFFER);
- exportPivotViewService.execute(out, requestType, pType, start, end,
- dataSourceLevel, dataSourceRoot);
+ out = new GZIPOutputStream( response.getOutputStream(), GZIPBUFFER );
+ exportPivotViewService.execute( out, requestType, pType, start, end, dataSourceLevel, dataSourceRoot );
}
finally
{